source: trunk/python/scantable.py @ 1463

Last change on this file since 1463 was 1443, checked in by Malte Marquarding, 16 years ago

Fix ticket #127; still have to add class header hack

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 67.1 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Overrides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1435]41 if isinstance(filename, str):# or \
42# (isinstance(filename, list) or isinstance(filename, tuple)) \
43# and isinstance(filename[-1], str):
[976]44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
[718]49 if rcParams['verbose']:
[976]50 asaplog.push(s)
51 print asaplog.pop().strip()
[718]52 return
[976]53 raise IOError(s)
[1115]54 if os.path.isdir(filename) \
[1118]55 and not os.path.exists(filename+'/table.f1'):
[976]56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
[1118]58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
[976]60 if unit is not None:
61 self.set_fluxunit(unit)
62 self.set_freqframe(rcParams['scantable.freqframe'])
[718]63 else:
[1118]64 msg = "The given file '%s'is not a valid " \
65 "asap table." % (filename)
[976]66 if rcParams['verbose']:
67 print msg
68 return
69 else:
70 raise IOError(msg)
[226]71 else:
[1118]72 self._fill([filename], unit, average)
73 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]74 and isinstance(filename[-1], str):
75 self._fill(filename, unit, average)
[1259]76 self._add_history("scantable", varlist)
[714]77 print_log()
[102]78
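# Illustrative sketch (added for clarity, not part of the original source): typical
# ways a scantable might be constructed; the file names below are hypothetical.
#
#   from asap import scantable
#   s1 = scantable("mydata.rpf", average=True)   # fill from an rpfits/sdfits/ms file
#   s2 = scantable("mydata.asap")                # open a previously saved asap table
#   s3 = scantable(s1)                           # [advanced] reference an existing scantable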
[876]79 def save(self, name=None, format=None, overwrite=False):
[116]80 """
[1280]81 Store the scantable on disk. This can be an asap (aips++) Table,
82 SDFITS or MS2 format.
[116]83 Parameters:
[1093]84 name: the name of the output file. For format "ASCII"
85 this is the root file name (data in 'name'.txt
[497]86 and header in 'name'_header.txt)
[116]87 format: an optional file format. Default is ASAP.
[280]88 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]89 'SDFITS' (save as SDFITS file)
[200]90 'ASCII' (saves as ascii text file)
[226]91 'MS2' (saves as an aips++
92 MeasurementSet V2)
[1443]93 'FITS' (save as image FITS - not
 94 readable by CLASS)
95 'CLASS' (save as FITS readable by CLASS)
[411]96 overwrite: If the file should be overwritten if it exists.
[256]97 The default False is to return with warning
[411]98 without writing the output. USE WITH CARE.
[116]99 Example:
100 scan.save('myscan.asap')
[1118]101 scan.save('myscan.sdfits', 'SDFITS')
[116]102 """
[411]103 from os import path
[226]104 if format is None: format = rcParams['scantable.save']
[256]105 suffix = '.'+format.lower()
[1118]106 if name is None or name == "":
[256]107 name = 'scantable'+suffix
[718]108 msg = "No filename given. Using default name %s..." % name
109 asaplog.push(msg)
[411]110 name = path.expandvars(name)
[256]111 if path.isfile(name) or path.isdir(name):
112 if not overwrite:
[718]113 msg = "File %s exists." % name
114 if rcParams['verbose']:
115 print msg
116 return
117 else:
118 raise IOError(msg)
[451]119 format2 = format.upper()
120 if format2 == 'ASAP':
[116]121 self._save(name)
122 else:
[989]123 from asap._asap import stwriter as stw
[1118]124 writer = stw(format2)
125 writer.write(self, name)
[718]126 print_log()
[116]127 return
128
[102]129 def copy(self):
130 """
131 Return a copy of this scantable.
[1348]132 Note:
133 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]134 Parameters:
[113]135 none
[102]136 Example:
137 copiedscan = scan.copy()
138 """
[876]139 sd = scantable(Scantable._copy(self))
[113]140 return sd
141
[1093]142 def drop_scan(self, scanid=None):
143 """
144 Return a new scantable where the specified scan number(s) has(have)
145 been dropped.
146 Parameters:
147 scanid: a (list of) scan number(s)
148 """
149 from asap import _is_sequence_or_number as _is_valid
150 from asap import _to_list
151 from asap import unique
152 if not _is_valid(scanid):
153 if rcParams['verbose']:
154 print "Please specify a scanno to drop from the scantable"
155 return
156 else:
157 raise RuntimeError("No scan given")
158 try:
159 scanid = _to_list(scanid)
160 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
161 for sid in scanid: allscans.remove(sid)
[1118]162 if len(allscans) == 0:
163 raise ValueError("Can't remove all scans")
[1093]164 except ValueError:
165 if rcParams['verbose']:
166 print "Couldn't find any match."
167 return
168 else: raise
169 try:
170 bsel = self.get_selection()
171 sel = selector()
172 sel.set_scans(allscans)
173 self.set_selection(bsel+sel)
174 scopy = self._copy()
175 self.set_selection(bsel)
176 return scantable(scopy)
177 except RuntimeError:
[1118]178 if rcParams['verbose']:
179 print "Couldn't find any match."
180 else:
181 raise
[1093]182
183
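# Usage sketch (assumed, not from the original source): dropping scans by number;
# 'scan' is a hypothetical scantable instance.
#
#   cleaned = scan.drop_scan(0)          # drop scan 0
#   cleaned = scan.drop_scan([1, 3, 5])  # drop several scans at once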
[102]184 def get_scan(self, scanid=None):
185 """
186 Return a specific scan (by scanno) or collection of scans (by
187 source name) in a new scantable.
[1348]188 Note:
189 See scantable.drop_scan() for the inverse operation.
[102]190 Parameters:
[513]191 scanid: a (list of) scanno or a source name, unix-style
192 patterns are accepted for source name matching, e.g.
 193 '*_R' gets all 'ref' scans
[102]194 Example:
[513]195 # get all scans containing the source '323p459'
196 newscan = scan.get_scan('323p459')
197 # get all 'off' scans
198 refscans = scan.get_scan('*_R')
 199 # get a subset of scans by scanno (as listed in scan.summary())
[1118]200 newscan = scan.get_scan([0, 2, 7, 10])
[102]201 """
202 if scanid is None:
[718]203 if rcParams['verbose']:
[1118]204 print "Please specify a scan no or name to " \
205 "retrieve from the scantable"
[718]206 return
207 else:
208 raise RuntimeError("No scan given")
209
[102]210 try:
[946]211 bsel = self.get_selection()
212 sel = selector()
[102]213 if type(scanid) is str:
[946]214 sel.set_name(scanid)
215 self.set_selection(bsel+sel)
[876]216 scopy = self._copy()
[946]217 self.set_selection(bsel)
[876]218 return scantable(scopy)
[102]219 elif type(scanid) is int:
[946]220 sel.set_scans([scanid])
221 self.set_selection(bsel+sel)
[876]222 scopy = self._copy()
[946]223 self.set_selection(bsel)
[876]224 return scantable(scopy)
[381]225 elif type(scanid) is list:
[946]226 sel.set_scans(scanid)
227 self.set_selection(sel)
[876]228 scopy = self._copy()
[946]229 self.set_selection(bsel)
[876]230 return scantable(scopy)
[381]231 else:
[718]232 msg = "Illegal scanid type, use 'int' or 'list' if ints."
233 if rcParams['verbose']:
234 print msg
235 else:
236 raise TypeError(msg)
[102]237 except RuntimeError:
[718]238 if rcParams['verbose']: print "Couldn't find any match."
239 else: raise
[102]240
241 def __str__(self):
[1118]242 return Scantable._summary(self, True)
[102]243
[976]244 def summary(self, filename=None):
[102]245 """
246 Print a summary of the contents of this scantable.
247 Parameters:
 248 filename: the name of a file to write the output to
249 Default - no file output
[102]252 """
[976]253 info = Scantable._summary(self, True)
254 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]255 if filename is not None:
[256]256 if filename is "":
257 filename = 'scantable_summary.txt'
[415]258 from os.path import expandvars, isdir
[411]259 filename = expandvars(filename)
[415]260 if not isdir(filename):
[413]261 data = open(filename, 'w')
262 data.write(info)
263 data.close()
264 else:
[718]265 msg = "Illegal file name '%s'." % (filename)
266 if rcParams['verbose']:
267 print msg
268 else:
269 raise IOError(msg)
270 if rcParams['verbose']:
[794]271 try:
272 from IPython.genutils import page as pager
273 except ImportError:
274 from pydoc import pager
275 pager(info)
[718]276 else:
277 return info
[710]278
[946]279
280 def get_selection(self):
281 """
[1005]282 Get the selection object currently set on this scantable.
283 Parameters:
284 none
285 Example:
286 sel = scan.get_selection()
287 sel.set_ifs(0) # select IF 0
288 scan.set_selection(sel) # apply modified selection
[946]289 """
290 return selector(self._getselection())
291
[1005]292 def set_selection(self, selection=selector()):
[946]293 """
[1005]294 Select a subset of the data. All following operations on this scantable
 295 are only applied to this selection.
296 Parameters:
297 selection: a selector object (default unset the selection)
298 Examples:
299 sel = selector() # create a selection object
[1118]300 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]301 scan.set_selection(sel) # set the selection
 302 scan.summary() # will only print summary of scanno 0 and 3
303 scan.set_selection() # unset the selection
[946]304 """
305 self._setselection(selection)
306
[876]307 def stats(self, stat='stddev', mask=None):
[102]308 """
[135]309 Determine the specified statistic of the current beam/if/pol
[102]310 Takes a 'mask' as an optional parameter to specify which
311 channels should be excluded.
312 Parameters:
[135]313 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
314 'var', 'stddev', 'avdev', 'rms', 'median'
315 mask: an optional mask specifying where the statistic
[102]316 should be determined.
317 Example:
[113]318 scan.set_unit('channel')
[1118]319 msk = scan.create_mask([100, 200], [500, 600])
[135]320 scan.stats(stat='mean', mask=msk)
[102]321 """
[102]322 if mask is None:
[876]323 mask = []
[1118]324 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]325 if not self._check_ifs():
[1118]326 raise ValueError("Cannot apply mask as the IFs have different "
327 "number of channels. Please use setselection() "
328 "to select individual IFs")
[256]329
[876]330 statvals = self._math._stats(self, mask, stat)
331 out = ''
332 axes = []
333 for i in range(self.nrow()):
334 axis = []
335 axis.append(self.getscan(i))
336 axis.append(self.getbeam(i))
337 axis.append(self.getif(i))
338 axis.append(self.getpol(i))
339 axis.append(self.getcycle(i))
340 axes.append(axis)
341 tm = self._gettime(i)
342 src = self._getsourcename(i)
343 out += 'Scan[%d] (%s) ' % (axis[0], src)
344 out += 'Time[%s]:\n' % (tm)
345 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
346 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
347 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
348 out += '= %3.3f\n' % (statvals[i])
349 out += "--------------------------------------------------\n"
[256]350
[876]351 if rcParams['verbose']:
352 print "--------------------------------------------------"
[1118]353 print " ", stat
[876]354 print "--------------------------------------------------"
355 print out
[1295]356 #else:
357 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
358 # 'axes' : axes,
359 # 'data': statvals}
360 return statvals
[102]361
[1118]362 def stddev(self, mask=None):
[135]363 """
364 Determine the standard deviation of the current beam/if/pol
365 Takes a 'mask' as an optional parameter to specify which
366 channels should be excluded.
367 Parameters:
368 mask: an optional mask specifying where the standard
369 deviation should be determined.
370
371 Example:
372 scan.set_unit('channel')
[1118]373 msk = scan.create_mask([100, 200], [500, 600])
[135]374 scan.stddev(mask=msk)
375 """
[1118]376 return self.stats(stat='stddev', mask=mask)
[135]377
[1003]378
[1259]379 def get_column_names(self):
[1003]380 """
381 Return a list of column names, which can be used for selection.
382 """
[1259]383 return list(Scantable.get_column_names(self))
[1003]384
[876]385 def get_tsys(self):
[113]386 """
387 Return the System temperatures.
388 Returns:
[876]389 a list of Tsys values for the current selection
[113]390 """
[256]391
[876]392 return self._row_callback(self._gettsys, "Tsys")
[256]393
[876]394 def _row_callback(self, callback, label):
395 axes = []
[1118]396 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]397 out = ""
[1118]398 outvec = []
[876]399 for i in range(self.nrow()):
400 axis = []
401 axis.append(self.getscan(i))
402 axis.append(self.getbeam(i))
403 axis.append(self.getif(i))
404 axis.append(self.getpol(i))
405 axis.append(self.getcycle(i))
406 axes.append(axis)
407 tm = self._gettime(i)
408 src = self._getsourcename(i)
409 out += 'Scan[%d] (%s) ' % (axis[0], src)
410 out += 'Time[%s]:\n' % (tm)
411 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
412 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
413 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
414 outvec.append(callback(i))
415 out += '= %3.3f\n' % (outvec[i])
416 out += "--------------------------------------------------\n"
417 if rcParams['verbose']:
418 print "--------------------------------------------------"
419 print " %s" % (label)
420 print "--------------------------------------------------"
421 print out
[1175]422 # disabled because the vector seems more useful
423 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
424 return outvec
[256]425
[1070]426 def _get_column(self, callback, row=-1):
427 """
428 """
429 if row == -1:
430 return [callback(i) for i in range(self.nrow())]
431 else:
432 if 0 <= row < self.nrow():
433 return callback(row)
[256]434
[1070]435
[1348]436 def get_time(self, row=-1, asdatetime=False):
[113]437 """
438 Get a list of time stamps for the observations.
[1348]439 Return a string for each integration time stamp in the scantable, or a datetime object if asdatetime=True.
[113]440 Parameters:
[1348]441 row: row no of integration. Default -1 return all rows
442 asdatetime: return values as datetime objects rather than strings
[113]443 Example:
444 none
445 """
[1175]446 from time import strptime
447 from datetime import datetime
[1392]448 times = self._get_column(self._gettime, row)
[1348]449 if not asdatetime:
[1392]450 return times
[1175]451 format = "%Y/%m/%d/%H:%M:%S"
452 if isinstance(times, list):
453 return [datetime(*strptime(i, format)[:6]) for i in times]
454 else:
455 return datetime(*strptime(times, format)[:6])
[102]456
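# Usage sketch (assumed): retrieving time stamps; 'scan' is a hypothetical
# scantable instance.
#
#   stamps = scan.get_time()                       # one string per integration
#   first = scan.get_time(row=0, asdatetime=True)  # a datetime.datetime object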
[1348]457
458 def get_inttime(self, row=-1):
459 """
460 Get a list of integration times for the observations.
461 Return a time in seconds for each integration in the scantable.
462 Parameters:
463 row: row no of integration. Default -1 return all rows.
464 Example:
465 none
466 """
467 return self._get_column(self._getinttime, row)
468
469
[714]470 def get_sourcename(self, row=-1):
471 """
[794]472 Get a list of source names for the observations.
[714]473 Return a string for each integration in the scantable.
474 Parameters:
[1348]475 row: row no of integration. Default -1 return all rows.
[714]476 Example:
477 none
478 """
[1070]479 return self._get_column(self._getsourcename, row)
[714]480
[794]481 def get_elevation(self, row=-1):
482 """
483 Get a list of elevations for the observations.
484 Return a float for each integration in the scantable.
485 Parameters:
[1348]486 row: row no of integration. Default -1 return all rows.
[794]487 Example:
488 none
489 """
[1070]490 return self._get_column(self._getelevation, row)
[794]491
492 def get_azimuth(self, row=-1):
493 """
494 Get a list of azimuths for the observations.
495 Return a float for each integration in the scantable.
496 Parameters:
[1348]497 row: row no of integration. Default -1 return all rows.
[794]498 Example:
499 none
500 """
[1070]501 return self._get_column(self._getazimuth, row)
[794]502
503 def get_parangle(self, row=-1):
504 """
505 Get a list of parallactic angles for the observations.
506 Return a float for each integration in the scantable.
507 Parameters:
[1348]508 row: row no of integration. Default -1 return all rows.
[794]509 Example:
510 none
511 """
[1070]512 return self._get_column(self._getparangle, row)
[794]513
[1070]514 def get_direction(self, row=-1):
515 """
516 Get a list of Positions on the sky (direction) for the observations.
517 Return a float for each integration in the scantable.
518 Parameters:
519 row: row no of integration. Default -1 return all rows
520 Example:
521 none
522 """
523 return self._get_column(self._getdirection, row)
524
[1391]525 def get_directionval(self, row=-1):
526 """
527 Get a list of Positions on the sky (direction) for the observations.
528 Return a float for each integration in the scantable.
529 Parameters:
530 row: row no of integration. Default -1 return all rows
531 Example:
532 none
533 """
534 return self._get_column(self._getdirectionvec, row)
535
536
[102]537 def set_unit(self, unit='channel'):
538 """
539 Set the unit for all following operations on this scantable
540 Parameters:
541 unit: optional unit, default is 'channel'
[1118]542 one of '*Hz', 'km/s', 'channel', ''
[102]543 """
[484]544 varlist = vars()
[1118]545 if unit in ['', 'pixel', 'channel']:
[113]546 unit = ''
547 inf = list(self._getcoordinfo())
548 inf[0] = unit
549 self._setcoordinfo(inf)
[1118]550 self._add_history("set_unit", varlist)
[113]551
[484]552 def set_instrument(self, instr):
[358]553 """
[1348]554 Set the instrument for subsequent processing.
[358]555 Parameters:
[710]556 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]557 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]558 """
559 self._setInstrument(instr)
[1118]560 self._add_history("set_instument", vars())
[718]561 print_log()
[358]562
[1190]563 def set_feedtype(self, feedtype):
564 """
565 Overwrite the feed type, which might not be set correctly.
566 Parameters:
567 feedtype: 'linear' or 'circular'
568 """
569 self._setfeedtype(feedtype)
570 self._add_history("set_feedtype", vars())
571 print_log()
572
[276]573 def set_doppler(self, doppler='RADIO'):
574 """
575 Set the doppler for all following operations on this scantable.
576 Parameters:
577 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
578 """
[484]579 varlist = vars()
[276]580 inf = list(self._getcoordinfo())
581 inf[2] = doppler
582 self._setcoordinfo(inf)
[1118]583 self._add_history("set_doppler", vars())
[718]584 print_log()
[710]585
[226]586 def set_freqframe(self, frame=None):
[113]587 """
588 Set the frame type of the Spectral Axis.
589 Parameters:
[591]590 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]591 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
592 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]593 Examples:
594 scan.set_freqframe('BARY')
595 """
[484]596 if frame is None: frame = rcParams['scantable.freqframe']
597 varlist = vars()
[1118]598 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
599 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]600
[989]601 if frame in valid:
[113]602 inf = list(self._getcoordinfo())
603 inf[1] = frame
604 self._setcoordinfo(inf)
[1118]605 self._add_history("set_freqframe", varlist)
[102]606 else:
[1118]607 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]608 if rcParams['verbose']:
609 print msg
610 else:
611 raise TypeError(msg)
612 print_log()
[710]613
[989]614 def set_dirframe(self, frame=""):
615 """
616 Set the frame type of the Direction on the sky.
617 Parameters:
618 frame: an optional frame type, default ''. Valid frames are:
619 'J2000', 'B1950', 'GALACTIC'
620 Examples:
621 scan.set_dirframe('GALACTIC')
622 """
623 varlist = vars()
624 try:
625 Scantable.set_dirframe(self, frame)
[1118]626 except RuntimeError, msg:
[989]627 if rcParams['verbose']:
628 print msg
629 else:
630 raise
[1118]631 self._add_history("set_dirframe", varlist)
[989]632
[113]633 def get_unit(self):
634 """
635 Get the default unit set in this scantable
636 Returns:
637 A unit string
638 """
639 inf = self._getcoordinfo()
640 unit = inf[0]
641 if unit == '': unit = 'channel'
642 return unit
[102]643
[158]644 def get_abcissa(self, rowno=0):
[102]645 """
[158]646 Get the abcissa in the current coordinate setup for the currently
[113]647 selected Beam/IF/Pol
648 Parameters:
[226]649 rowno: an optional row number in the scantable. Default is the
650 first row, i.e. rowno=0
[113]651 Returns:
[1348]652 The abcissa values and the label string (as a tuple)
[113]653 """
[256]654 abc = self._getabcissa(rowno)
[710]655 lbl = self._getabcissalabel(rowno)
[718]656 print_log()
[158]657 return abc, lbl
[113]658
[1118]659 def flag(self, mask=None):
[1001]660 """
661 Flag the selected data using an optional channel mask.
662 Parameters:
663 mask: an optional channel mask, created with create_mask. Default
664 (no mask) is all channels.
665 """
666 varlist = vars()
[1118]667 if mask is None:
668 mask = []
[1001]669 try:
670 self._flag(mask)
[1118]671 except RuntimeError, msg:
[1001]672 if rcParams['verbose']:
673 print msg
674 return
675 else: raise
676 self._add_history("flag", varlist)
677
[1203]678 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]679 """
680 Flag the data in 'lag' space by providing a frequency to remove.
681 Flagged data in the scantable gets set to 0.0 before the fft.
682 No taper is applied.
683 Parameters:
[1348]684 frequency: the frequency (really a period within the bandwidth)
685 to remove
686 width: the width of the frequency to remove, to remove a
 687 range of frequencies around the centre.
[1203]688 unit: the frequency unit (default "GHz")
689 Notes:
[1348]690 It is recommended to flag edges of the band or strong
691 signals beforehand.
[1192]692 """
693 if insitu is None: insitu = rcParams['insitu']
694 self._math._setinsitu(insitu)
695 varlist = vars()
[1370]696 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]697 if not base.has_key(unit):
698 raise ValueError("%s is not a valid unit." % unit)
699 try:
[1200]700 s = scantable(self._math._lag_flag(self, frequency*base[unit],
701 width*base[unit]))
[1192]702 except RuntimeError, msg:
703 if rcParams['verbose']:
704 print msg
705 return
706 else: raise
707 s._add_history("lag_flag", varlist)
708 print_log()
709 if insitu:
710 self._assign(s)
711 else:
712 return s
[1001]713
[1192]714
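# Usage sketch (assumed): removing a ripple from the spectra in 'lag' space;
# 'scan' is a hypothetical scantable instance and the numbers are arbitrary.
#
#   cleaned = scan.lag_flag(1.2, width=0.1, unit="MHz", insitu=False)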
[113]715 def create_mask(self, *args, **kwargs):
716 """
[1118]717 Compute and return a mask based on [min, max] windows.
[189]718 The specified windows are to be INCLUDED, when the mask is
[113]719 applied.
[102]720 Parameters:
[1118]721 [min, max], [min2, max2], ...
[1024]722 Pairs of start/end points (inclusive) specifying the regions
[102]723 to be masked
[189]724 invert: optional argument. If specified as True,
725 return an inverted mask, i.e. the regions
726 specified are EXCLUDED
[513]727 row: create the mask using the specified row for
728 unit conversions, default is row=0
729 only necessary if frequency varies over rows.
[102]730 Example:
[113]731 scan.set_unit('channel')
732 a)
[1118]733 msk = scan.create_mask([400, 500], [800, 900])
[189]734 # masks everything outside 400 and 500
[113]735 # and 800 and 900 in the unit 'channel'
736
737 b)
[1118]738 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]739 # masks the regions between 400 and 500
[113]740 # and 800 and 900 in the unit 'channel'
[1024]741 c)
742 mask only channel 400
[1118]743 msk = scan.create_mask([400, 400])
[102]744 """
[513]745 row = 0
746 if kwargs.has_key("row"):
747 row = kwargs.get("row")
748 data = self._getabcissa(row)
[113]749 u = self._getcoordinfo()[0]
[718]750 if rcParams['verbose']:
[113]751 if u == "": u = "channel"
[718]752 msg = "The current mask window unit is %s" % u
[1118]753 i = self._check_ifs()
754 if not i:
[876]755 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]756 asaplog.push(msg)
[102]757 n = self.nchan()
[1295]758 msk = _n_bools(n, False)
[710]759 # test if args is a 'list' or a 'normal *args - UGLY!!!
760
[1118]761 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
762 and args or args[0]
[710]763 for window in ws:
[102]764 if (len(window) != 2 or window[0] > window[1] ):
[1118]765 raise TypeError("A window needs to be defined as [min, max]")
[102]766 for i in range(n):
[1024]767 if data[i] >= window[0] and data[i] <= window[1]:
[1295]768 msk[i] = True
[113]769 if kwargs.has_key('invert'):
770 if kwargs.get('invert'):
[1295]771 msk = mask_not(msk)
[718]772 print_log()
[102]773 return msk
[710]774
[256]775 def get_restfreqs(self):
776 """
777 Get the restfrequency(s) stored in this scantable.
778 The return value(s) are always of unit 'Hz'
779 Parameters:
780 none
781 Returns:
782 a list of doubles
783 """
784 return list(self._getrestfreqs())
[102]785
[402]786
[931]787 def set_restfreqs(self, freqs=None, unit='Hz'):
788 """
 789 Set or replace the restfrequency specified.
790 If the 'freqs' argument holds a scalar,
791 then that rest frequency will be applied to all the selected
792 data. If the 'freqs' argument holds
793 a vector, then it MUST be of equal or smaller length than
794 the number of IFs (and the available restfrequencies will be
795 replaced by this vector). In this case, *all* data have
796 the restfrequency set per IF according
797 to the corresponding value you give in the 'freqs' vector.
[1118]798 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]799 IF 1 gets restfreq 2e9.
[1395]800 You can also specify the frequencies via a linecatalog.
[1153]801
[931]802 Parameters:
 803 freqs: list of rest frequency values or string identifiers
804 unit: unit for rest frequency (default 'Hz')
[402]805
[931]806 Example:
807 # set the given restfrequency for the whole table
808 scan.set_restfreqs(freqs=1.4e9)
[1348]809 # If the number of IFs in the data is >= 2 IF0 gets the first
[931]810 # value IF1 the second...
[1118]811 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]812 #set the given restfrequency for the whole table (by name)
813 scan.set_restfreqs(freqs="OH1667")
[391]814
[931]815 Note:
 816 To do more sophisticated rest frequency setting, e.g. on a
817 source and IF basis, use scantable.set_selection() before using
818 this function.
 819 # provided your scantable is called scan
820 selection = selector()
821 selection.set_name("ORION*")
822 selection.set_ifs([1])
823 scan.set_selection(selection)
824 scan.set_restfreqs(freqs=86.6e9)
825
826 """
827 varlist = vars()
[1157]828 from asap import linecatalog
829 # simple value
[1118]830 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]831 self._setrestfreqs(freqs, "",unit)
[1157]832 # list of values
[1118]833 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]834 # list values are scalars
[1118]835 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]836 sel = selector()
837 savesel = self._getselection()
[1322]838 iflist = self.getifnos()
[931]839 for i in xrange(len(freqs)):
[1322]840 sel.set_ifs(iflist[i])
[931]841 self._setselection(sel)
[1153]842 self._setrestfreqs(freqs[i], "",unit)
[931]843 self._setselection(savesel)
[1157]844 # list values are dicts of the form {'value': ..., 'name': ...}
845 elif isinstance(freqs[-1], dict):
846 sel = selector()
847 savesel = self._getselection()
[1322]848 iflist = self.getifnos()
[1157]849 for i in xrange(len(freqs)):
[1322]850 sel.set_ifs(iflist[i])
[1259]851 self._setselection(sel)
[1157]852 self._setrestfreqs(freqs[i]["value"],
853 freqs[i]["name"], "MHz")
854 self._setselection(savesel)
855 # freqs are to be taken from a linecatalog
[1153]856 elif isinstance(freqs, linecatalog):
857 sel = selector()
858 savesel = self._getselection()
[1322]859 iflist = self.getifnos()
[1153]860 for i in xrange(freqs.nrow()):
[1322]861 sel.set_ifs(iflist[i])
[1153]862 self._setselection(sel)
863 self._setrestfreqs(freqs.get_frequency(i),
864 freqs.get_name(i), "MHz")
865 # ensure that we are not iterating past nIF
866 if i == self.nif()-1: break
867 self._setselection(savesel)
[931]868 else:
869 return
870 self._add_history("set_restfreqs", varlist)
871
[1360]872 def shift_refpix(self, delta):
873 """
 874 Shift the reference pixel of the Spectral Coordinate by an
875 integer amount.
876 Parameters:
877 delta: the amount to shift by
878 Note:
879 Be careful using this with broadband data.
880 """
881 Scantable.shift(self, delta)
[931]882
[1259]883 def history(self, filename=None):
884 """
885 Print the history. Optionally to a file.
[1348]886 Parameters:
887 filename: The name of the file to save the history to.
[1259]888 """
[484]889 hist = list(self._gethistory())
[794]890 out = "-"*80
[484]891 for h in hist:
[489]892 if h.startswith("---"):
[794]893 out += "\n"+h
[489]894 else:
895 items = h.split("##")
896 date = items[0]
897 func = items[1]
898 items = items[2:]
[794]899 out += "\n"+date+"\n"
900 out += "Function: %s\n Parameters:" % (func)
[489]901 for i in items:
902 s = i.split("=")
[1118]903 out += "\n %s = %s" % (s[0], s[1])
[794]904 out += "\n"+"-"*80
[1259]905 if filename is not None:
906 if filename is "":
907 filename = 'scantable_history.txt'
908 import os
909 filename = os.path.expandvars(os.path.expanduser(filename))
910 if not os.path.isdir(filename):
911 data = open(filename, 'w')
912 data.write(out)
913 data.close()
914 else:
915 msg = "Illegal file name '%s'." % (filename)
916 if rcParams['verbose']:
917 print msg
918 else:
919 raise IOError(msg)
920 if rcParams['verbose']:
921 try:
922 from IPython.genutils import page as pager
923 except ImportError:
924 from pydoc import pager
925 pager(out)
926 else:
927 return out
[484]928 return
[513]929 #
930 # Maths business
931 #
932
[931]933 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]934 """
[1070]935 Return the (time) weighted average of a scan.
[513]936 Note:
[1070]937 in channels only - align if necessary
[513]938 Parameters:
939 mask: an optional mask (only used for 'var' and 'tsys'
940 weighting)
[558]941 scanav: True averages each scan separately
942 False (default) averages all scans together,
[1099]943 weight: Weighting scheme.
944 'none' (mean no weight)
945 'var' (1/var(spec) weighted)
946 'tsys' (1/Tsys**2 weighted)
947 'tint' (integration time weighted)
948 'tintsys' (Tint/Tsys**2)
949 'median' ( median averaging)
[535]950 The default is 'tint'
[931]951 align: align the spectra in velocity before averaging. It takes
952 the time of the first spectrum as reference time.
[513]953 Example:
954 # time average the scantable without using a mask
[710]955 newscan = scan.average_time()
[513]956 """
957 varlist = vars()
[976]958 if weight is None: weight = 'TINT'
[513]959 if mask is None: mask = ()
[1099]960 if scanav: scanav = "SCAN"
961 else: scanav = "NONE"
[1118]962 scan = (self, )
[989]963 try:
[1118]964 if align:
965 scan = (self.freq_align(insitu=False), )
966 s = None
967 if weight.upper() == 'MEDIAN':
968 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
969 scanav))
970 else:
971 s = scantable(self._math._average(scan, mask, weight.upper(),
972 scanav))
973 except RuntimeError, msg:
[989]974 if rcParams['verbose']:
975 print msg
976 return
977 else: raise
[1099]978 s._add_history("average_time", varlist)
[718]979 print_log()
[513]980 return s
[710]981
[876]982 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]983 """
984 Return a scan where all spectra are converted to either
985 Jansky or Kelvin depending upon the flux units of the scan table.
986 By default the function tries to look the values up internally.
987 If it can't find them (or if you want to over-ride), you must
988 specify EITHER jyperk OR eta (and D which it will try to look up
989 also if you don't set it). jyperk takes precedence if you set both.
990 Parameters:
991 jyperk: the Jy / K conversion factor
992 eta: the aperture efficiency
 993 d: the geometric diameter (metres)
994 insitu: if False a new scantable is returned.
995 Otherwise, the scaling is done in-situ
996 The default is taken from .asaprc (False)
997 """
998 if insitu is None: insitu = rcParams['insitu']
[876]999 self._math._setinsitu(insitu)
[513]1000 varlist = vars()
1001 if jyperk is None: jyperk = -1.0
1002 if d is None: d = -1.0
1003 if eta is None: eta = -1.0
[876]1004 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1005 s._add_history("convert_flux", varlist)
1006 print_log()
1007 if insitu: self._assign(s)
1008 else: return s
[513]1009
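# Usage sketch (assumed): converting between K and Jy; 'scan' is a hypothetical
# scantable instance and the Jy/K factor is arbitrary.
#
#   jy = scan.convert_flux(jyperk=12.5, insitu=False)  # explicit Jy/K factor
#   k = scan.convert_flux(insitu=False)                # look the values up internally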
[876]1010 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1011 """
1012 Return a scan after applying a gain-elevation correction.
1013 The correction can be made via either a polynomial or a
1014 table-based interpolation (and extrapolation if necessary).
1015 You specify polynomial coefficients, an ascii table or neither.
1016 If you specify neither, then a polynomial correction will be made
1017 with built in coefficients known for certain telescopes (an error
1018 will occur if the instrument is not known).
1019 The data and Tsys are *divided* by the scaling factors.
1020 Parameters:
1021 poly: Polynomial coefficients (default None) to compute a
1022 gain-elevation correction as a function of
1023 elevation (in degrees).
1024 filename: The name of an ascii file holding correction factors.
1025 The first row of the ascii file must give the column
1026 names and these MUST include columns
1027 "ELEVATION" (degrees) and "FACTOR" (multiply data
1028 by this) somewhere.
1029 The second row must give the data type of the
1030 column. Use 'R' for Real and 'I' for Integer.
1031 An example file would be
1032 (actual factors are arbitrary) :
1033
1034 TIME ELEVATION FACTOR
1035 R R R
1036 0.1 0 0.8
1037 0.2 20 0.85
1038 0.3 40 0.9
1039 0.4 60 0.85
1040 0.5 80 0.8
1041 0.6 90 0.75
1042 method: Interpolation method when correcting from a table.
1043 Values are "nearest", "linear" (default), "cubic"
1044 and "spline"
1045 insitu: if False a new scantable is returned.
1046 Otherwise, the scaling is done in-situ
1047 The default is taken from .asaprc (False)
1048 """
1049
1050 if insitu is None: insitu = rcParams['insitu']
[876]1051 self._math._setinsitu(insitu)
[513]1052 varlist = vars()
1053 if poly is None:
[1118]1054 poly = ()
[513]1055 from os.path import expandvars
1056 filename = expandvars(filename)
[876]1057 s = scantable(self._math._gainel(self, poly, filename, method))
1058 s._add_history("gain_el", varlist)
1059 print_log()
1060 if insitu: self._assign(s)
1061 else: return s
[710]1062
[931]1063 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1064 """
1065 Return a scan where all rows have been aligned in frequency/velocity.
1066 The alignment frequency frame (e.g. LSRK) is that set by function
1067 set_freqframe.
1068 Parameters:
1069 reftime: reference time to align at. By default, the time of
1070 the first row of data is used.
1071 method: Interpolation method for regridding the spectra.
1072 Choose from "nearest", "linear", "cubic" (default)
1073 and "spline"
1074 insitu: if False a new scantable is returned.
1075 Otherwise, the scaling is done in-situ
1076 The default is taken from .asaprc (False)
1077 """
[931]1078 if insitu is None: insitu = rcParams["insitu"]
[876]1079 self._math._setinsitu(insitu)
[513]1080 varlist = vars()
[931]1081 if reftime is None: reftime = ""
1082 s = scantable(self._math._freq_align(self, reftime, method))
[876]1083 s._add_history("freq_align", varlist)
1084 print_log()
1085 if insitu: self._assign(s)
1086 else: return s
[513]1087
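# Usage sketch (assumed): aligning spectra in the frame set by set_freqframe;
# 'scan' is a hypothetical scantable instance.
#
#   aligned = scan.freq_align(insitu=False)                  # align at the first row's time
#   aligned = scan.freq_align(method='linear', insitu=False)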
[876]1088 def opacity(self, tau, insitu=None):
[513]1089 """
1090 Apply an opacity correction. The data
1091 and Tsys are multiplied by the correction factor.
1092 Parameters:
1093 tau: Opacity from which the correction factor is
1094 exp(tau*ZD)
1095 where ZD is the zenith-distance
1096 insitu: if False a new scantable is returned.
1097 Otherwise, the scaling is done in-situ
1098 The default is taken from .asaprc (False)
1099 """
1100 if insitu is None: insitu = rcParams['insitu']
[876]1101 self._math._setinsitu(insitu)
[513]1102 varlist = vars()
[876]1103 s = scantable(self._math._opacity(self, tau))
1104 s._add_history("opacity", varlist)
1105 print_log()
1106 if insitu: self._assign(s)
1107 else: return s
[513]1108
1109 def bin(self, width=5, insitu=None):
1110 """
1111 Return a scan where all spectra have been binned up.
[1348]1112 Parameters:
[513]1113 width: The bin width (default=5) in pixels
1114 insitu: if False a new scantable is returned.
1115 Otherwise, the scaling is done in-situ
1116 The default is taken from .asaprc (False)
1117 """
1118 if insitu is None: insitu = rcParams['insitu']
[876]1119 self._math._setinsitu(insitu)
[513]1120 varlist = vars()
[876]1121 s = scantable(self._math._bin(self, width))
[1118]1122 s._add_history("bin", varlist)
[876]1123 print_log()
1124 if insitu: self._assign(s)
1125 else: return s
[513]1126
[710]1127
[513]1128 def resample(self, width=5, method='cubic', insitu=None):
1129 """
[1348]1130 Return a scan where all spectra have been resampled onto a new channel grid.
1131
1132 Parameters:
[513]1133 width: The bin width (default=5) in pixels
1134 method: Interpolation method when correcting from a table.
1135 Values are "nearest", "linear", "cubic" (default)
1136 and "spline"
1137 insitu: if False a new scantable is returned.
1138 Otherwise, the scaling is done in-situ
1139 The default is taken from .asaprc (False)
1140 """
1141 if insitu is None: insitu = rcParams['insitu']
[876]1142 self._math._setinsitu(insitu)
[513]1143 varlist = vars()
[876]1144 s = scantable(self._math._resample(self, method, width))
[1118]1145 s._add_history("resample", varlist)
[876]1146 print_log()
1147 if insitu: self._assign(s)
1148 else: return s
[513]1149
1150
[946]1151 def average_pol(self, mask=None, weight='none'):
1152 """
1153 Average the Polarisations together.
1154 Parameters:
1155 mask: An optional mask defining the region, where the
1156 averaging will be applied. The output will have all
1157 specified points masked.
1158 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1159 weighted), or 'tsys' (1/Tsys**2 weighted)
1160 """
1161 varlist = vars()
1162 if mask is None:
1163 mask = ()
[1010]1164 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1165 s._add_history("average_pol", varlist)
[946]1166 print_log()
[992]1167 return s
[513]1168
[1145]1169 def average_beam(self, mask=None, weight='none'):
1170 """
1171 Average the Beams together.
1172 Parameters:
1173 mask: An optional mask defining the region, where the
1174 averaging will be applied. The output will have all
1175 specified points masked.
1176 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1177 weighted), or 'tsys' (1/Tsys**2 weighted)
1178 """
1179 varlist = vars()
1180 if mask is None:
1181 mask = ()
1182 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1183 s._add_history("average_beam", varlist)
1184 print_log()
1185 return s
1186
[992]1187 def convert_pol(self, poltype=None):
1188 """
1189 Convert the data to a different polarisation type.
1190 Parameters:
1191 poltype: The new polarisation type. Valid types are:
1192 "linear", "stokes" and "circular"
1193 """
1194 varlist = vars()
1195 try:
1196 s = scantable(self._math._convertpol(self, poltype))
[1118]1197 except RuntimeError, msg:
[992]1198 if rcParams['verbose']:
[1118]1199 print msg
1200 return
[992]1201 else:
1202 raise
[1118]1203 s._add_history("convert_pol", varlist)
[992]1204 print_log()
1205 return s
1206
[876]1207 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1208 """
1209 Smooth the spectrum by the specified kernel (conserving flux).
1210 Parameters:
1211 kernel: The type of smoothing kernel. Select from
[1373]1212 'hanning' (default), 'gaussian', 'boxcar' and
1213 'rmedian'
[513]1214 width: The width of the kernel in pixels. For hanning this is
 1215 ignored, otherwise it defaults to 5 pixels.
1216 For 'gaussian' it is the Full Width Half
1217 Maximum. For 'boxcar' it is the full width.
[1373]1218 For 'rmedian' it is the half width.
[513]1219 insitu: if False a new scantable is returned.
1220 Otherwise, the scaling is done in-situ
1221 The default is taken from .asaprc (False)
1222 Example:
1223 none
1224 """
1225 if insitu is None: insitu = rcParams['insitu']
[876]1226 self._math._setinsitu(insitu)
[513]1227 varlist = vars()
[1118]1228 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1229 s._add_history("smooth", varlist)
1230 print_log()
1231 if insitu: self._assign(s)
1232 else: return s
[513]1233
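# Usage sketch (assumed): smoothing with different kernels; 'scan' is a
# hypothetical scantable instance.
#
#   hs = scan.smooth(insitu=False)                               # hanning (default)
#   gs = scan.smooth(kernel='gaussian', width=10, insitu=False)  # FWHM of 10 channels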
[876]1234
[1391]1235 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1236 """
1237 Return a scan which has been baselined (all rows) by a polynomial.
1238 Parameters:
[794]1239 mask: an optional mask
1240 order: the order of the polynomial (default is 0)
[1061]1241 plot: plot the fit and the residual. In this mode each
 1242 individual fit has to be approved, by typing 'y'
1243 or 'n'
[1391]1244 uselin: use linear polynomial fit
[794]1245 insitu: if False a new scantable is returned.
1246 Otherwise, the scaling is done in-situ
1247 The default is taken from .asaprc (False)
[513]1248 Example:
1249 # return a scan baselined by a third order polynomial,
1250 # not using a mask
1251 bscan = scan.poly_baseline(order=3)
[579]1252 """
[513]1253 if insitu is None: insitu = rcParams['insitu']
1254 varlist = vars()
1255 if mask is None:
[1295]1256 mask = [True for i in xrange(self.nchan(-1))]
[513]1257 from asap.asapfitter import fitter
[1217]1258 try:
1259 f = fitter()
1260 f.set_scan(self, mask)
[1391]1261 #f.set_function(poly=order)
1262 if uselin:
1263 f.set_function(lpoly=order)
1264 else:
1265 f.set_function(poly=order)
[1217]1266 s = f.auto_fit(insitu, plot=plot)
1267 s._add_history("poly_baseline", varlist)
1268 print_log()
1269 if insitu: self._assign(s)
1270 else: return s
1271 except RuntimeError:
1272 msg = "The fit failed, possibly because it didn't converge."
1273 if rcParams['verbose']:
1274 print msg
1275 return
1276 else:
1277 raise RuntimeError(msg)
[513]1278
[1217]1279
[1118]1280 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1281 threshold=3, chan_avg_limit=1, plot=False,
1282 insitu=None):
[880]1283 """
1284 Return a scan which has been baselined (all rows) by a polynomial.
1285 Spectral lines are detected first using linefinder and masked out
1286 to avoid them affecting the baseline solution.
1287
1288 Parameters:
 1289 mask: an optional mask retrieved from scantable
 1290 edge: an optional number of channels to drop at
 1291 the edge of the spectrum. If only one value is
1292 specified, the same number will be dropped from
1293 both sides of the spectrum. Default is to keep
[907]1294 all channels. Nested tuples represent individual
[976]1295 edge selection for different IFs (the number of spectral
1296 channels can be different)
[880]1297 order: the order of the polynomial (default is 0)
1298 threshold: the threshold used by line finder. It is better to
1299 keep it large as only strong lines affect the
1300 baseline solution.
[1280]1301 chan_avg_limit:
 1302 a maximum number of consecutive spectral channels to
1303 average during the search of weak and broad lines.
1304 The default is no averaging (and no search for weak
1305 lines). If such lines can affect the fitted baseline
1306 (e.g. a high order polynomial is fitted), increase this
1307 parameter (usually values up to 8 are reasonable). Most
1308 users of this method should find the default value
1309 sufficient.
[1061]1310 plot: plot the fit and the residual. In this mode each
 1311 individual fit has to be approved, by typing 'y'
1312 or 'n'
[880]1313 insitu: if False a new scantable is returned.
1314 Otherwise, the scaling is done in-situ
1315 The default is taken from .asaprc (False)
1316
1317 Example:
1318 scan2=scan.auto_poly_baseline(order=7)
1319 """
1320 if insitu is None: insitu = rcParams['insitu']
1321 varlist = vars()
1322 from asap.asapfitter import fitter
1323 from asap.asaplinefind import linefinder
1324 from asap import _is_sequence_or_number as _is_valid
1325
[976]1326 # check whether edge is set up for each IF individually
[1118]1327 individualedge = False;
1328 if len(edge) > 1:
1329 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1330 individualedge = True;
[907]1331
[1118]1332 if not _is_valid(edge, int) and not individualedge:
[909]1333 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1334 pair of integers specified as a tuple. Nested tuples are allowed \
1335 to make individual selection for different IFs."
[919]1336
[1118]1337 curedge = (0, 0)
1338 if individualedge:
1339 for edgepar in edge:
1340 if not _is_valid(edgepar, int):
1341 raise ValueError, "Each element of the 'edge' tuple has \
1342 to be a pair of integers or an integer."
[907]1343 else:
[1118]1344 curedge = edge;
[880]1345
1346 # setup fitter
1347 f = fitter()
1348 f.set_function(poly=order)
1349
1350 # setup line finder
[1118]1351 fl = linefinder()
[1268]1352 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1353
1354 if not insitu:
[1118]1355 workscan = self.copy()
[880]1356 else:
[1118]1357 workscan = self
[880]1358
[907]1359 fl.set_scan(workscan)
1360
[1118]1361 rows = range(workscan.nrow())
[880]1362 asaplog.push("Processing:")
1363 for r in rows:
[1118]1364 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1365 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1366 workscan.getpol(r), workscan.getcycle(r))
[880]1367 asaplog.push(msg, False)
[907]1368
[976]1369 # figure out edge parameter
[1118]1370 if individualedge:
 1371 if len(edge) <= workscan.getif(r):
1372 raise RuntimeError, "Number of edge elements appear to " \
1373 "be less than the number of IFs"
1374 curedge = edge[workscan.getif(r)]
[919]1375
[976]1376 # setup line finder
[1118]1377 fl.find_lines(r, mask, curedge)
[880]1378 f.set_scan(workscan, fl.get_mask())
1379 f.x = workscan._getabcissa(r)
1380 f.y = workscan._getspectrum(r)
1381 f.data = None
1382 f.fit()
1383 x = f.get_parameters()
[1061]1384 if plot:
1385 f.plot(residual=True)
1386 x = raw_input("Accept fit ( [y]/n ): ")
1387 if x.upper() == 'N':
1388 continue
[880]1389 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1390 if plot:
1391 f._p.unmap()
1392 f._p = None
1393 workscan._add_history("auto_poly_baseline", varlist)
[880]1394 if insitu:
1395 self._assign(workscan)
1396 else:
1397 return workscan
1398
[914]1399 def rotate_linpolphase(self, angle):
1400 """
1401 Rotate the phase of the complex polarization O=Q+iU correlation.
1402 This is always done in situ in the raw data. So if you call this
1403 function more than once then each call rotates the phase further.
1404 Parameters:
1405 angle: The angle (degrees) to rotate (add) by.
1406 Examples:
1407 scan.rotate_linpolphase(2.3)
1408 """
1409 varlist = vars()
[936]1410 self._math._rotate_linpolphase(self, angle)
[914]1411 self._add_history("rotate_linpolphase", varlist)
1412 print_log()
1413 return
[710]1414
[513]1415
[914]1416 def rotate_xyphase(self, angle):
1417 """
1418 Rotate the phase of the XY correlation. This is always done in situ
1419 in the data. So if you call this function more than once
1420 then each call rotates the phase further.
1421 Parameters:
1422 angle: The angle (degrees) to rotate (add) by.
1423 Examples:
1424 scan.rotate_xyphase(2.3)
1425 """
1426 varlist = vars()
[936]1427 self._math._rotate_xyphase(self, angle)
[914]1428 self._add_history("rotate_xyphase", varlist)
1429 print_log()
1430 return
1431
1432 def swap_linears(self):
1433 """
[1348]1434 Swap the linear polarisations XX and YY, or more generally the first two
 1435 polarisations, as this also works for circulars.
[914]1436 """
1437 varlist = vars()
[936]1438 self._math._swap_linears(self)
[914]1439 self._add_history("swap_linears", varlist)
1440 print_log()
1441 return
1442
1443 def invert_phase(self):
1444 """
1445 Invert the phase of the complex polarisation
1446 """
1447 varlist = vars()
[936]1448 self._math._invert_phase(self)
[914]1449 self._add_history("invert_phase", varlist)
1450 print_log()
1451 return
1452
[876]1453 def add(self, offset, insitu=None):
[513]1454 """
1455 Return a scan where all spectra have the offset added
1456 Parameters:
1457 offset: the offset
1458 insitu: if False a new scantable is returned.
1459 Otherwise, the scaling is done in-situ
1460 The default is taken from .asaprc (False)
1461 """
1462 if insitu is None: insitu = rcParams['insitu']
[876]1463 self._math._setinsitu(insitu)
[513]1464 varlist = vars()
[876]1465 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1466 s._add_history("add", varlist)
[876]1467 print_log()
1468 if insitu:
1469 self._assign(s)
1470 else:
[513]1471 return s
1472
[1308]1473 def scale(self, factor, tsys=True, insitu=None):
[513]1474 """
 1475 Return a scan where all spectra are scaled by the given 'factor'
1476 Parameters:
1477 factor: the scaling factor
1478 insitu: if False a new scantable is returned.
1479 Otherwise, the scaling is done in-situ
1480 The default is taken from .asaprc (False)
1481 tsys: if True (default) then apply the operation to Tsys
1482 as well as the data
1483 """
1484 if insitu is None: insitu = rcParams['insitu']
[876]1485 self._math._setinsitu(insitu)
[513]1486 varlist = vars()
[876]1487 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1488 s._add_history("scale", varlist)
[876]1489 print_log()
1490 if insitu:
1491 self._assign(s)
1492 else:
[513]1493 return s
1494
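# Usage sketch (assumed): simple offsetting and scaling; 'scan' is a
# hypothetical scantable instance.
#
#   shifted = scan.add(1.5, insitu=False)                 # add a constant offset
#   doubled = scan.scale(2.0, tsys=False, insitu=False)   # scale the data but not Tsys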
[1348]1495 def auto_quotient(self, preserve=True, mode='paired'):
[670]1496 """
 1497 This function allows quotients to be built automatically.
 1498 It assumes the observation to have the same number of
1499 "ons" and "offs"
1500 Parameters:
[710]1501 preserve: you can preserve (default) the continuum or
1502 remove it. The equations used are
[670]1503 preserve: Output = Toff * (on/off) - Toff
[1070]1504 remove: Output = Toff * (on/off) - Ton
[1348]1505 mode: the on/off detection mode
1506 'paired' (default)
1507 identifies 'off' scans by the
1508 trailing '_R' (Mopra/Parkes) or
1509 '_e'/'_w' (Tid) and matches
1510 on/off pairs from the observing pattern
1511 'time'
1512 finds the closest off in time
1513
[670]1514 """
[1348]1515 modes = ["time", "paired"]
[670]1516 if not mode in modes:
[876]1517 msg = "please provide valid mode. Valid modes are %s" % (modes)
1518 raise ValueError(msg)
1519 varlist = vars()
[1348]1520 s = None
1521 if mode.lower() == "paired":
1522 basesel = self.get_selection()
[1356]1523 sel = selector()+basesel
1524 sel.set_query("SRCTYPE==1")
1525 self.set_selection(sel)
[1348]1526 offs = self.copy()
1527 sel.set_query("SRCTYPE==0")
[1356]1528 self.set_selection(sel)
[1348]1529 ons = self.copy()
1530 s = scantable(self._math._quotient(ons, offs, preserve))
1531 self.set_selection(basesel)
1532 elif mode.lower() == "time":
1533 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1534 s._add_history("auto_quotient", varlist)
[876]1535 print_log()
1536 return s
[710]1537
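# Usage sketch (assumed): forming quotient spectra automatically; 'scan' is a
# hypothetical scantable containing matching on/off scans.
#
#   q = scan.auto_quotient()             # 'paired' mode, continuum preserved
#   q = scan.auto_quotient(mode='time')  # match each 'on' with the closest 'off' in time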
[1145]1538 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1539 """
[1143]1540 Form a quotient using "off" beams when observing in "MX" mode.
1541 Parameters:
[1145]1542 mask: an optional mask to be used when weight == 'stddev'
[1143]1543 weight: How to average the off beams. Default is 'median'.
[1145]1544 preserve: you can preserve (default) the continuum or
1545 remove it. The equations used are
1546 preserve: Output = Toff * (on/off) - Toff
1547 remove: Output = Toff * (on/off) - Ton
[1217]1548 """
[1143]1549 if mask is None: mask = ()
[1141]1550 varlist = vars()
1551 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1552 preoff = scantable(self._math._mx_extract(self, 'off'))
1553 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1554 from asapmath import quotient
[1145]1555 q = quotient(on, off, preserve)
[1143]1556 q._add_history("mx_quotient", varlist)
[1145]1557 print_log()
[1217]1558 return q
[513]1559
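# Usage sketch (assumed): quotient for beam-switched ("MX") observations;
# 'scan' is a hypothetical multibeam scantable.
#
#   q = scan.mx_quotient(weight='median')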
[718]1560 def freq_switch(self, insitu=None):
1561 """
1562 Apply frequency switching to the data.
1563 Parameters:
1564 insitu: if False a new scantable is returned.
 1565 Otherwise, the switching is done in-situ
1566 The default is taken from .asaprc (False)
1567 Example:
1568 none
1569 """
1570 if insitu is None: insitu = rcParams['insitu']
[876]1571 self._math._setinsitu(insitu)
[718]1572 varlist = vars()
[876]1573 s = scantable(self._math._freqswitch(self))
[1118]1574 s._add_history("freq_switch", varlist)
[876]1575 print_log()
1576 if insitu: self._assign(s)
1577 else: return s
[718]1578
[780]1579 def recalc_azel(self):
1580 """
1581 Recalculate the azimuth and elevation for each position.
1582 Parameters:
1583 none
1584 Example:
1585 """
1586 varlist = vars()
[876]1587 self._recalcazel()
[780]1588 self._add_history("recalc_azel", varlist)
1589 print_log()
1590 return
1591
[513]1592 def __add__(self, other):
1593 varlist = vars()
1594 s = None
1595 if isinstance(other, scantable):
[1308]1596 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1597 elif isinstance(other, float):
[876]1598 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1599 else:
[718]1600 raise TypeError("Other input is not a scantable or float value")
[513]1601 s._add_history("operator +", varlist)
[718]1602 print_log()
[513]1603 return s
1604
1605 def __sub__(self, other):
1606 """
1607 implicit on all axes and on Tsys
1608 """
1609 varlist = vars()
1610 s = None
1611 if isinstance(other, scantable):
[1308]1612 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1613 elif isinstance(other, float):
[876]1614 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1615 else:
[718]1616 raise TypeError("Other input is not a scantable or float value")
[513]1617 s._add_history("operator -", varlist)
[718]1618 print_log()
[513]1619 return s
[710]1620
[513]1621 def __mul__(self, other):
1622 """
1623 implicit on all axes and on Tsys
1624 """
1625 varlist = vars()
1626 s = None
1627 if isinstance(other, scantable):
[1308]1628 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1629 elif isinstance(other, float):
[876]1630 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1631 else:
[718]1632 raise TypeError("Other input is not a scantable or float value")
[513]1633 s._add_history("operator *", varlist)
[718]1634 print_log()
[513]1635 return s
1636
[710]1637
[513]1638 def __div__(self, other):
1639 """
1640 implicit on all axes and on Tsys
1641 """
1642 varlist = vars()
1643 s = None
1644 if isinstance(other, scantable):
[1308]1645 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1646 elif isinstance(other, float):
1647 if other == 0.0:
[718]1648 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1649 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1650 else:
[718]1651 raise TypeError("Other input is not a scantable or float value")
[513]1652 s._add_history("operator /", varlist)
[718]1653 print_log()
[513]1654 return s
1655
[530]1656 def get_fit(self, row=0):
1657 """
1658 Print or return the stored fits for a row in the scantable
1659 Parameters:
1660 row: the row which the fit has been applied to.
1661 """
[530]1662 if row >= self.nrow():
1663 return
[976]1664 from asap.asapfit import asapfit
[530]1665 fit = asapfit(self._getfit(row))
[718]1666 if rcParams['verbose']:
[530]1667 print fit
1668 return
1669 else:
1670 return fit.as_dict()
1671
[484]1672 def _add_history(self, funcname, parameters):
[1435]1673 if not rcParams['scantable.history']:
1674 return
[484]1675 # create date
1676 sep = "##"
1677 from datetime import datetime
1678 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1679 hist = dstr+sep
1680 hist += funcname+sep#cdate+sep
1681 if parameters.has_key('self'): del parameters['self']
[1118]1682 for k, v in parameters.iteritems():
[484]1683 if type(v) is dict:
[1118]1684 for k2, v2 in v.iteritems():
[484]1685 hist += k2
1686 hist += "="
[1118]1687 if isinstance(v2, scantable):
[484]1688 hist += 'scantable'
1689 elif k2 == 'mask':
[1118]1690 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1691 hist += str(self._zip_mask(v2))
1692 else:
1693 hist += str(v2)
[484]1694 else:
[513]1695 hist += str(v2)
[484]1696 else:
1697 hist += k
1698 hist += "="
[1118]1699 if isinstance(v, scantable):
[484]1700 hist += 'scantable'
1701 elif k == 'mask':
[1118]1702 if isinstance(v, list) or isinstance(v, tuple):
[513]1703 hist += str(self._zip_mask(v))
1704 else:
1705 hist += str(v)
[484]1706 else:
1707 hist += str(v)
1708 hist += sep
1709 hist = hist[:-2] # remove trailing '##'
1710 self._addhistory(hist)
1711
[710]1712
[484]1713 def _zip_mask(self, mask):
1714 mask = list(mask)
1715 i = 0
1716 segments = []
1717 while mask[i:].count(1):
1718 i += mask[i:].index(1)
1719 if mask[i:].count(0):
1720 j = i + mask[i:].index(0)
1721 else:
[710]1722 j = len(mask)
[1118]1723 segments.append([i, j])
[710]1724 i = j
[484]1725 return segments
[714]1726
[626]1727 def _get_ordinate_label(self):
1728 fu = "("+self.get_fluxunit()+")"
1729 import re
1730 lbl = "Intensity"
[1118]1731 if re.match(".K.", fu):
[626]1732 lbl = "Brightness Temperature "+ fu
[1118]1733 elif re.match(".Jy.", fu):
[626]1734 lbl = "Flux density "+ fu
1735 return lbl
[710]1736
[876]1737 def _check_ifs(self):
1738 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1739 nchans = filter(lambda t: t > 0, nchans)
[876]1740 return (sum(nchans)/len(nchans) == nchans[0])
[976]1741
1742 def _fill(self, names, unit, average):
1743 import os
1744 from asap._asap import stfiller
1745 first = True
1746 fullnames = []
1747 for name in names:
1748 name = os.path.expandvars(name)
1749 name = os.path.expanduser(name)
1750 if not os.path.exists(name):
1751 msg = "File '%s' does not exists" % (name)
1752 if rcParams['verbose']:
1753 asaplog.push(msg)
1754 print asaplog.pop().strip()
1755 return
1756 raise IOError(msg)
1757 fullnames.append(name)
1758 if average:
1759 asaplog.push('Auto averaging integrations')
[1079]1760 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1761 for name in fullnames:
[1073]1762 tbl = Scantable(stype)
1763 r = stfiller(tbl)
[976]1764 msg = "Importing %s..." % (name)
[1118]1765 asaplog.push(msg, False)
[976]1766 print_log()
[1118]1767 r._open(name, -1, -1)
[976]1768 r._read()
[1073]1769 #tbl = r._getdata()
[976]1770 if average:
[1118]1771 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1772 #tbl = tbl2
1773 if not first:
1774 tbl = self._math._merge([self, tbl])
1775 #tbl = tbl2
1776 Scantable.__init__(self, tbl)
1777 r._close()
[1118]1778 del r, tbl
[976]1779 first = False
1780 if unit is not None:
1781 self.set_fluxunit(unit)
1782 self.set_freqframe(rcParams['scantable.freqframe'])
1783
[1402]1784 def __getitem__(self, key):
1785 if key < 0:
1786 key += self.nrow()
1787 if key >= self.nrow():
1788 raise IndexError("Row index out of range.")
1789 return self._getspectrum(key)
1790
1791 def __setitem__(self, key, value):
1792 if key < 0:
1793 key += self.nrow()
1794 if key >= self.nrow():
1795 raise IndexError("Row index out of range.")
1796 if not hasattr(value, "__len__") or \
1797 len(value) > self.nchan(self.getif(key)):
1798 raise ValueError("Spectrum length doesn't match.")
1799 return self._setspectrum(value, key)
1800
1801 def __len__(self):
1802 return self.nrow()
1803
1804 def __iter__(self):
1805 for i in range(len(self)):
1806 yield self[i]
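# Usage sketch (assumed): a scantable also behaves like a sequence of spectra;
# 'scan' is a hypothetical scantable instance.
#
#   nrows = len(scan)       # number of rows
#   spec = scan[0]          # spectrum of the first row (a list of floats)
#   for spec in scan:       # iterate over all spectra
#       pass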