source: branches/alma/python/scantable.py@ 2693

Last change on this file since 2693 was 1782, checked in by Kana Sugimoto, 14 years ago

New Development: Yes

JIRA Issue: No (merge)

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): single dish package

Description:

Merged some unimplemented developments from the trunk.
[Details]

  1. Restored PKSmsg.cc, PKSmsg.h to alma branch from trunk
  2. Updated pks_matchs.cc and src/SConscript to ones in trunk
  3. uncommented "@print_log_dec"
  4. implemented bug fixes to add(), sub(), mul(), and div() in scantable.py
  5. implemented get_region() and get_point() in asaplotbase.py
  6. activated interactive keyword for text(), arrow(), annotate(), axvline(), axhline(), axvspan(), and axhspan() in asapplotter.py


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.5 KB
[1757]1import os
2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
[876]7from asap._asap import Scantable
[226]8from asap import rcParams
[1757]9from asap import print_log, print_log_dec
[1118]10from asap import asaplog
[946]11from asap import selector
[1153]12from asap import linecatalog
[1757]13from asap.coordinate import coordinate
[1295]14from asap import _n_bools, mask_not, mask_and, mask_or
[102]15
[1757]16
17def preserve_selection(func):
18 @wraps_dec(func)
19 def wrap(obj, *args, **kw):
20 basesel = obj.get_selection()
21 val = func(obj, *args, **kw)
22 obj.set_selection(basesel)
23 return val
24 return wrap
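# Example usage (illustrative sketch): preserve_selection is meant to decorate
# scantable methods so the selection active before the call is restored afterwards:
#
#     @preserve_selection
#     def some_method(self, *args):
#         ...   # may change the selection internally; the caller's selection survives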
25
26
27def is_scantable(filename):
28 return (os.path.isdir(filename)
29 and not os.path.exists(filename+'/table.f1')
30 and os.path.exists(filename+'/table.info'))
31
32
[876]33class scantable(Scantable):
[102]34 """
35 The ASAP container for scans
36 """
[710]37
[1782]38 @print_log_dec
[1757]39 def __init__(self, filename, average=None, unit=None, getpt=None, antenna=None, parallactify=None):
[102]40 """
41 Create a scantable from a saved one or make a reference
42 Parameters:
[181]43 filename: the name of an asap table on disk
44 or
45 the name of a rpfits/sdfits/ms file
46 (integrations within scans are auto averaged
47 and the whole file is read)
48 or
49 [advanced] a reference to an existing
[102]50 scantable
[484]51 average: average all integrations within a scan on read.
52 The default (True) is taken from .asaprc.
53 unit: brightness unit; must be consistent with K or Jy.
[340]54 Over-rides the default selected by the reader
55 (input rpfits/sdfits/ms) or replaces the value
56 in existing scantables
[1522]57 getpt: for MeasurementSet input data only:
58 If True, all pointing data are filled.
59 The default is True; setting it to False can make loading
[1685]60 the MS data faster in some cases.
61 antenna: Antenna selection. integer (id) or string (name
62 or id).
[1757]63 parallactify: Indicate that the data have been parallactified.
64 Default is taken from the rc file.
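Example (illustrative; 'orion_sio.rpf' is a hypothetical file name):
    s = scantable('orion_sio.rpf', average=True)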
[710]65 """
[976]66 if average is None:
[710]67 average = rcParams['scantable.autoaverage']
[1496]68 if getpt is None:
[1673]69 getpt = True
[1684]70 if antenna is None:
71 antenna = ''
72 elif type(antenna) == int:
73 antenna = '%s'%antenna
74 elif type(antenna) == list:
75 tmpstr = ''
76 for i in range( len(antenna) ):
[1781]77 if type(antenna[i]) == int:
78 tmpstr = tmpstr + ('%s,'%(antenna[i]))
[1684]79 elif type(antenna[i]) == str:
80 tmpstr=tmpstr+antenna[i]+','
81 else:
82 asaplog.push('Bad antenna selection.')
83 print_log('ERROR')
[1781]84 return
[1684]85 antenna = tmpstr.rstrip(',')
[1757]86 parallactify = parallactify or rcParams['scantable.parallactify']
[1259]87 varlist = vars()
[876]88 from asap._asap import stmath
[1680]89 self._math = stmath( rcParams['insitu'] )
[876]90 if isinstance(filename, Scantable):
91 Scantable.__init__(self, filename)
[181]92 else:
[1757]93 if isinstance(filename, str):
[976]94 filename = os.path.expandvars(filename)
95 filename = os.path.expanduser(filename)
96 if not os.path.exists(filename):
97 s = "File '%s' not found." % (filename)
[718]98 if rcParams['verbose']:
[976]99 asaplog.push(s)
[1614]100 print_log('ERROR')
[718]101 return
[976]102 raise IOError(s)
[1757]103 if is_scantable(filename):
104 ondisk = rcParams['scantable.storage'] == 'disk'
105 Scantable.__init__(self, filename, ondisk)
106 if unit is not None:
107 self.set_fluxunit(unit)
108 # do not reset to the default freqframe
109 #self.set_freqframe(rcParams['scantable.freqframe'])
110 elif os.path.isdir(filename) \
111 and not os.path.exists(filename+'/table.f1'):
112 msg = "The given file '%s'is not a valid " \
113 "asap table." % (filename)
114 if rcParams['verbose']:
115 #print msg
116 asaplog.push( msg )
117 print_log( 'ERROR' )
118 return
[718]119 else:
[1757]120 raise IOError(msg)
[226]121 else:
[1684]122 self._fill([filename], unit, average, getpt, antenna)
[1118]123 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]124 and isinstance(filename[-1], str):
[1684]125 self._fill(filename, unit, average, getpt, antenna)
[1757]126 self.parallactify(parallactify)
[1259]127 self._add_history("scantable", varlist)
[714]128 print_log()
[102]129
[1782]130 @print_log_dec
[876]131 def save(self, name=None, format=None, overwrite=False):
[116]132 """
[1280]133 Store the scantable on disk. This can be an asap (aips++) Table,
134 SDFITS or MS2 format.
[116]135 Parameters:
[1093]136 name: the name of the outputfile. For format "ASCII"
137 this is the root file name (data in 'name'.txt
[497]138 and header in 'name'_header.txt)
[116]139 format: an optional file format. Default is ASAP.
[280]140 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]141 'SDFITS' (save as SDFITS file)
[200]142 'ASCII' (saves as ascii text file)
[226]143 'MS2' (saves as an aips++
144 MeasurementSet V2)
[1757]145 'FITS' (save as image FITS - not
[1603]146 readable by class)
147 'CLASS' (save as FITS readable by CLASS)
[411]148 overwrite: If the file should be overwritten if it exists.
[256]149 The default False is to return with warning
[411]150 without writing the output. USE WITH CARE.
[116]151 Example:
152 scan.save('myscan.asap')
[1118]153 scan.save('myscan.sdfits', 'SDFITS')
[116]154 """
[411]155 from os import path
[1757]156 format = format or rcParams['scantable.save']
[256]157 suffix = '.'+format.lower()
[1118]158 if name is None or name == "":
[256]159 name = 'scantable'+suffix
[718]160 msg = "No filename given. Using default name %s..." % name
161 asaplog.push(msg)
[411]162 name = path.expandvars(name)
[256]163 if path.isfile(name) or path.isdir(name):
164 if not overwrite:
[718]165 msg = "File %s exists." % name
166 if rcParams['verbose']:
[1612]167 #print msg
[1614]168 asaplog.push( msg )
169 print_log( 'ERROR' )
[718]170 return
171 else:
172 raise IOError(msg)
[451]173 format2 = format.upper()
174 if format2 == 'ASAP':
[116]175 self._save(name)
176 else:
[989]177 from asap._asap import stwriter as stw
[1118]178 writer = stw(format2)
179 writer.write(self, name)
[718]180 print_log()
[116]181 return
182
[102]183 def copy(self):
184 """
185 Return a copy of this scantable.
[1348]186 Note:
187 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]188 Parameters:
[113]189 none
[102]190 Example:
191 copiedscan = scan.copy()
192 """
[876]193 sd = scantable(Scantable._copy(self))
[113]194 return sd
195
[1093]196 def drop_scan(self, scanid=None):
197 """
198 Return a new scantable where the specified scan number(s) has(have)
199 been dropped.
200 Parameters:
201 scanid: a (list of) scan number(s)
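Example (illustrative):
    # return a copy with scans 0 and 2 removed
    newscan = scan.drop_scan([0, 2])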
202 """
203 from asap import _is_sequence_or_number as _is_valid
204 from asap import _to_list
205 from asap import unique
206 if not _is_valid(scanid):
207 if rcParams['verbose']:
[1612]208 #print "Please specify a scanno to drop from the scantable"
[1614]209 asaplog.push( 'Please specify a scanno to drop from the scantable' )
210 print_log( 'ERROR' )
[1093]211 return
212 else:
213 raise RuntimeError("No scan given")
214 try:
215 scanid = _to_list(scanid)
216 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
217 for sid in scanid: allscans.remove(sid)
[1118]218 if len(allscans) == 0:
219 raise ValueError("Can't remove all scans")
[1093]220 except ValueError:
221 if rcParams['verbose']:
[1612]222 #print "Couldn't find any match."
[1614]223 print_log()
224 asaplog.push( "Couldn't find any match." )
225 print_log( 'ERROR' )
[1093]226 return
227 else: raise
228 try:
[1757]229 sel = selector(scans=allscans)
230 return self._select_copy(sel)
[1093]231 except RuntimeError:
[1118]232 if rcParams['verbose']:
[1612]233 #print "Couldn't find any match."
[1614]234 print_log()
235 asaplog.push( "Couldn't find any match." )
236 print_log( 'ERROR' )
[1118]237 else:
238 raise
[1093]239
[1757]240 def _select_copy(self, selection):
241 orig = self.get_selection()
242 self.set_selection(orig+selection)
243 cp = self.copy()
244 self.set_selection(orig)
245 return cp
[1093]246
[102]247 def get_scan(self, scanid=None):
248 """
249 Return a specific scan (by scanno) or collection of scans (by
250 source name) in a new scantable.
[1348]251 Note:
252 See scantable.drop_scan() for the inverse operation.
[102]253 Parameters:
[513]254 scanid: a (list of) scanno or a source name, unix-style
255 patterns are accepted for source name matching, e.g.
256 '*_R' gets all 'ref' scans
[102]257 Example:
[513]258 # get all scans containing the source '323p459'
259 newscan = scan.get_scan('323p459')
260 # get all 'off' scans
261 refscans = scan.get_scan('*_R')
262 # get a subset of scans by scanno (as listed in scan.summary())
[1118]263 newscan = scan.get_scan([0, 2, 7, 10])
[102]264 """
265 if scanid is None:
[718]266 if rcParams['verbose']:
[1612]267 #print "Please specify a scan no or name to " \
268 # "retrieve from the scantable"
[1614]269 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
270 print_log( 'ERROR' )
[718]271 return
272 else:
273 raise RuntimeError("No scan given")
274
[102]275 try:
[946]276 bsel = self.get_selection()
277 sel = selector()
[102]278 if type(scanid) is str:
[946]279 sel.set_name(scanid)
[1757]280 return self._select_copy(sel)
[102]281 elif type(scanid) is int:
[946]282 sel.set_scans([scanid])
[1757]283 return self._select_copy(sel)
[381]284 elif type(scanid) is list:
[946]285 sel.set_scans(scanid)
[1757]286 return self._select_copy(sel)
[381]287 else:
[718]288 msg = "Illegal scanid type, use 'int' or 'list' if ints."
289 if rcParams['verbose']:
[1612]290 #print msg
[1614]291 asaplog.push( msg )
292 print_log( 'ERROR' )
[718]293 else:
294 raise TypeError(msg)
[102]295 except RuntimeError:
[1612]296 if rcParams['verbose']:
297 #print "Couldn't find any match."
[1614]298 print_log()
299 asaplog.push( "Couldn't find any match." )
300 print_log( 'ERROR' )
[718]301 else: raise
[102]302
303 def __str__(self):
[1118]304 return Scantable._summary(self, True)
[102]305
[976]306 def summary(self, filename=None):
[102]307 """
308 Print a summary of the contents of this scantable.
309 Parameters:
310 filename: the name of a file to write the output to
311 Default - no file output
312 """
[976]313 info = Scantable._summary(self, True)
[102]314 if filename is not None:
[256]315 if filename == "":
316 filename = 'scantable_summary.txt'
[415]317 from os.path import expandvars, isdir
[411]318 filename = expandvars(filename)
[415]319 if not isdir(filename):
[413]320 data = open(filename, 'w')
321 data.write(info)
322 data.close()
323 else:
[718]324 msg = "Illegal file name '%s'." % (filename)
325 if rcParams['verbose']:
[1612]326 #print msg
[1614]327 asaplog.push( msg )
328 print_log( 'ERROR' )
[718]329 else:
330 raise IOError(msg)
331 if rcParams['verbose']:
[794]332 try:
333 from IPython.genutils import page as pager
334 except ImportError:
335 from pydoc import pager
336 pager(info)
[718]337 else:
338 return info
[710]339
[1603]340 def get_spectrum(self, rowno):
341 """Return the spectrum for the current row in the scantable as a list.
342 Parameters:
[1757]343 rowno: the row number to retrieve the spectrum from
[1603]344 """
345 return self._getspectrum(rowno)
[946]346
[1603]347 def get_mask(self, rowno):
348 """Return the mask for the current row in the scantable as a list.
349 Parameters:
[1757]350 rowno: the row number to retrieve the mask from
[1603]351 """
352 return self._getmask(rowno)
353
354 def set_spectrum(self, spec, rowno):
355 """Return the spectrum for the current row in the scantable as a list.
356 Parameters:
357 spec: the spectrum
[1757]358 rowno: the row number to set the spectrum for
[1603]359 """
360 assert(len(spec) == self.nchan())
361 return self._setspectrum(spec, rowno)
362
[1757]363 def get_coordinate(self, rowno):
364 """Return the (spectral) coordinate for a a given 'rowno'.
365 NOTE:
366 * This coordinate is only valid until a scantable method modifies
367 the frequency axis.
368 * This coordinate contains the original frequency set-up,
369 NOT the new frame. The conversions, however, are done using the user
370 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
371 use scantable.freq_align first. Without it there is no closure,
372 i.e.
373 c = myscan.get_coordinate(0)
374 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
375
376 Parameters:
377 rowno: the row number for the spectral coordinate
378
379 """
380 return coordinate(Scantable.get_coordinate(self, rowno))
381
[946]382 def get_selection(self):
383 """
[1005]384 Get the selection object currently set on this scantable.
385 Parameters:
386 none
387 Example:
388 sel = scan.get_selection()
389 sel.set_ifs(0) # select IF 0
390 scan.set_selection(sel) # apply modified selection
[946]391 """
392 return selector(self._getselection())
393
[1757]394 def set_selection(self, selection=None, **kw):
[946]395 """
[1005]396 Select a subset of the data. All following operations on this scantable
397 are only applied to this selection.
398 Parameters:
[1757]399 selection: a selector object (default: unset the selection),
400
401 or
402
403 any combination of
404 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
405
[1005]406 Examples:
407 sel = selector() # create a selection object
[1118]408 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]409 scan.set_selection(sel) # set the selection
410 scan.summary() # will only print summary of scanno 0 and 3
411 scan.set_selection() # unset the selection
[1757]412 # or the equivalent
413 scan.set_selection(scans=[0,3])
414 scan.summary() # will only print summary of scanno 0 and 3
415 scan.set_selection() # unset the selection
[946]416 """
[1757]417 if selection is None:
418 # reset
419 if len(kw) == 0:
420 selection = selector()
421 else:
422 # try keywords
423 for k in kw:
424 if k not in selector.fields:
425 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
426 selection = selector(**kw)
[946]427 self._setselection(selection)
428
[1446]429 def get_row(self, row=0, insitu=None):
430 """
431 Select a row in the scantable.
432 Return a scantable with single row.
433 Parameters:
434 row: row no of integration, default is 0.
435 insitu: if False a new scantable is returned.
436 Otherwise, the scaling is done in-situ
437 The default is taken from .asaprc (False)
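Example (illustrative):
    # return a new scantable containing only row 5
    row5 = scan.get_row(5, insitu=False)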
438 """
439 if insitu is None: insitu = rcParams['insitu']
440 if not insitu:
441 workscan = self.copy()
442 else:
443 workscan = self
444 # Select a row
445 sel=selector()
446 sel.set_scans([workscan.getscan(row)])
447 sel.set_cycles([workscan.getcycle(row)])
448 sel.set_beams([workscan.getbeam(row)])
449 sel.set_ifs([workscan.getif(row)])
450 sel.set_polarisations([workscan.getpol(row)])
451 sel.set_name(workscan._getsourcename(row))
452 workscan.set_selection(sel)
453 if not workscan.nrow() == 1:
454 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
455 raise RuntimeError(msg)
456 del sel
457 if insitu:
458 self._assign(workscan)
459 else:
460 return workscan
461
[1629]462 #def stats(self, stat='stddev', mask=None):
463 def stats(self, stat='stddev', mask=None, form='3.3f'):
[102]464 """
[135]465 Determine the specified statistic of the current beam/if/pol
[102]466 Takes a 'mask' as an optional parameter to specify which
467 channels should be excluded.
468 Parameters:
[1517]469 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]470 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]471 mask: an optional mask specifying where the statistic
[102]472 should be determined.
[1629]473 form: format string to print statistic values
[102]474 Example:
[113]475 scan.set_unit('channel')
[1118]476 msk = scan.create_mask([100, 200], [500, 600])
[135]477 scan.stats(stat='mean', mask=msk)
[102]478 """
[1757]479 mask = mask or []
[876]480 if not self._check_ifs():
[1118]481 raise ValueError("Cannot apply mask as the IFs have different "
482 "number of channels. Please use setselection() "
483 "to select individual IFs")
[1530]484 rtnabc = False
485 if stat.lower().endswith('_abc'): rtnabc = True
486 getchan = False
[1781]487 if stat.lower().startswith('min') or stat.lower().startswith('max'):
[1517]488 chan = self._math._minmaxchan(self, mask, stat)
489 getchan = True
[1515]490 statvals = []
[1530]491 if not rtnabc: statvals = self._math._stats(self, mask, stat)
492
[1757]493 #def cb(i):
494 # return statvals[i]
495
496 #return self._row_callback(cb, stat)
497
498 label=stat
499 #callback=cb
500 out = ""
501 #outvec = []
502 sep = '-'*50
[876]503 for i in range(self.nrow()):
[1530]504 refstr = ''
505 statunit= ''
[1517]506 if getchan:
507 qx, qy = self.chan2data(rowno=i, chan=chan[i])
[1530]508 if rtnabc:
509 statvals.append(qx['value'])
[1629]510 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
[1530]511 statunit= '['+qx['unit']+']'
512 else:
[1629]513 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
[1757]514
515 tm = self._gettime(i)
516 src = self._getsourcename(i)
517 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]518 out += 'Time[%s]:\n' % (tm)
[1757]519 if self.nbeam(-1) > 1:
520 out += ' Beam[%d] ' % (self.getbeam(i))
521 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
522 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
523 #outvec.append(callback(i))
[1781]524 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
525 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
[1757]526 out += sep+"\n"
[256]527
[876]528 if rcParams['verbose']:
[1615]529 import os
[1625]530 if os.environ.has_key( 'USER' ):
531 usr=os.environ['USER']
532 else:
533 import commands
534 usr=commands.getoutput( 'whoami' )
[1613]535 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
[1612]536 f=open(tmpfile,'w')
[1757]537 print >> f, sep
538 print >> f, ' %s %s' % (label, statunit)
539 print >> f, sep
[1612]540 print >> f, out
541 f.close()
542 f=open(tmpfile,'r')
543 x=f.readlines()
544 f.close()
[1757]545 blanc=''
546 asaplog.push(blanc.join(x), False)
547 #for xx in x:
548 # asaplog.push( xx, False )
[1614]549 print_log()
[1295]550 return statvals
[102]551
[1517]552 def chan2data(self, rowno=0, chan=0):
[1515]553 """
[1517]554 Returns channel/frequency/velocity and spectral value
[1515]555 at an arbitrary row and channel in the scantable.
556 Parameters:
557 rowno: a row number in the scantable. Default is the
558 first row, i.e. rowno=0
[1517]559 chan: a channel in the scantable. Default is the first
[1515]560 channel, i.e. chan=0
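Example (illustrative):
    qx, qy = scan.chan2data(rowno=0, chan=100)
    # qx['value'] is the abscissa (e.g. frequency) at channel 100,
    # qy['value'] is the corresponding spectral value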
561 """
[1517]562 if isinstance(rowno, int) and isinstance(chan, int):
[1530]563 qx = {'unit': self.get_unit(),
564 'value': self._getabcissa(rowno)[chan]}
[1517]565 qy = {'unit': self.get_fluxunit(),
566 'value': self._getspectrum(rowno)[chan]}
567 return qx, qy
[1515]568
[1118]569 def stddev(self, mask=None):
[135]570 """
571 Determine the standard deviation of the current beam/if/pol
572 Takes a 'mask' as an optional parameter to specify which
573 channels should be excluded.
574 Parameters:
575 mask: an optional mask specifying where the standard
576 deviation should be determined.
577
578 Example:
579 scan.set_unit('channel')
[1118]580 msk = scan.create_mask([100, 200], [500, 600])
[135]581 scan.stddev(mask=msk)
582 """
[1118]583 return self.stats(stat='stddev', mask=mask)
[135]584
[1003]585
[1259]586 def get_column_names(self):
[1003]587 """
588 Return a list of column names, which can be used for selection.
589 """
[1259]590 return list(Scantable.get_column_names(self))
[1003]591
[1757]592 def get_tsys(self, row=-1):
[113]593 """
594 Return the System temperatures.
595 Returns:
[876]596 a list of Tsys values for the current selection
[113]597 """
[1757]598 if row > -1:
599 return self._get_column(self._gettsys, row)
[876]600 return self._row_callback(self._gettsys, "Tsys")
[256]601
[1757]602
603 def get_weather(self, row=-1):
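"""
Return the weather information for one integration as a dictionary with
the keys 'temperature', 'pressure', 'humidity', 'windspeed' and 'windaz',
or a list of such dictionaries when row=-1 (all rows).
Parameters:
    row:    row no of integration. Default -1 returns all rows.
"""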
604 values = self._get_column(self._get_weather, row)
605 if row > -1:
606 return {'temperature': values[0],
607 'pressure': values[1], 'humidity' : values[2],
608 'windspeed' : values[3], 'windaz' : values[4]
609 }
610 else:
611 out = []
612 for r in values:
613
614 out.append({'temperature': r[0],
615 'pressure': r[1], 'humidity' : r[2],
616 'windspeed' : r[3], 'windaz' : r[4]
617 })
618 return out
619
[876]620 def _row_callback(self, callback, label):
621 out = ""
[1118]622 outvec = []
[1757]623 sep = '-'*50
[876]624 for i in range(self.nrow()):
625 tm = self._gettime(i)
626 src = self._getsourcename(i)
[1757]627 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]628 out += 'Time[%s]:\n' % (tm)
[1757]629 if self.nbeam(-1) > 1:
630 out += ' Beam[%d] ' % (self.getbeam(i))
631 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
632 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]633 outvec.append(callback(i))
634 out += '= %3.3f\n' % (outvec[i])
[1757]635 out += sep+'\n'
[876]636 if rcParams['verbose']:
[1757]637 asaplog.push(sep)
[1615]638 asaplog.push(" %s" % (label))
[1757]639 asaplog.push(sep)
[1615]640 asaplog.push(out)
[1614]641 print_log()
[1175]642 return outvec
[256]643
[1070]644 def _get_column(self, callback, row=-1):
645 """
646 """
647 if row == -1:
648 return [callback(i) for i in range(self.nrow())]
649 else:
650 if 0 <= row < self.nrow():
651 return callback(row)
[256]652
[1070]653
[1348]654 def get_time(self, row=-1, asdatetime=False):
[113]655 """
656 Get a list of time stamps for the observations.
[1348]657 Return a datetime object for each integration time stamp in the scantable.
[113]658 Parameters:
[1348]659 row: row no of integration. Default -1 return all rows
660 asdatetime: return values as datetime objects rather than strings
[113]661 Example:
662 none
663 """
[1175]664 from time import strptime
665 from datetime import datetime
[1457]666 times = self._get_column(self._gettime, row)
[1348]667 if not asdatetime:
[1603]668 return times
[1175]669 format = "%Y/%m/%d/%H:%M:%S"
670 if isinstance(times, list):
671 return [datetime(*strptime(i, format)[:6]) for i in times]
672 else:
673 return datetime(*strptime(times, format)[:6])
[102]674
[1348]675
676 def get_inttime(self, row=-1):
677 """
678 Get a list of integration times for the observations.
679 Return a time in seconds for each integration in the scantable.
680 Parameters:
681 row: row no of integration. Default -1 return all rows.
682 Example:
683 none
684 """
[1757]685 return self._get_column(self._getinttime, row)
[1348]686
[1757]687
[714]688 def get_sourcename(self, row=-1):
689 """
[794]690 Get a list of source names for the observations.
[714]691 Return a string for each integration in the scantable.
692 Parameters:
[1348]693 row: row no of integration. Default -1 return all rows.
[714]694 Example:
695 none
696 """
[1070]697 return self._get_column(self._getsourcename, row)
[714]698
[794]699 def get_elevation(self, row=-1):
700 """
701 Get a list of elevations for the observations.
702 Return a float for each integration in the scantable.
703 Parameters:
[1348]704 row: row no of integration. Default -1 return all rows.
[794]705 Example:
706 none
707 """
[1070]708 return self._get_column(self._getelevation, row)
[794]709
710 def get_azimuth(self, row=-1):
711 """
712 Get a list of azimuths for the observations.
713 Return a float for each integration in the scantable.
714 Parameters:
[1348]715 row: row no of integration. Default -1 return all rows.
[794]716 Example:
717 none
718 """
[1070]719 return self._get_column(self._getazimuth, row)
[794]720
721 def get_parangle(self, row=-1):
722 """
723 Get a list of parallactic angles for the observations.
724 Return a float for each integration in the scantable.
725 Parameters:
[1348]726 row: row no of integration. Default -1 return all rows.
[794]727 Example:
728 none
729 """
[1070]730 return self._get_column(self._getparangle, row)
[794]731
[1070]732 def get_direction(self, row=-1):
733 """
734 Get a list of Positions on the sky (direction) for the observations.
[1757]735 Return a string for each integration in the scantable.
[1070]736 Parameters:
737 row: row no of integration. Default -1 return all rows
738 Example:
739 none
740 """
741 return self._get_column(self._getdirection, row)
742
[1389]743 def get_directionval(self, row=-1):
744 """
745 Get a list of Positions on the sky (direction) for the observations.
746 Return a float for each integration in the scantable.
747 Parameters:
748 row: row no of integration. Default -1 return all rows
749 Example:
750 none
751 """
752 return self._get_column(self._getdirectionvec, row)
753
[1782]754 @print_log_dec
[102]755 def set_unit(self, unit='channel'):
756 """
757 Set the unit for all following operations on this scantable
758 Parameters:
759 unit: optional unit, default is 'channel'
[1118]760 one of '*Hz', 'km/s', 'channel', ''
[102]761 """
[484]762 varlist = vars()
[1118]763 if unit in ['', 'pixel', 'channel']:
[113]764 unit = ''
765 inf = list(self._getcoordinfo())
766 inf[0] = unit
767 self._setcoordinfo(inf)
[1118]768 self._add_history("set_unit", varlist)
[113]769
[1782]770 @print_log_dec
[484]771 def set_instrument(self, instr):
[358]772 """
[1348]773 Set the instrument for subsequent processing.
[358]774 Parameters:
[710]775 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]776 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]777 """
778 self._setInstrument(instr)
[1118]779 self._add_history("set_instument", vars())
[718]780 print_log()
[358]781
[1782]782 @print_log_dec
[1190]783 def set_feedtype(self, feedtype):
784 """
785 Overwrite the feed type, which might not be set correctly.
786 Parameters:
787 feedtype: 'linear' or 'circular'
788 """
789 self._setfeedtype(feedtype)
790 self._add_history("set_feedtype", vars())
791 print_log()
792
[1782]793 @print_log_dec
[276]794 def set_doppler(self, doppler='RADIO'):
795 """
796 Set the doppler for all following operations on this scantable.
797 Parameters:
798 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
799 """
[484]800 varlist = vars()
[276]801 inf = list(self._getcoordinfo())
802 inf[2] = doppler
803 self._setcoordinfo(inf)
[1118]804 self._add_history("set_doppler", vars())
[718]805 print_log()
[710]806
[1782]807 @print_log_dec
[226]808 def set_freqframe(self, frame=None):
[113]809 """
810 Set the frame type of the Spectral Axis.
811 Parameters:
[591]812 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1757]813 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]814 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]815 Examples:
816 scan.set_freqframe('BARY')
817 """
[1757]818 frame = frame or rcParams['scantable.freqframe']
[484]819 varlist = vars()
[1757]820 # "REST" is not implemented in casacore
821 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
822 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
823 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]824 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]825
[989]826 if frame in valid:
[113]827 inf = list(self._getcoordinfo())
828 inf[1] = frame
829 self._setcoordinfo(inf)
[1118]830 self._add_history("set_freqframe", varlist)
[102]831 else:
[1118]832 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]833 if rcParams['verbose']:
[1612]834 #print msg
[1614]835 asaplog.push( msg )
836 print_log( 'ERROR' )
[718]837 else:
838 raise TypeError(msg)
839 print_log()
[710]840
[989]841 def set_dirframe(self, frame=""):
842 """
843 Set the frame type of the Direction on the sky.
844 Parameters:
845 frame: an optional frame type, default ''. Valid frames are:
846 'J2000', 'B1950', 'GALACTIC'
847 Examples:
848 scan.set_dirframe('GALACTIC')
849 """
850 varlist = vars()
851 try:
852 Scantable.set_dirframe(self, frame)
[1118]853 except RuntimeError, msg:
[989]854 if rcParams['verbose']:
[1612]855 #print msg
[1614]856 print_log()
[1676]857 asaplog.push( str(msg) )
[1614]858 print_log( 'ERROR' )
[989]859 else:
860 raise
[1118]861 self._add_history("set_dirframe", varlist)
[989]862
[113]863 def get_unit(self):
864 """
865 Get the default unit set in this scantable
866 Returns:
867 A unit string
868 """
869 inf = self._getcoordinfo()
870 unit = inf[0]
871 if unit == '': unit = 'channel'
872 return unit
[102]873
[158]874 def get_abcissa(self, rowno=0):
[102]875 """
[158]876 Get the abcissa in the current coordinate setup for the currently
[113]877 selected Beam/IF/Pol
878 Parameters:
[226]879 rowno: an optional row number in the scantable. Default is the
880 first row, i.e. rowno=0
[113]881 Returns:
[1348]882 The abcissa values and the format string (as a dictionary)
[113]883 """
[256]884 abc = self._getabcissa(rowno)
[710]885 lbl = self._getabcissalabel(rowno)
[718]886 print_log()
[158]887 return abc, lbl
[113]888
[1401]889 def flag(self, mask=None, unflag=False):
[1001]890 """
891 Flag the selected data using an optional channel mask.
892 Parameters:
893 mask: an optional channel mask, created with create_mask. Default
894 (no mask) is all channels.
[1401]895 unflag: if True, unflag the data
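Example (an illustrative sketch):
    # flag the channels covered by the mask (here 0-100)
    msk = scan.create_mask([0, 100])
    scan.flag(mask=msk)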
[1001]896 """
897 varlist = vars()
[1757]898 mask = mask or []
[1001]899 try:
[1401]900 self._flag(mask, unflag)
[1118]901 except RuntimeError, msg:
[1001]902 if rcParams['verbose']:
[1612]903 #print msg
[1614]904 print_log()
[1676]905 asaplog.push( str(msg) )
[1614]906 print_log( 'ERROR' )
[1001]907 return
908 else: raise
909 self._add_history("flag", varlist)
910
[1655]911 def flag_row(self, rows=[], unflag=False):
912 """
913 Flag the selected data in row-based manner.
914 Parameters:
915 rows: list of row numbers to be flagged. Default is no row (must be explicitly specified to execute row-based flagging).
916 unflag: if True, unflag the data.
917 """
918 varlist = vars()
919 try:
920 self._flag_row(rows, unflag)
921 except RuntimeError, msg:
922 if rcParams['verbose']:
923 print_log()
[1676]924 asaplog.push( str(msg) )
[1655]925 print_log('ERROR')
926 return
927 else: raise
928 self._add_history("flag_row", varlist)
[1781]929
[1701]930 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
931 """
932 Flag the selected data outside a specified range (channel-based).
933 Parameters:
934 uthres: upper threshold.
935 dthres: lower threshold
936 clipoutside: True for flagging data outside the range [dthres:uthres].
937 False for flagging data inside the range.
938 unflag : if True, unflag the data.
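Example (an illustrative sketch; the thresholds are arbitrary):
    # flag everything outside the range [-1.0, 5.0]
    scan.clip(uthres=5.0, dthres=-1.0, clipoutside=True)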
939 """
940 varlist = vars()
941 try:
942 self._clip(uthres, dthres, clipoutside, unflag)
943 except RuntimeError, msg:
944 if rcParams['verbose']:
945 print_log()
946 asaplog.push(str(msg))
947 print_log('ERROR')
948 return
949 else: raise
950 self._add_history("clip", varlist)
[1757]951
[1782]952 @print_log_dec
[1757]953 def lag_flag(self, start, end, unit="MHz", insitu=None):
954 #def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]955 """
956 Flag the data in 'lag' space by providing a frequency to remove.
[1757]957 Flagged data in the scantable gets interpolated over the region.
[1192]958 No taper is applied.
959 Parameters:
[1757]960 start: the start frequency (really a period within the
961 bandwidth) or period to remove
962 end: the end frequency or period to remove
963 unit: the frequency unit (default "MHz") or "" for
964 explicit lag channels
[1203]965 Notes:
[1757]966 It is recommended to flag edges of the band or strong
[1348]967 signals beforehand.
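Example (an illustrative sketch; the values are arbitrary):
    # suppress a baseline ripple with a period corresponding to 5.5-6.5 MHz
    cleaned = scan.lag_flag(start=5.5, end=6.5, unit="MHz", insitu=False)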
[1192]968 """
969 if insitu is None: insitu = rcParams['insitu']
970 self._math._setinsitu(insitu)
971 varlist = vars()
[1757]972 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
973 if not (unit == "" or base.has_key(unit)):
[1192]974 raise ValueError("%s is not a valid unit." % unit)
975 try:
[1757]976 if unit == "":
977 s = scantable(self._math._lag_flag(self, start, end, "lags"))
978 else:
979 s = scantable(self._math._lag_flag(self, start*base[unit],
980 end*base[unit], "frequency"))
[1192]981 except RuntimeError, msg:
982 if rcParams['verbose']:
[1612]983 #print msg
[1614]984 print_log()
[1676]985 asaplog.push( str(msg) )
[1614]986 print_log( 'ERROR' )
[1192]987 return
988 else: raise
989 s._add_history("lag_flag", varlist)
990 print_log()
991 if insitu:
992 self._assign(s)
993 else:
994 return s
[1001]995
[1782]996 @print_log_dec
[113]997 def create_mask(self, *args, **kwargs):
998 """
[1118]999 Compute and return a mask based on [min, max] windows.
[189]1000 The specified windows are to be INCLUDED, when the mask is
[113]1001 applied.
[102]1002 Parameters:
[1118]1003 [min, max], [min2, max2], ...
[1024]1004 Pairs of start/end points (inclusive) specifying the regions
[102]1005 to be masked
[189]1006 invert: optional argument. If specified as True,
1007 return an inverted mask, i.e. the regions
1008 specified are EXCLUDED
[513]1009 row: create the mask using the specified row for
1010 unit conversions, default is row=0
1011 only necessary if frequency varies over rows.
[102]1012 Example:
[113]1013 scan.set_unit('channel')
1014 a)
[1118]1015 msk = scan.create_mask([400, 500], [800, 900])
[189]1016 # masks everything outside 400 and 500
[113]1017 # and 800 and 900 in the unit 'channel'
1018
1019 b)
[1118]1020 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1021 # masks the regions between 400 and 500
[113]1022 # and 800 and 900 in the unit 'channel'
[1024]1023 c)
1024 mask only channel 400
[1757]1025 msk = scan.create_mask([400])
[102]1026 """
[1757]1027 row = kwargs.get("row", 0)
[513]1028 data = self._getabcissa(row)
[113]1029 u = self._getcoordinfo()[0]
[718]1030 if rcParams['verbose']:
[113]1031 if u == "": u = "channel"
[718]1032 msg = "The current mask window unit is %s" % u
[1118]1033 i = self._check_ifs()
1034 if not i:
[876]1035 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]1036 asaplog.push(msg)
[102]1037 n = self.nchan()
[1295]1038 msk = _n_bools(n, False)
[710]1039 # test if args is a 'list' or a 'normal *args - UGLY!!!
1040
[1118]1041 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1042 and args or args[0]
[710]1043 for window in ws:
[1757]1044 if len(window) == 1:
1045 window = [window[0], window[0]]
1046 if len(window) == 0 or len(window) > 2:
1047 raise ValueError("A window needs to be defined as [start(, end)]")
1048 if window[0] > window[1]:
1049 tmp = window[0]
1050 window[0] = window[1]
1051 window[1] = tmp
[102]1052 for i in range(n):
[1024]1053 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1054 msk[i] = True
[113]1055 if kwargs.has_key('invert'):
1056 if kwargs.get('invert'):
[1295]1057 msk = mask_not(msk)
[718]1058 print_log()
[102]1059 return msk
[710]1060
[1446]1061 def get_masklist(self, mask=None, row=0):
[256]1062 """
[1446]1063 Compute and return a list of mask windows, [min, max].
1064 Parameters:
1065 mask: channel mask, created with create_mask.
1066 row: calculate the masklist using the specified row
1067 for unit conversions, default is row=0
1068 only necessary if frequency varies over rows.
1069 Returns:
1070 [min, max], [min2, max2], ...
1071 Pairs of start/end points (inclusive) specifying
1072 the masked regions
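Example (illustrative; the returned values are in the current unit):
    msk = scan.create_mask([400, 500], [800, 900])
    windows = scan.get_masklist(msk)   # e.g. [[400., 500.], [800., 900.]]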
1073 """
1074 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1075 raise TypeError("The mask should be list or tuple.")
1076 if len(mask) < 2:
1077 raise TypeError("The mask elements should be > 1")
1078 if self.nchan() != len(mask):
1079 msg = "Number of channels in scantable != number of mask elements"
1080 raise TypeError(msg)
1081 data = self._getabcissa(row)
1082 u = self._getcoordinfo()[0]
1083 if rcParams['verbose']:
1084 if u == "": u = "channel"
1085 msg = "The current mask window unit is %s" % u
1086 i = self._check_ifs()
1087 if not i:
1088 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1089 asaplog.push(msg)
1090 masklist=[]
1091 ist, ien = None, None
1092 ist, ien=self.get_mask_indices(mask)
1093 if ist is not None and ien is not None:
1094 for i in xrange(len(ist)):
1095 range=[data[ist[i]],data[ien[i]]]
1096 range.sort()
1097 masklist.append([range[0],range[1]])
1098 return masklist
1099
1100 def get_mask_indices(self, mask=None):
1101 """
1102 Compute and Return lists of mask start indices and mask end indices.
1103 Parameters:
1104 mask: channel mask, created with create_mask.
1105 Returns:
1106 List of mask start indices and that of mask end indices,
1107 i.e., [istart1,istart2,....], [iend1,iend2,....].
1108 """
1109 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1110 raise TypeError("The mask should be list or tuple.")
1111 if len(mask) < 2:
1112 raise TypeError("The mask elements should be > 1")
1113 istart=[]
1114 iend=[]
1115 if mask[0]: istart.append(0)
1116 for i in range(len(mask)-1):
1117 if not mask[i] and mask[i+1]:
1118 istart.append(i+1)
1119 elif mask[i] and not mask[i+1]:
1120 iend.append(i)
1121 if mask[len(mask)-1]: iend.append(len(mask)-1)
1122 if len(istart) != len(iend):
1123 raise RuntimeError("Numbers of mask start != mask end.")
1124 for i in range(len(istart)):
1125 if istart[i] > iend[i]:
1126 raise RuntimeError("Mask start index > mask end index")
1127 break
1128 return istart,iend
1129
1130# def get_restfreqs(self):
1131# """
1132# Get the restfrequency(s) stored in this scantable.
1133# The return value(s) are always of unit 'Hz'
1134# Parameters:
1135# none
1136# Returns:
1137# a list of doubles
1138# """
1139# return list(self._getrestfreqs())
1140
1141 def get_restfreqs(self, ids=None):
1142 """
[256]1143 Get the restfrequency(s) stored in this scantable.
1144 The return value(s) are always of unit 'Hz'
1145 Parameters:
[1446]1146 ids: (optional) a list of MOLECULE_ID values for which the
1147 restfrequency(s) are to be retrieved
[256]1148 Returns:
[1446]1149 dictionary containing ids and a list of doubles for each id
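Example (illustrative):
    rf = scan.get_restfreqs()        # {molecule_id: [restfreq_in_Hz, ...], ...}
    rf0 = scan.get_restfreqs(ids=0)  # restfrequencies for MOLECULE_ID 0 only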
[256]1150 """
[1446]1151 if ids is None:
1152 rfreqs={}
1153 idlist = self.getmolnos()
1154 for i in idlist:
1155 rfreqs[i]=list(self._getrestfreqs(i))
1156 return rfreqs
1157 else:
1158 if type(ids)==list or type(ids)==tuple:
1159 rfreqs={}
1160 for i in ids:
1161 rfreqs[i]=list(self._getrestfreqs(i))
1162 return rfreqs
1163 else:
1164 return list(self._getrestfreqs(ids))
1165 #return list(self._getrestfreqs(ids))
[102]1166
[931]1167 def set_restfreqs(self, freqs=None, unit='Hz'):
1168 """
[1446]1169 ********NEED TO BE UPDATED begin************
[931]1170 Set or replace the restfrequency specified and
1171 If the 'freqs' argument holds a scalar,
1172 then that rest frequency will be applied to all the selected
1173 data. If the 'freqs' argument holds
1174 a vector, then it MUST be of equal or smaller length than
1175 the number of IFs (and the available restfrequencies will be
1176 replaced by this vector). In this case, *all* data have
1177 the restfrequency set per IF according
1178 to the corresponding value you give in the 'freqs' vector.
[1118]1179 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1180 IF 1 gets restfreq 2e9.
[1446]1181 ********NEED TO BE UPDATED end************
[1603]1182 You can also specify the frequencies via a linecatalog.
[1153]1183
[931]1184 Parameters:
1185 freqs: list of rest frequency values or string identifiers
1186 unit: unit for rest frequency (default 'Hz')
[402]1187
[931]1188 Example:
[1446]1189 # set the given restfrequency for the all currently selected IFs
[931]1190 scan.set_restfreqs(freqs=1.4e9)
[1446]1191 # set multiple restfrequencies to all the selected data
1192 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1193 # If the number of IFs in the data is >= 2 the IF0 gets the first
1194 # value IF1 the second... NOTE that freqs needs to be
1195 # specified in list of list (e.g. [[],[],...] ).
1196 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]1197 #set the given restfrequency for the whole table (by name)
1198 scan.set_restfreqs(freqs="OH1667")
[391]1199
[931]1200 Note:
1201 To do more sophisticated rest-frequency setting, e.g. on a
1202 source and IF basis, use scantable.set_selection() before using
1203 this function.
[1757]1204 # provided your scantable is called scan
[931]1205 selection = selector()
1206 selection.set_name("ORION*")
1207 selection.set_ifs([1])
1208 scan.set_selection(selection)
1209 scan.set_restfreqs(freqs=86.6e9)
1210
1211 """
1212 varlist = vars()
[1157]1213 from asap import linecatalog
1214 # simple value
[1118]1215 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]1216 # TT mod
1217 #self._setrestfreqs(freqs, "",unit)
1218 self._setrestfreqs([freqs], [""],unit)
[1157]1219 # list of values
[1118]1220 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1221 # list values are scalars
[1118]1222 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1781]1223 self._setrestfreqs(freqs, [""], unit)
[1157]1224 # list values are tuples, (value, name)
1225 elif isinstance(freqs[-1], dict):
[1446]1226 #sel = selector()
1227 #savesel = self._getselection()
1228 #iflist = self.getifnos()
1229 #for i in xrange(len(freqs)):
1230 # sel.set_ifs(iflist[i])
1231 # self._setselection(sel)
1232 # self._setrestfreqs(freqs[i], "",unit)
1233 #self._setselection(savesel)
1234 self._setrestfreqs(freqs["value"],
[1781]1235 freqs["name"], unit)
[1446]1236 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1237 sel = selector()
1238 savesel = self._getselection()
[1322]1239 iflist = self.getifnos()
[1446]1240 if len(freqs)>len(iflist):
1241 raise ValueError("number of elements in list of list exeeds the current IF selections")
[1157]1242 for i in xrange(len(freqs)):
[1322]1243 sel.set_ifs(iflist[i])
[1259]1244 self._setselection(sel)
[1781]1245 self._setrestfreqs(freqs[i], [""], unit)
[1157]1246 self._setselection(savesel)
1247 # freqs are to be taken from a linecatalog
[1153]1248 elif isinstance(freqs, linecatalog):
1249 sel = selector()
1250 savesel = self._getselection()
iflist = self.getifnos()   # needed below; otherwise 'iflist' is undefined in this branch
1251 for i in xrange(freqs.nrow()):
[1322]1252 sel.set_ifs(iflist[i])
[1153]1253 self._setselection(sel)
1254 self._setrestfreqs(freqs.get_frequency(i),
1255 freqs.get_name(i), "MHz")
1256 # ensure that we are not iterating past nIF
1257 if i == self.nif()-1: break
1258 self._setselection(savesel)
[931]1259 else:
1260 return
1261 self._add_history("set_restfreqs", varlist)
1262
[1360]1263 def shift_refpix(self, delta):
[1757]1264 """
1265 Shift the reference pixel of the Spectral Coordinate by an
1266 integer amount.
1267 Parameters:
1268 delta: the amount to shift by
[1360]1269 Note:
[1757]1270 Be careful using this with broadband data.
[1360]1271 """
[1757]1272 Scantable.shift_refpix(self, delta)
[931]1273
[1259]1274 def history(self, filename=None):
1275 """
1276 Print the history. Optionally to a file.
[1348]1277 Parameters:
1278 filename: The name of the file to save the history to.
[1259]1279 """
[484]1280 hist = list(self._gethistory())
[794]1281 out = "-"*80
[484]1282 for h in hist:
[489]1283 if h.startswith("---"):
[794]1284 out += "\n"+h
[489]1285 else:
1286 items = h.split("##")
1287 date = items[0]
1288 func = items[1]
1289 items = items[2:]
[794]1290 out += "\n"+date+"\n"
1291 out += "Function: %s\n Parameters:" % (func)
[489]1292 for i in items:
1293 s = i.split("=")
[1118]1294 out += "\n %s = %s" % (s[0], s[1])
[794]1295 out += "\n"+"-"*80
[1259]1296 if filename is not None:
1297 if filename is "":
1298 filename = 'scantable_history.txt'
1299 import os
1300 filename = os.path.expandvars(os.path.expanduser(filename))
1301 if not os.path.isdir(filename):
1302 data = open(filename, 'w')
1303 data.write(out)
1304 data.close()
1305 else:
1306 msg = "Illegal file name '%s'." % (filename)
1307 if rcParams['verbose']:
[1612]1308 #print msg
[1614]1309 asaplog.push( msg )
1310 print_log( 'ERROR' )
[1259]1311 else:
1312 raise IOError(msg)
1313 if rcParams['verbose']:
1314 try:
1315 from IPython.genutils import page as pager
1316 except ImportError:
1317 from pydoc import pager
1318 pager(out)
1319 else:
1320 return out
[484]1321 return
[513]1322 #
1323 # Maths business
1324 #
[1782]1325 @print_log_dec
[931]1326 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1327 """
[1070]1328 Return the (time) weighted average of a scan.
[513]1329 Note:
[1070]1330 in channels only - align if necessary
[513]1331 Parameters:
1332 mask: an optional mask (only used for 'var' and 'tsys'
1333 weighting)
[558]1334 scanav: True averages each scan separately
1335 False (default) averages all scans together,
[1099]1336 weight: Weighting scheme.
1337 'none' (mean no weight)
1338 'var' (1/var(spec) weighted)
1339 'tsys' (1/Tsys**2 weighted)
1340 'tint' (integration time weighted)
1341 'tintsys' (Tint/Tsys**2)
1342 'median' ( median averaging)
[535]1343 The default is 'tint'
[931]1344 align: align the spectra in velocity before averaging. It takes
1345 the time of the first spectrum as reference time.
[513]1346 Example:
1347 # time average the scantable without using a mask
[710]1348 newscan = scan.average_time()
[513]1349 """
1350 varlist = vars()
[1757]1351 weight = weight or 'TINT'
1352 mask = mask or ()
1353 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1354 scan = (self, )
[989]1355 try:
[1118]1356 if align:
1357 scan = (self.freq_align(insitu=False), )
1358 s = None
1359 if weight.upper() == 'MEDIAN':
1360 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1361 scanav))
1362 else:
1363 s = scantable(self._math._average(scan, mask, weight.upper(),
1364 scanav))
1365 except RuntimeError, msg:
[989]1366 if rcParams['verbose']:
[1612]1367 #print msg
[1614]1368 print_log()
[1676]1369 asaplog.push( str(msg) )
[1614]1370 print_log( 'ERROR' )
[989]1371 return
1372 else: raise
[1099]1373 s._add_history("average_time", varlist)
[718]1374 print_log()
[513]1375 return s
[710]1376
[1782]1377 @print_log_dec
[876]1378 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1379 """
1380 Return a scan where all spectra are converted to either
1381 Jansky or Kelvin depending upon the flux units of the scan table.
1382 By default the function tries to look the values up internally.
1383 If it can't find them (or if you want to over-ride), you must
1384 specify EITHER jyperk OR eta (and D which it will try to look up
1385 also if you don't set it). jyperk takes precedence if you set both.
1386 Parameters:
1387 jyperk: the Jy / K conversion factor
1388 eta: the aperture efficiency
1389 d: the geometric diameter (metres)
1390 insitu: if False a new scantable is returned.
1391 Otherwise, the scaling is done in-situ
1392 The default is taken from .asaprc (False)
1393 """
1394 if insitu is None: insitu = rcParams['insitu']
[876]1395 self._math._setinsitu(insitu)
[513]1396 varlist = vars()
[1757]1397 jyperk = jyperk or -1.0
1398 d = d or -1.0
1399 eta = eta or -1.0
[876]1400 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1401 s._add_history("convert_flux", varlist)
1402 print_log()
1403 if insitu: self._assign(s)
1404 else: return s
[513]1405
[1782]1406 @print_log_dec
[876]1407 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1408 """
1409 Return a scan after applying a gain-elevation correction.
1410 The correction can be made via either a polynomial or a
1411 table-based interpolation (and extrapolation if necessary).
1412 You specify polynomial coefficients, an ascii table or neither.
1413 If you specify neither, then a polynomial correction will be made
1414 with built in coefficients known for certain telescopes (an error
1415 will occur if the instrument is not known).
1416 The data and Tsys are *divided* by the scaling factors.
1417 Parameters:
1418 poly: Polynomial coefficients (default None) to compute a
1419 gain-elevation correction as a function of
1420 elevation (in degrees).
1421 filename: The name of an ascii file holding correction factors.
1422 The first row of the ascii file must give the column
1423 names and these MUST include columns
1424 "ELEVATION" (degrees) and "FACTOR" (multiply data
1425 by this) somewhere.
1426 The second row must give the data type of the
1427 column. Use 'R' for Real and 'I' for Integer.
1428 An example file would be
1429 (actual factors are arbitrary) :
1430
1431 TIME ELEVATION FACTOR
1432 R R R
1433 0.1 0 0.8
1434 0.2 20 0.85
1435 0.3 40 0.9
1436 0.4 60 0.85
1437 0.5 80 0.8
1438 0.6 90 0.75
1439 method: Interpolation method when correcting from a table.
1440 Values are "nearest", "linear" (default), "cubic"
1441 and "spline"
1442 insitu: if False a new scantable is returned.
1443 Otherwise, the scaling is done in-situ
1444 The default is taken from .asaprc (False)
1445 """
1446
1447 if insitu is None: insitu = rcParams['insitu']
[876]1448 self._math._setinsitu(insitu)
[513]1449 varlist = vars()
[1757]1450 poly = poly or ()
[513]1451 from os.path import expandvars
1452 filename = expandvars(filename)
[876]1453 s = scantable(self._math._gainel(self, poly, filename, method))
1454 s._add_history("gain_el", varlist)
1455 print_log()
[1757]1456 if insitu:
1457 self._assign(s)
1458 else:
1459 return s
[710]1460
[1782]1461 @print_log_dec
[931]1462 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1463 """
1464 Return a scan where all rows have been aligned in frequency/velocity.
1465 The alignment frequency frame (e.g. LSRK) is that set by function
1466 set_freqframe.
1467 Parameters:
1468 reftime: reference time to align at. By default, the time of
1469 the first row of data is used.
1470 method: Interpolation method for regridding the spectra.
1471 Choose from "nearest", "linear", "cubic" (default)
1472 and "spline"
1473 insitu: if False a new scantable is returned.
1474 Otherwise, the scaling is done in-situ
1475 The default is taken from .asaprc (False)
1476 """
[931]1477 if insitu is None: insitu = rcParams["insitu"]
[876]1478 self._math._setinsitu(insitu)
[513]1479 varlist = vars()
[1757]1480 reftime = reftime or ""
[931]1481 s = scantable(self._math._freq_align(self, reftime, method))
[876]1482 s._add_history("freq_align", varlist)
1483 print_log()
1484 if insitu: self._assign(s)
1485 else: return s
[513]1486
[1782]1487 @print_log_dec
[1757]1488 def opacity(self, tau=None, insitu=None):
[513]1489 """
1490 Apply an opacity correction. The data
1491 and Tsys are multiplied by the correction factor.
1492 Parameters:
[1757]1493 tau: (list of) opacity from which the correction factor is
[513]1494 exp(tau*ZD)
[1757]1495 where ZD is the zenith-distance.
1496 If a list is provided, it has to be of length nIF,
1497 nIF*nPol or 1 and in order of IF/POL, e.g.
1498 [opif0pol0, opif0pol1, opif1pol0 ...]
1499 if tau is `None` the opacities are determined from a
1500 model.
[513]1501 insitu: if False a new scantable is returned.
1502 Otherwise, the scaling is done in-situ
1503 The default is taken from .asaprc (False)
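Example (an illustrative sketch; the opacity value is arbitrary):
    # correct all IFs/polarisations with a single zenith opacity of 0.05
    corrected = scan.opacity(tau=0.05, insitu=False)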
1504 """
1505 if insitu is None: insitu = rcParams['insitu']
[876]1506 self._math._setinsitu(insitu)
[513]1507 varlist = vars()
[1757]1508 if not hasattr(tau, "__len__"):
1509 tau = [tau]
[876]1510 s = scantable(self._math._opacity(self, tau))
1511 s._add_history("opacity", varlist)
1512 print_log()
1513 if insitu: self._assign(s)
1514 else: return s
[513]1515
[1782]1516 @print_log_dec
[513]1517 def bin(self, width=5, insitu=None):
1518 """
1519 Return a scan where all spectra have been binned up.
[1348]1520 Parameters:
[513]1521 width: The bin width (default=5) in pixels
1522 insitu: if False a new scantable is returned.
1523 Otherwise, the scaling is done in-situ
1524 The default is taken from .asaprc (False)
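Example (illustrative):
    binned = scan.bin(width=10, insitu=False)   # bin up by 10 channels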
1525 """
1526 if insitu is None: insitu = rcParams['insitu']
[876]1527 self._math._setinsitu(insitu)
[513]1528 varlist = vars()
[876]1529 s = scantable(self._math._bin(self, width))
[1118]1530 s._add_history("bin", varlist)
[876]1531 print_log()
[1757]1532 if insitu:
1533 self._assign(s)
1534 else:
1535 return s
[513]1536
[1782]1537 @print_log_dec
[513]1538 def resample(self, width=5, method='cubic', insitu=None):
1539 """
[1348]1540 Return a scan where all spectra have been binned up.
[1781]1541
[1348]1542 Parameters:
[513]1543 width: The bin width (default=5) in pixels
1544 method: Interpolation method when correcting from a table.
1545 Values are "nearest", "linear", "cubic" (default)
1546 and "spline"
1547 insitu: if False a new scantable is returned.
1548 Otherwise, the scaling is done in-situ
1549 The default is taken from .asaprc (False)
1550 """
1551 if insitu is None: insitu = rcParams['insitu']
[876]1552 self._math._setinsitu(insitu)
[513]1553 varlist = vars()
[876]1554 s = scantable(self._math._resample(self, method, width))
[1118]1555 s._add_history("resample", varlist)
[876]1556 print_log()
1557 if insitu: self._assign(s)
1558 else: return s
[513]1559
[1782]1560 @print_log_dec
[946]1561 def average_pol(self, mask=None, weight='none'):
1562 """
1563 Average the Polarisations together.
1564 Parameters:
1565 mask: An optional mask defining the region, where the
1566 averaging will be applied. The output will have all
1567 specified points masked.
1568 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1569 weighted), or 'tsys' (1/Tsys**2 weighted)
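Example (illustrative):
    polavg = scan.average_pol(weight='tsys')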
1570 """
1571 varlist = vars()
[1757]1572 mask = mask or ()
[1010]1573 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1574 s._add_history("average_pol", varlist)
[946]1575 print_log()
[992]1576 return s
[513]1577
[1782]1578 @print_log_dec
[1145]1579 def average_beam(self, mask=None, weight='none'):
1580 """
1581 Average the Beams together.
1582 Parameters:
1583 mask: An optional mask defining the region, where the
1584 averaging will be applied. The output will have all
1585 specified points masked.
1586 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1587 weighted), or 'tsys' (1/Tsys**2 weighted)
1588 """
1589 varlist = vars()
[1757]1590 mask = mask or ()
[1145]1591 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1592 s._add_history("average_beam", varlist)
1593 print_log()
1594 return s
1595
[1757]1596 def parallactify(self, pflag):
1597 """
1598 Set a flag to indicate whether this data should be treated as having
1599 been 'parallactified' (total phase == 0.0)
1600 Parameters:
1601 pflag: Bool indicating whether to turn this on (True) or
1602 off (False)
1603 """
1604 varlist = vars()
1605 self._parallactify(pflag)
1606 self._add_history("parallactify", varlist)
1607
[1782]1608 @print_log_dec
[992]1609 def convert_pol(self, poltype=None):
1610 """
1611 Convert the data to a different polarisation type.
[1757]1612 Note that you will need cross-polarisation terms for most conversions.
[992]1613 Parameters:
1614 poltype: The new polarisation type. Valid types are:
[1757]1615 "linear", "circular", "stokes" and "linpol"
[992]1616 """
1617 varlist = vars()
1618 try:
1619 s = scantable(self._math._convertpol(self, poltype))
[1118]1620 except RuntimeError, msg:
[992]1621 if rcParams['verbose']:
[1612]1622 #print msg
[1614]1623 print_log()
[1676]1624 asaplog.push( str(msg) )
[1614]1625 print_log( 'ERROR' )
[1118]1626 return
[992]1627 else:
1628 raise
[1118]1629 s._add_history("convert_pol", varlist)
[992]1630 print_log()
1631 return s
1632
[1782]1633 @print_log_dec
[1757]1634 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[513]1635 """
1636 Smooth the spectrum by the specified kernel (conserving flux).
1637 Parameters:
1638 kernel: The type of smoothing kernel. Select from
[1757]1639 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1640 or 'poly'
[513]1641 width: The width of the kernel in pixels. For hanning this is
1642 ignored; otherwise it defaults to 5 pixels.
1643 For 'gaussian' it is the Full Width Half
1644 Maximum. For 'boxcar' it is the full width.
[1757]1645 For 'rmedian' and 'poly' it is the half width.
1646 order: Optional parameter for 'poly' kernel (default is 2), to
1647 specify the order of the polynomial. Ignored by all other
1648 kernels.
[1653]1649 plot: plot the original and the smoothed spectra.
1650 In this each individual fit has to be approved, by
1651 typing 'y' or 'n'
[513]1652 insitu: if False a new scantable is returned.
1653 Otherwise, the smoothing is done in-situ
1654 The default is taken from .asaprc (False)
1655 Example:
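            # illustrative; assumes 'scan' is an existing scantable
            ssmooth = scan.smooth(kernel='gaussian', width=5.0, insitu=False)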
1657 """
1658 if insitu is None: insitu = rcParams['insitu']
[876]1659 self._math._setinsitu(insitu)
[513]1660 varlist = vars()
[1653]1661
1662 if plot: orgscan = self.copy()
1663
[1757]1664 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1665 s._add_history("smooth", varlist)
[1653]1666
1667 if plot:
1668 if rcParams['plotter.gui']:
1669 from asap.asaplotgui import asaplotgui as asaplot
1670 else:
1671 from asap.asaplot import asaplot
1672 self._p=asaplot()
1673 self._p.set_panels()
1674 ylab=s._get_ordinate_label()
1675 #self._p.palette(0,["#777777","red"])
1676 for r in xrange(s.nrow()):
1677 xsm=s._getabcissa(r)
1678 ysm=s._getspectrum(r)
1679 xorg=orgscan._getabcissa(r)
1680 yorg=orgscan._getspectrum(r)
1681 self._p.clear()
1682 self._p.hold()
1683 self._p.set_axes('ylabel',ylab)
1684 self._p.set_axes('xlabel',s._getabcissalabel(r))
1685 self._p.set_axes('title',s._getsourcename(r))
1686 self._p.set_line(label='Original',color="#777777")
1687 self._p.plot(xorg,yorg)
1688 self._p.set_line(label='Smoothed',color="red")
1689 self._p.plot(xsm,ysm)
1690 ### Ugly part for legend
1691 for i in [0,1]:
1692 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1693 self._p.release()
1694 ### Ugly part for legend
1695 self._p.subplots[0]['lines']=[]
1696 res = raw_input("Accept smoothing ([y]/n): ")
1697 if res.upper() == 'N':
1698 s._setspectrum(yorg, r)
1699 self._p.unmap()
1700 self._p = None
1701 del orgscan
1702
[876]1703 print_log()
1704 if insitu: self._assign(s)
1705 else: return s
[513]1706
[1782]1707 @print_log_dec
[1757]1708 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1709 insitu=None):
[513]1710 """
1711 Return a scan which has been baselined (all rows) by a polynomial.
1712 Parameters:
[794]1713 mask: an optional mask
1714 order: the order of the polynomial (default is 0)
[1061]1715 plot: plot the fit and the residual. In this each
1716 individual fit has to be approved by typing 'y'
1717 or 'n'
[1389]1718 uselin: use linear polynomial fit
[794]1719 insitu: if False a new scantable is returned.
1720 Otherwise, the baselining is done in-situ
1721 The default is taken from .asaprc (False)
[513]1722 Example:
1723 # return a scan baselined by a third order polynomial,
1724 # not using a mask
1725 bscan = scan.poly_baseline(order=3)
[579]1726 """
[513]1727 if insitu is None: insitu = rcParams['insitu']
[1649]1728 if not insitu:
1729 workscan = self.copy()
1730 else:
1731 workscan = self
[513]1732 varlist = vars()
1733 if mask is None:
[1295]1734 mask = [True for i in xrange(self.nchan(-1))]
[1781]1735
[513]1736 from asap.asapfitter import fitter
[1217]1737 try:
1738 f = fitter()
[1389]1739 if uselin:
1740 f.set_function(lpoly=order)
1741 else:
1742 f.set_function(poly=order)
[1637]1743
[1655]1744 rows = range(workscan.nrow())
[1637]1745 if len(rows) > 0:
1746 self.blpars = []
[1781]1747
[1637]1748 for r in rows:
1749 # take into account flagtra info (CAS-1434)
[1655]1750 flagtra = workscan._getmask(r)
[1637]1751 actualmask = mask[:]
1752 if len(actualmask) == 0:
1753 actualmask = list(flagtra[:])
1754 else:
1755 if len(actualmask) != len(flagtra):
1756 raise RuntimeError, "Mask and flagtra have different length"
1757 else:
1758 for i in range(0, len(actualmask)):
1759 actualmask[i] = actualmask[i] and flagtra[i]
[1655]1760 f.set_scan(workscan, actualmask)
1761 f.x = workscan._getabcissa(r)
1762 f.y = workscan._getspectrum(r)
[1637]1763 f.data = None
1764 f.fit()
[1653]1765 if plot:
1766 f.plot(residual=True)
1767 x = raw_input("Accept fit ( [y]/n ): ")
1768 if x.upper() == 'N':
1769 self.blpars.append(None)
1770 continue
[1649]1771 workscan._setspectrum(f.fitter.getresidual(), r)
[1655]1772 self.blpars.append(f.get_parameters())
1773
[1653]1774 if plot:
1775 f._p.unmap()
1776 f._p = None
[1649]1777 workscan._add_history("poly_baseline", varlist)
[1217]1778 print_log()
[1649]1779 if insitu: self._assign(workscan)
[1781]1780 else: return workscan
[1217]1781 except RuntimeError:
1782 msg = "The fit failed, possibly because it didn't converge."
1783 if rcParams['verbose']:
[1612]1784 #print msg
[1614]1785 print_log()
[1676]1786 asaplog.push( str(msg) )
[1614]1787 print_log( 'ERROR' )
[1217]1788 return
1789 else:
1790 raise RuntimeError(msg)
[513]1791
[1217]1792
[1118]1793 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1794 threshold=3, chan_avg_limit=1, plot=False,
1795 insitu=None):
[880]1796 """
1797 Return a scan which has been baselined (all rows) by a polynomial.
1798 Spectral lines are detected first using linefinder and masked out
1799 to avoid them affecting the baseline solution.
1800
1801 Parameters:
[880]1802 mask: an optional mask retrieved from scantable
1803 edge: an optional number of channels to drop at
1804 the edge of the spectrum. If only one value is
1805 specified, the same number will be dropped from
1806 both sides of the spectrum. Default is to keep
[907]1807 all channels. Nested tuples represent individual
[976]1808 edge selection for different IFs (the number of spectral
1809 channels can differ between IFs)
[880]1810 order: the order of the polynomial (default is 0)
1811 threshold: the threshold used by line finder. It is better to
1812 keep it large as only strong lines affect the
1813 baseline solution.
[1280]1814 chan_avg_limit:
1815 a maximum number of consecutive spectral channels to
1816 average during the search of weak and broad lines.
1817 The default is no averaging (and no search for weak
1818 lines). If such lines can affect the fitted baseline
1819 (e.g. a high order polynomial is fitted), increase this
1820 parameter (usually values up to 8 are reasonable). Most
1821 users of this method should find the default value
1822 sufficient.
[1061]1823 plot: plot the fit and the residual. In this each
1824 individual fit has to be approved by typing 'y'
1825 or 'n'
[880]1826 insitu: if False a new scantable is returned.
1827 Otherwise, the baselining is done in-situ
1828 The default is taken from .asaprc (False)
1829
1830 Example:
1831 scan2=scan.auto_poly_baseline(order=7)
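            # illustrative per-IF edge selection via nested tuples, assuming
            # the data contain two IFs
            scan3 = scan.auto_poly_baseline(edge=((45, 45), (15, 15)), order=3)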
1832 """
1833 if insitu is None: insitu = rcParams['insitu']
1834 varlist = vars()
1835 from asap.asapfitter import fitter
1836 from asap.asaplinefind import linefinder
1837 from asap import _is_sequence_or_number as _is_valid
1838
[976]1839 # check whether edge is set up for each IF individually
[1118]1840 individualedge = False;
1841 if len(edge) > 1:
1842 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1843 individualedge = True;
[907]1844
[1118]1845 if not _is_valid(edge, int) and not individualedge:
[909]1846 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1847 pair of integers specified as a tuple. Nested tuples are allowed \
1848 to make individual selection for different IFs."
[919]1849
[1118]1850 curedge = (0, 0)
1851 if individualedge:
1852 for edgepar in edge:
1853 if not _is_valid(edgepar, int):
1854 raise ValueError, "Each element of the 'edge' tuple has \
1855 to be a pair of integers or an integer."
[907]1856 else:
[1118]1857 curedge = edge;
[880]1858
1859 # setup fitter
1860 f = fitter()
1861 f.set_function(poly=order)
1862
1863 # setup line finder
[1118]1864 fl = linefinder()
[1268]1865 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1866
1867 if not insitu:
[1118]1868 workscan = self.copy()
[880]1869 else:
[1118]1870 workscan = self
[880]1871
[907]1872 fl.set_scan(workscan)
1873
[1118]1874 rows = range(workscan.nrow())
[1446]1875 # Save parameters of baseline fits & masklists as a class attribute.
1876 # NOTICE: It does not reflect changes in scantable!
1877 if len(rows) > 0:
1878 self.blpars=[]
1879 self.masklists=[]
[880]1880 asaplog.push("Processing:")
1881 for r in rows:
[1118]1882 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1883 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1884 workscan.getpol(r), workscan.getcycle(r))
[880]1885 asaplog.push(msg, False)
[907]1886
[976]1887 # figure out edge parameter
[1118]1888 if individualedge:
1889 if len(edge) <= workscan.getif(r):
1890 raise RuntimeError, "Number of edge elements appears to " \
1891 "be less than the number of IFs"
1892 curedge = edge[workscan.getif(r)]
[919]1893
[1637]1894 # take into account flagtra info (CAS-1434)
1895 flagtra = workscan._getmask(r)
1896 actualmask = mask[:]
1897 if len(actualmask) == 0:
1898 actualmask = list(flagtra[:])
1899 else:
1900 if len(actualmask) != len(flagtra):
1901 raise RuntimeError, "Mask and flagtra have different length"
1902 else:
1903 for i in range(0, len(actualmask)):
1904 actualmask[i] = actualmask[i] and flagtra[i]
1905
[976]1906 # setup line finder
[1637]1907 fl.find_lines(r, actualmask, curedge)
[1446]1908 outmask=fl.get_mask()
[880]1909 f.set_scan(workscan, fl.get_mask())
1910 f.x = workscan._getabcissa(r)
1911 f.y = workscan._getspectrum(r)
1912 f.data = None
1913 f.fit()
[1781]1914
[1446]1915 # Show mask list
1916 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1917 msg = "mask range: "+str(masklist)
1918 asaplog.push(msg, False)
1919
[1061]1920 if plot:
1921 f.plot(residual=True)
1922 x = raw_input("Accept fit ( [y]/n ): ")
1923 if x.upper() == 'N':
[1446]1924 self.blpars.append(None)
1925 self.masklists.append(None)
[1061]1926 continue
[1655]1927
[880]1928 workscan._setspectrum(f.fitter.getresidual(), r)
[1655]1929 self.blpars.append(f.get_parameters())
[1446]1930 self.masklists.append(masklist)
[1061]1931 if plot:
1932 f._p.unmap()
1933 f._p = None
1934 workscan._add_history("auto_poly_baseline", varlist)
[880]1935 if insitu:
1936 self._assign(workscan)
1937 else:
1938 return workscan
1939
[1782]1940 @print_log_dec
[914]1941 def rotate_linpolphase(self, angle):
1942 """
1943 Rotate the phase of the complex polarization O=Q+iU correlation.
1944 This is always done in situ in the raw data. So if you call this
1945 function more than once then each call rotates the phase further.
1946 Parameters:
1947 angle: The angle (degrees) to rotate (add) by.
1948 Examples:
1949 scan.rotate_linpolphase(2.3)
1950 """
1951 varlist = vars()
[936]1952 self._math._rotate_linpolphase(self, angle)
[914]1953 self._add_history("rotate_linpolphase", varlist)
1954 print_log()
1955 return
[710]1956
[1782]1957 @print_log_dec
[914]1958 def rotate_xyphase(self, angle):
1959 """
1960 Rotate the phase of the XY correlation. This is always done in situ
1961 in the data. So if you call this function more than once
1962 then each call rotates the phase further.
1963 Parameters:
1964 angle: The angle (degrees) to rotate (add) by.
1965 Examples:
1966 scan.rotate_xyphase(2.3)
1967 """
1968 varlist = vars()
[936]1969 self._math._rotate_xyphase(self, angle)
[914]1970 self._add_history("rotate_xyphase", varlist)
1971 print_log()
1972 return
1973
[1782]1974 @print_log_dec
[914]1975 def swap_linears(self):
1976 """
[1757]1977 Swap the linear polarisations XX and YY, or more generally the first
[1348]1978 two polarisations, as this also works for circulars.
[914]1979 """
1980 varlist = vars()
[936]1981 self._math._swap_linears(self)
[914]1982 self._add_history("swap_linears", varlist)
1983 print_log()
1984 return
1985
[1782]1986 @print_log_dec
[914]1987 def invert_phase(self):
1988 """
1989 Invert the phase of the complex polarisation
1990 """
1991 varlist = vars()
[936]1992 self._math._invert_phase(self)
[914]1993 self._add_history("invert_phase", varlist)
1994 print_log()
1995 return
1996
[1782]1997 @print_log_dec
[876]1998 def add(self, offset, insitu=None):
[513]1999 """
2000 Return a scan where all spectra have the offset added
2001 Parameters:
2002 offset: the offset
2003 insitu: if False a new scantable is returned.
2004 Otherwise, the addition is done in-situ
2005 The default is taken from .asaprc (False)
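        Example (illustrative; 'scan' is assumed to be an existing scantable,
        the offset being in the current intensity unit):
            soff = scan.add(0.05, insitu=False)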
2006 """
2007 if insitu is None: insitu = rcParams['insitu']
[876]2008 self._math._setinsitu(insitu)
[513]2009 varlist = vars()
[876]2010 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2011 s._add_history("add", varlist)
[876]2012 print_log()
2013 if insitu:
2014 self._assign(s)
2015 else:
[513]2016 return s
2017
[1782]2018 @print_log_dec
[1680]2019 def scale(self, factor, tsys=True, insitu=None):
[513]2020 """
2021 Return a scan where all spectra are scaled by the given 'factor'
2022 Parameters:
[1677]2023 factor: the scaling factor (float or 1D float list)
[513]2024 insitu: if False a new scantable is returned.
2025 Otherwise, the scaling is done in-situ
2026 The default is taken from .asaprc (False)
2027 tsys: if True (default) then apply the operation to Tsys
2028 as well as the data
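        Example (illustrative; assumes 'scan' is an existing scantable):
            # multiply all spectra (and Tsys) by 1.3
            scaled = scan.scale(1.3, tsys=True, insitu=False)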
2029 """
2030 if insitu is None: insitu = rcParams['insitu']
[876]2031 self._math._setinsitu(insitu)
[513]2032 varlist = vars()
[1680]2033 s = None
[1681]2034 import numpy
2035 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2036 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
[1680]2037 from asapmath import _array2dOp
2038 s = _array2dOp( self.copy(), factor, "MUL", tsys )
[1677]2039 else:
[1681]2040 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
[1677]2041 else:
2042 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2043 s._add_history("scale", varlist)
[876]2044 print_log()
2045 if insitu:
2046 self._assign(s)
2047 else:
[513]2048 return s
2049
[1603]2050 def set_sourcetype(self, match, matchtype="pattern",
2051 sourcetype="reference"):
2052 """
2053 Set the type of the source to be a source or reference scan
2054 using the provided pattern:
2055 Parameters:
2056 match: a Unix style pattern, regular expression or selector
2057 matchtype: 'pattern' (default) UNIX style pattern or
2058 'regex' regular expression
2059 sourcetype: the type of the source to use (source/reference)
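        Example (illustrative; assumes the reference scans carry a trailing
        '_R' in their source names):
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")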
2060 """
2061 varlist = vars()
2062 basesel = self.get_selection()
2063 stype = -1
2064 if sourcetype.lower().startswith("r"):
2065 stype = 1
2066 elif sourcetype.lower().startswith("s"):
2067 stype = 0
2068 else:
2069 raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2070 if matchtype.lower().startswith("p"):
2071 matchtype = "pattern"
2072 elif matchtype.lower().startswith("r"):
2073 matchtype = "regex"
2074 else:
2075 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2076 sel = selector()
2077 if isinstance(match, selector):
2078 sel = match
2079 else:
2080 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2081 self.set_selection(basesel+sel)
2082 self._setsourcetype(stype)
2083 self.set_selection(basesel)
[1757]2084 self._add_history("set_sourcetype", varlist)
[1603]2085
[1782]2086 @print_log_dec
[1631]2087 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[670]2088 """
2089 This function allows one to build quotients automatically.
[1631]2090 It assumes the observation to have the same number of
[670]2091 "ons" and "offs"
2092 Parameters:
[710]2093 preserve: you can preserve (default) the continuum or
2094 remove it. The equations used are
[670]2095 preserve: Output = Toff * (on/off) - Toff
[1070]2096 remove: Output = Toff * (on/off) - Ton
[1757]2097 mode: the on/off detection mode
[1348]2098 'paired' (default)
2099 identifies 'off' scans by the
2100 trailing '_R' (Mopra/Parkes) or
2101 '_e'/'_w' (Tid) and matches
2102 on/off pairs from the observing pattern
[1603]2103 'time'
2104 finds the closest off in time
[1348]2105
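        Example (illustrative; assumes 'scan' contains matching on/off scans
        tagged as source/reference):
            q = scan.auto_quotient(preserve=True, mode='paired')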
[670]2106 """
[1348]2107 modes = ["time", "paired"]
[670]2108 if not mode in modes:
[876]2109 msg = "please provide valid mode. Valid modes are %s" % (modes)
2110 raise ValueError(msg)
2111 varlist = vars()
[1348]2112 s = None
2113 if mode.lower() == "paired":
2114 basesel = self.get_selection()
[1356]2115 sel = selector()+basesel
2116 sel.set_query("SRCTYPE==1")
2117 self.set_selection(sel)
[1348]2118 offs = self.copy()
2119 sel.set_query("SRCTYPE==0")
[1356]2120 self.set_selection(sel)
[1348]2121 ons = self.copy()
2122 s = scantable(self._math._quotient(ons, offs, preserve))
2123 self.set_selection(basesel)
2124 elif mode.lower() == "time":
2125 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2126 s._add_history("auto_quotient", varlist)
[876]2127 print_log()
2128 return s
[710]2129
[1782]2130 @print_log_dec
[1145]2131 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]2132 """
[1143]2133 Form a quotient using "off" beams when observing in "MX" mode.
2134 Parameters:
[1145]2135 mask: an optional mask to be used when weight == 'stddev'
[1143]2136 weight: How to average the off beams. Default is 'median'.
[1145]2137 preserve: you can preserve (default) the continuum or
2138 remove it. The equations used are
2139 preserve: Output = Toff * (on/off) - Toff
2140 remove: Output = Toff * (on/off) - Ton
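        Example (illustrative; assumes 'scan' holds an MX-mode observation):
            q = scan.mx_quotient(weight='median', preserve=True)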
[1217]2141 """
[1757]2142 mask = mask or ()
[1141]2143 varlist = vars()
2144 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2145 preoff = scantable(self._math._mx_extract(self, 'off'))
2146 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2147 from asapmath import quotient
[1145]2148 q = quotient(on, off, preserve)
[1143]2149 q._add_history("mx_quotient", varlist)
[1145]2150 print_log()
[1217]2151 return q
[513]2152
[1782]2153 @print_log_dec
[718]2154 def freq_switch(self, insitu=None):
2155 """
2156 Apply frequency switching to the data.
2157 Parameters:
2158 insitu: if False a new scantable is returned.
2159 Otherwise, the switching is done in-situ
2160 The default is taken from .asaprc (False)
2161 Example:
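            # illustrative; assumes 'scan' holds frequency-switched data
            fs = scan.freq_switch(insitu=False)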
2163 """
2164 if insitu is None: insitu = rcParams['insitu']
[876]2165 self._math._setinsitu(insitu)
[718]2166 varlist = vars()
[876]2167 s = scantable(self._math._freqswitch(self))
[1118]2168 s._add_history("freq_switch", varlist)
[876]2169 print_log()
2170 if insitu: self._assign(s)
2171 else: return s
[718]2172
[1782]2173 @print_log_dec
[780]2174 def recalc_azel(self):
2175 """
2176 Recalculate the azimuth and elevation for each position.
2177 Parameters:
2178 none
2179 Example:
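            # illustrative; recomputes Az/El in place for an existing 'scan'
            scan.recalc_azel()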
2180 """
2181 varlist = vars()
[876]2182 self._recalcazel()
[780]2183 self._add_history("recalc_azel", varlist)
2184 print_log()
2185 return
2186
[1782]2187 @print_log_dec
[513]2188 def __add__(self, other):
[1782]2189 varlist = vars()
2190 s = None
2191 if isinstance(other, scantable):
2192 s = scantable(self._math._binaryop(self, other, "ADD"))
2193 elif isinstance(other, float):
2194 s = scantable(self._math._unaryop(self, other, "ADD", False))
2195 else:
2196 raise TypeError("Other input is not a scantable or float value")
2197 s._add_history("operator +", varlist)
2198 return s
[513]2199
[1782]2200 @print_log_dec
[513]2201 def __sub__(self, other):
2202 """
2203 implicit on all axes and on Tsys
2204 """
[1782]2205 varlist = vars()
2206 s = None
2207 if isinstance(other, scantable):
2208 s = scantable(self._math._binaryop(self, other, "SUB"))
2209 elif isinstance(other, float):
2210 s = scantable(self._math._unaryop(self, other, "SUB", False))
2211 else:
2212 raise TypeError("Other input is not a scantable or float value")
2213 s._add_history("operator -", varlist)
2214 return s
[710]2215
[1782]2216 @print_log_dec
[513]2217 def __mul__(self, other):
2218 """
2219 implicit on all axes and on Tsys
2220 """
[1782]2221 varlist = vars()
2222 s = None
2223 if isinstance(other, scantable):
2224 s = scantable(self._math._binaryop(self, other, "MUL"))
2225 elif isinstance(other, float):
2226 s = scantable(self._math._unaryop(self, other, "MUL", False))
2227 else:
2228 raise TypeError("Other input is not a scantable or float value")
2229 s._add_history("operator *", varlist)
2230 return s
[513]2231
[1782]2232
2233 @print_log_dec
[513]2234 def __div__(self, other):
2235 """
2236 implicit on all axes and on Tsys
2237 """
[1782]2238 varlist = vars()
2239 s = None
2240 if isinstance(other, scantable):
2241 s = scantable(self._math._binaryop(self, other, "DIV"))
2242 elif isinstance(other, float):
2243 if other == 0.0:
2244 raise ZeroDivisionError("Dividing by zero is not recommended")
2245 s = scantable(self._math._unaryop(self, other, "DIV", False))
2246 else:
2247 raise TypeError("Other input is not a scantable or float value")
2248 s._add_history("operator /", varlist)
2249 return s
[513]2250
[530]2251 def get_fit(self, row=0):
2252 """
2253 Print or return the stored fits for a row in the scantable
2254 Parameters:
2255 row: the row which the fit has been applied to.
2256 """
2257 if row > self.nrow():
2258 return
[976]2259 from asap.asapfit import asapfit
[530]2260 fit = asapfit(self._getfit(row))
[718]2261 if rcParams['verbose']:
[1612]2262 #print fit
[1614]2263 asaplog.push( '%s' %(fit) )
2264 print_log()
[530]2265 return
2266 else:
2267 return fit.as_dict()
2268
[1603]2269 def flag_nans(self):
2270 """
2271 Utility function to flag NaN values in the scantable.
2272 """
2273 import numpy
2274 basesel = self.get_selection()
2275 for i in range(self.nrow()):
[1757]2276 sel = self.get_row_selector(i)
2277 self.set_selection(basesel+sel)
[1603]2278 nans = numpy.isnan(self._getspectrum(0))
2279 if numpy.any(nans):
2280 bnans = [ bool(v) for v in nans]
2281 self.flag(bnans)
2282 self.set_selection(basesel)
2283
[1757]2284 def get_row_selector(self, rowno):
2285 return selector(beams=self.getbeam(rowno),
2286 ifs=self.getif(rowno),
2287 pols=self.getpol(rowno),
2288 scans=self.getscan(rowno),
2289 cycles=self.getcycle(rowno))
2290
[484]2291 def _add_history(self, funcname, parameters):
[1603]2292 if not rcParams['scantable.history']:
2293 return
[484]2294 # create date
2295 sep = "##"
2296 from datetime import datetime
2297 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2298 hist = dstr+sep
2299 hist += funcname+sep#cdate+sep
2300 if parameters.has_key('self'): del parameters['self']
[1118]2301 for k, v in parameters.iteritems():
[484]2302 if type(v) is dict:
[1118]2303 for k2, v2 in v.iteritems():
[484]2304 hist += k2
2305 hist += "="
[1118]2306 if isinstance(v2, scantable):
[484]2307 hist += 'scantable'
2308 elif k2 == 'mask':
[1118]2309 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2310 hist += str(self._zip_mask(v2))
2311 else:
2312 hist += str(v2)
[484]2313 else:
[513]2314 hist += str(v2)
[484]2315 else:
2316 hist += k
2317 hist += "="
[1118]2318 if isinstance(v, scantable):
[484]2319 hist += 'scantable'
2320 elif k == 'mask':
[1118]2321 if isinstance(v, list) or isinstance(v, tuple):
[513]2322 hist += str(self._zip_mask(v))
2323 else:
2324 hist += str(v)
[484]2325 else:
2326 hist += str(v)
2327 hist += sep
2328 hist = hist[:-2] # remove trailing '##'
2329 self._addhistory(hist)
2330
[710]2331
[484]2332 def _zip_mask(self, mask):
2333 mask = list(mask)
2334 i = 0
2335 segments = []
2336 while mask[i:].count(1):
2337 i += mask[i:].index(1)
2338 if mask[i:].count(0):
2339 j = i + mask[i:].index(0)
2340 else:
[710]2341 j = len(mask)
[1118]2342 segments.append([i, j])
[710]2343 i = j
[484]2344 return segments
[714]2345
[626]2346 def _get_ordinate_label(self):
2347 fu = "("+self.get_fluxunit()+")"
2348 import re
2349 lbl = "Intensity"
[1118]2350 if re.match(".K.", fu):
[626]2351 lbl = "Brightness Temperature "+ fu
[1118]2352 elif re.match(".Jy.", fu):
[626]2353 lbl = "Flux density "+ fu
2354 return lbl
[710]2355
[876]2356 def _check_ifs(self):
2357 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2358 nchans = filter(lambda t: t > 0, nchans)
[876]2359 return (sum(nchans)/len(nchans) == nchans[0])
[976]2360
[1684]2361 def _fill(self, names, unit, average, getpt, antenna):
[976]2362 import os
2363 from asap._asap import stfiller
2364 first = True
2365 fullnames = []
2366 for name in names:
2367 name = os.path.expandvars(name)
2368 name = os.path.expanduser(name)
2369 if not os.path.exists(name):
2370 msg = "File '%s' does not exist" % (name)
2371 if rcParams['verbose']:
2372 asaplog.push(msg)
[1612]2373 #print asaplog.pop().strip()
[1614]2374 print_log( 'ERROR' )
[976]2375 return
2376 raise IOError(msg)
2377 fullnames.append(name)
2378 if average:
2379 asaplog.push('Auto averaging integrations')
[1079]2380 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2381 for name in fullnames:
[1073]2382 tbl = Scantable(stype)
2383 r = stfiller(tbl)
[1603]2384 rx = rcParams['scantable.reference']
2385 r._setreferenceexpr(rx)
[976]2386 msg = "Importing %s..." % (name)
[1118]2387 asaplog.push(msg, False)
[976]2388 print_log()
[1684]2389 r._open(name, antenna, -1, -1, getpt)
[976]2390 r._read()
2391 if average:
[1118]2392 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2393 if not first:
2394 tbl = self._math._merge([self, tbl])
2395 Scantable.__init__(self, tbl)
2396 r._close()
[1118]2397 del r, tbl
[976]2398 first = False
2399 if unit is not None:
2400 self.set_fluxunit(unit)
[1446]2401 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]2402
[1603]2403 def __getitem__(self, key):
2404 if key < 0:
2405 key += self.nrow()
2406 if key >= self.nrow():
2407 raise IndexError("Row index out of range.")
2408 return self._getspectrum(key)
2409
2410 def __setitem__(self, key, value):
2411 if key < 0:
2412 key += self.nrow()
2413 if key >= self.nrow():
2414 raise IndexError("Row index out of range.")
2415 if not hasattr(value, "__len__") or \
2416 len(value) > self.nchan(self.getif(key)):
2417 raise ValueError("Spectrum length doesn't match.")
2418 return self._setspectrum(value, key)
2419
2420 def __len__(self):
2421 return self.nrow()
2422
2423 def __iter__(self):
2424 for i in range(len(self)):
2425 yield self[i]