source: trunk/python/scantable.py@ 1849

Last change on this file since 1849 was 1846, checked in by Malte Marquarding, 14 years ago

Documentation update to play nicer with sphinx

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 91.6 KB
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
13from asap.logging import asaplog, print_log, print_log_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1824]17from asap.utils import _n_bools, mask_not, mask_and, mask_or
[102]18
[1689]19
20def preserve_selection(func):
[1691]21 @wraps_dec(func)
[1689]22 def wrap(obj, *args, **kw):
23 basesel = obj.get_selection()
24 val = func(obj, *args, **kw)
25 obj.set_selection(basesel)
26 return val
27 return wrap
28
[1846]29def is_scantable(filename):
30 """Is the given file a scantable?
[1689]31
[1846]32 Parameters:
33
34 filename: the name of the file/directory to test
35
36 """
[1697]37 return (os.path.isdir(filename)
38 and not os.path.exists(filename+'/table.f1')
39 and os.path.exists(filename+'/table.info'))
40
41
[876]42class scantable(Scantable):
[1846]43 """\
44 The ASAP container for scans (single-dish data).
[102]45 """
[1819]46
[1589]47 @print_log_dec
[1824]48 def __init__(self, filename, average=None, unit=None, getpt=None,
49 antenna=None, parallactify=None):
[1846]50 """\
[102]51 Create a scantable from a saved one or make a reference
[1846]52
[102]53 Parameters:
[1846]54
55 filename: the name of an asap table on disk
56 or
57 the name of a rpfits/sdfits/ms file
58 (integrations within scans are auto averaged
59 and the whole file is read) or
60 [advanced] a reference to an existing scantable
61
 62 average: average all integrations within a scan on read.
63 The default (True) is taken from .asaprc.
64
[484]65 unit: brightness unit; must be consistent with K or Jy.
[1846]66 Over-rides the default selected by the filler
67 (input rpfits/sdfits/ms) or replaces the value
68 in existing scantables
69
70 getpt: for MeasurementSet input data only:
71 If True, all pointing data are filled.
 72 The default is True. Setting this to False can make
 73 loading the MS data faster in some cases.
74
75 antenna: Antenna selection. integer (id) or string (name or id).
76
 77 parallactify: Indicate that the data have been parallactified.
 78 The default is taken from the .asaprc file.
79
[710]80 """
[976]81 if average is None:
[710]82 average = rcParams['scantable.autoaverage']
[1819]83 if getpt is None:
84 getpt = True
[1843]85 if antenna is not None:
86 asaplog.push("Antenna selection currently unsupported."
87 "Using '0'")
88 print_log('WARN')
[1819]89 if antenna is None:
90 antenna = ''
91 elif type(antenna) == int:
[1843]92 antenna = '%s' % antenna
[1819]93 elif type(antenna) == list:
94 tmpstr = ''
95 for i in range( len(antenna) ):
96 if type(antenna[i]) == int:
97 tmpstr = tmpstr + ('%s,'%(antenna[i]))
98 elif type(antenna[i]) == str:
99 tmpstr=tmpstr+antenna[i]+','
100 else:
101 asaplog.push('Bad antenna selection.')
102 print_log('ERROR')
103 return
104 antenna = tmpstr.rstrip(',')
[1593]105 parallactify = parallactify or rcParams['scantable.parallactify']
[1259]106 varlist = vars()
[876]107 from asap._asap import stmath
[1819]108 self._math = stmath( rcParams['insitu'] )
[876]109 if isinstance(filename, Scantable):
110 Scantable.__init__(self, filename)
[181]111 else:
[1697]112 if isinstance(filename, str):
[976]113 filename = os.path.expandvars(filename)
114 filename = os.path.expanduser(filename)
115 if not os.path.exists(filename):
116 s = "File '%s' not found." % (filename)
[718]117 if rcParams['verbose']:
[976]118 asaplog.push(s)
[1819]119 print_log('ERROR')
[718]120 return
[976]121 raise IOError(s)
[1697]122 if is_scantable(filename):
123 ondisk = rcParams['scantable.storage'] == 'disk'
124 Scantable.__init__(self, filename, ondisk)
125 if unit is not None:
126 self.set_fluxunit(unit)
[1819]127 # do not reset to the default freqframe
128 #self.set_freqframe(rcParams['scantable.freqframe'])
129 elif os.path.isdir(filename) \
130 and not os.path.exists(filename+'/table.f1'):
131 msg = "The given file '%s'is not a valid " \
132 "asap table." % (filename)
133 if rcParams['verbose']:
134 #print msg
135 asaplog.push( msg )
136 print_log( 'ERROR' )
137 return
138 else:
139 raise IOError(msg)
[226]140 else:
[1819]141 self._fill([filename], unit, average, getpt, antenna)
[1118]142 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]143 and isinstance(filename[-1], str):
[1819]144 self._fill(filename, unit, average, getpt, antenna)
[1586]145 self.parallactify(parallactify)
[1259]146 self._add_history("scantable", varlist)
[1819]147 print_log()
[102]148
[1589]149 @print_log_dec
[876]150 def save(self, name=None, format=None, overwrite=False):
[1846]151 """\
[1280]152 Store the scantable on disk. This can be an asap (aips++) Table,
153 SDFITS or MS2 format.
[1846]154
[116]155 Parameters:
[1846]156
[1093]157 name: the name of the outputfile. For format "ASCII"
158 this is the root file name (data in 'name'.txt
[497]159 and header in 'name'_header.txt)
[116]160 format: an optional file format. Default is ASAP.
[280]161 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]162 'SDFITS' (save as SDFITS file)
[200]163 'ASCII' (saves as ascii text file)
[226]164 'MS2' (saves as an aips++
165 MeasurementSet V2)
[1573]166 'FITS' (save as image FITS - not
[1443]167 readable by class)
168 'CLASS' (save as FITS readable by CLASS)
[411]169 overwrite: whether to overwrite the file if it already exists.
[256]170 The default (False) is to return with a warning
[411]171 without writing the output. USE WITH CARE.
[1846]172 Example::
173
[116]174 scan.save('myscan.asap')
[1118]175 scan.save('myscan.sdfits', 'SDFITS')
[1846]176
[116]177 """
[411]178 from os import path
[1593]179 format = format or rcParams['scantable.save']
[256]180 suffix = '.'+format.lower()
[1118]181 if name is None or name == "":
[256]182 name = 'scantable'+suffix
[718]183 msg = "No filename given. Using default name %s..." % name
184 asaplog.push(msg)
[411]185 name = path.expandvars(name)
[256]186 if path.isfile(name) or path.isdir(name):
187 if not overwrite:
[718]188 msg = "File %s exists." % name
189 if rcParams['verbose']:
[1819]190 #print msg
191 asaplog.push( msg )
192 print_log( 'ERROR' )
[718]193 return
194 else:
195 raise IOError(msg)
[451]196 format2 = format.upper()
197 if format2 == 'ASAP':
[116]198 self._save(name)
199 else:
[989]200 from asap._asap import stwriter as stw
[1118]201 writer = stw(format2)
202 writer.write(self, name)
[1819]203 print_log()
[116]204 return
205
[102]206 def copy(self):
[1846]207 """Return a copy of this scantable.
208
209 *Note*:
210
[1348]211 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]212
213 Example::
214
[102]215 copiedscan = scan.copy()
[1846]216
[102]217 """
[876]218 sd = scantable(Scantable._copy(self))
[113]219 return sd
220
[1093]221 def drop_scan(self, scanid=None):
[1846]222 """\
[1093]223 Return a new scantable where the specified scan number(s) has(have)
224 been dropped.
[1846]225
[1093]226 Parameters:
[1846]227
[1093]228 scanid: a (list of) scan number(s)
[1846]229
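 Example (a minimal usage sketch; the scan numbers are arbitrary)::

 # drop scans 2 and 3 from the scantable, keeping the rest
 newscan = scan.drop_scan([2, 3])
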
[1093]230 """
231 from asap import _is_sequence_or_number as _is_valid
232 from asap import _to_list
233 from asap import unique
234 if not _is_valid(scanid):
235 if rcParams['verbose']:
[1819]236 #print "Please specify a scanno to drop from the scantable"
237 asaplog.push( 'Please specify a scanno to drop from the scantable' )
238 print_log( 'ERROR' )
[1093]239 return
240 else:
241 raise RuntimeError("No scan given")
242 try:
243 scanid = _to_list(scanid)
244 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
245 for sid in scanid: allscans.remove(sid)
[1118]246 if len(allscans) == 0:
247 raise ValueError("Can't remove all scans")
[1093]248 except ValueError:
249 if rcParams['verbose']:
[1819]250 #print "Couldn't find any match."
251 print_log()
252 asaplog.push( "Couldn't find any match." )
253 print_log( 'ERROR' )
[1093]254 return
255 else: raise
256 try:
[1593]257 sel = selector(scans=allscans)
[1594]258 return self._select_copy(sel)
[1093]259 except RuntimeError:
[1118]260 if rcParams['verbose']:
[1819]261 #print "Couldn't find any match."
262 print_log()
263 asaplog.push( "Couldn't find any match." )
264 print_log( 'ERROR' )
[1118]265 else:
266 raise
[1093]267
[1594]268 def _select_copy(self, selection):
269 orig = self.get_selection()
270 self.set_selection(orig+selection)
271 cp = self.copy()
272 self.set_selection(orig)
273 return cp
274
[102]275 def get_scan(self, scanid=None):
276 """
277 Return a specific scan (by scanno) or collection of scans (by
278 source name) in a new scantable.
[1846]279
280 *Note*:
281
[1348]282 See scantable.drop_scan() for the inverse operation.
[1846]283
[102]284 Parameters:
[1846]285
[513]286 scanid: a (list of) scanno or a source name, unix-style
287 patterns are accepted for source name matching, e.g.
 288 '*_R' gets all 'ref' scans
[1846]289
290 Example::
291
[513]292 # get all scans containing the source '323p459'
293 newscan = scan.get_scan('323p459')
294 # get all 'off' scans
295 refscans = scan.get_scan('*_R')
 296 # get a subset of scans by scanno (as listed in scan.summary())
[1118]297 newscan = scan.get_scan([0, 2, 7, 10])
[1846]298
[102]299 """
300 if scanid is None:
[718]301 if rcParams['verbose']:
[1819]302 #print "Please specify a scan no or name to " \
303 # "retrieve from the scantable"
[1846]304 asaplog.push( 'Please specify a scan no or name to retrieve'
305 ' from the scantable' )
[1819]306 print_log( 'ERROR' )
[718]307 return
308 else:
309 raise RuntimeError("No scan given")
310
[102]311 try:
[946]312 bsel = self.get_selection()
313 sel = selector()
[102]314 if type(scanid) is str:
[946]315 sel.set_name(scanid)
[1594]316 return self._select_copy(sel)
[102]317 elif type(scanid) is int:
[946]318 sel.set_scans([scanid])
[1594]319 return self._select_copy(sel)
[381]320 elif type(scanid) is list:
[946]321 sel.set_scans(scanid)
[1594]322 return self._select_copy(sel)
[381]323 else:
[718]324 msg = "Illegal scanid type, use 'int' or 'list' if ints."
325 if rcParams['verbose']:
[1819]326 #print msg
327 asaplog.push( msg )
328 print_log( 'ERROR' )
[718]329 else:
330 raise TypeError(msg)
[102]331 except RuntimeError:
[1819]332 if rcParams['verbose']:
333 #print "Couldn't find any match."
334 print_log()
335 asaplog.push( "Couldn't find any match." )
336 print_log( 'ERROR' )
[718]337 else: raise
[102]338
339 def __str__(self):
[1118]340 return Scantable._summary(self, True)
[102]341
[976]342 def summary(self, filename=None):
[1846]343 """\
[102]344 Print a summary of the contents of this scantable.
[1846]345
[102]346 Parameters:
[1846]347
[102]348 filename: the name of a file to write the output to
349 Default - no file output
[1846]350
[102]351 """
[976]352 info = Scantable._summary(self, True)
[102]353 if filename is not None:
[256]354 if filename is "":
355 filename = 'scantable_summary.txt'
[415]356 from os.path import expandvars, isdir
[411]357 filename = expandvars(filename)
[415]358 if not isdir(filename):
[413]359 data = open(filename, 'w')
360 data.write(info)
361 data.close()
362 else:
[718]363 msg = "Illegal file name '%s'." % (filename)
364 if rcParams['verbose']:
[1819]365 #print msg
366 asaplog.push( msg )
367 print_log( 'ERROR' )
[718]368 else:
369 raise IOError(msg)
370 if rcParams['verbose']:
[794]371 try:
372 from IPython.genutils import page as pager
373 except ImportError:
374 from pydoc import pager
375 pager(info)
[718]376 else:
377 return info
[710]378
[1512]379 def get_spectrum(self, rowno):
[1471]380 """Return the spectrum for the current row in the scantable as a list.
[1846]381
[1471]382 Parameters:
[1846]383
[1573]384 rowno: the row number to retrieve the spectrum from
[1846]385
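 Example (a usage sketch; row 0 is an arbitrary choice)::

 spec = scan.get_spectrum(0)   # list of channel values for row 0
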
[1471]386 """
387 return self._getspectrum(rowno)
[946]388
[1471]389 def get_mask(self, rowno):
390 """Return the mask for the current row in the scantable as a list.
[1846]391
[1471]392 Parameters:
[1846]393
[1573]394 rowno: the row number to retrieve the mask from
[1846]395
[1471]396 """
397 return self._getmask(rowno)
398
399 def set_spectrum(self, spec, rowno):
400 """Return the spectrum for the current row in the scantable as a list.
[1846]401
[1471]402 Parameters:
[1846]403
[1471]404 spec: the spectrum
[1573]405 rowno: the row number to set the spectrum for
[1846]406
[1471]407 """
408 assert(len(spec) == self.nchan())
409 return self._setspectrum(spec, rowno)
410
[1600]411 def get_coordinate(self, rowno):
412 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]413
414 *Note*:
415
[1600]416 * This coordinate is only valid until a scantable method modifies
417 the frequency axis.
 418 * This coordinate contains the original frequency set-up,
 419 NOT the new frame. The conversions however are done using the user
420 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
421 use scantable.freq_align first. Without it there is no closure,
[1846]422 i.e.::
[1600]423
[1846]424 c = myscan.get_coordinate(0)
425 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
426
[1600]427 Parameters:
[1846]428
[1600]429 rowno: the row number for the spectral coordinate
430
431 """
432 return coordinate(Scantable.get_coordinate(self, rowno))
433
[946]434 def get_selection(self):
[1846]435 """\
[1005]436 Get the selection object currently set on this scantable.
[1846]437
438 Example::
439
[1005]440 sel = scan.get_selection()
441 sel.set_ifs(0) # select IF 0
442 scan.set_selection(sel) # apply modified selection
[1846]443
[946]444 """
445 return selector(self._getselection())
446
[1576]447 def set_selection(self, selection=None, **kw):
[1846]448 """\
[1005]449 Select a subset of the data. All following operations on this scantable
 450 are only applied to this selection.
[1846]451
[1005]452 Parameters:
[1697]453
[1846]454 selection: a selector object (default unset the selection), or
455 any combination of "pols", "ifs", "beams", "scans",
456 "cycles", "name", "query"
[1697]457
[1846]458 Examples::
[1697]459
[1005]460 sel = selector() # create a selection object
[1118]461 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]462 scan.set_selection(sel) # set the selection
[1005]463 scan.summary() # will only print summary of scanno 0 and 3
 464 scan.set_selection() # unset the selection
[1697]465 # or the equivalent
 466 scan.set_selection(scans=[0,3])
 467 scan.summary() # will only print summary of scanno 0 and 3
468 scan.set_selection() # unset the selection
[1846]469
[946]470 """
[1576]471 if selection is None:
472 # reset
473 if len(kw) == 0:
474 selection = selector()
475 else:
476 # try keywords
477 for k in kw:
478 if k not in selector.fields:
479 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
480 selection = selector(**kw)
[946]481 self._setselection(selection)
482
[1819]483 def get_row(self, row=0, insitu=None):
[1846]484 """\
[1819]485 Select a row in the scantable.
 486 Return a scantable with a single row.
[1846]487
[1819]488 Parameters:
[1846]489
490 row: row no of integration, default is 0.
 491 insitu: if False a new scantable is returned. Otherwise, the
 492 selection is done in-situ. The default is taken from .asaprc
493 (False)
494
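 Example (a usage sketch; row 3 is an arbitrary choice)::

 row3 = scan.get_row(3, insitu=False)   # scantable holding only row 3
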
[1819]495 """
496 if insitu is None: insitu = rcParams['insitu']
497 if not insitu:
498 workscan = self.copy()
499 else:
500 workscan = self
501 # Select a row
502 sel=selector()
503 sel.set_scans([workscan.getscan(row)])
504 sel.set_cycles([workscan.getcycle(row)])
505 sel.set_beams([workscan.getbeam(row)])
506 sel.set_ifs([workscan.getif(row)])
507 sel.set_polarisations([workscan.getpol(row)])
508 sel.set_name(workscan._getsourcename(row))
509 workscan.set_selection(sel)
510 if not workscan.nrow() == 1:
511 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
512 raise RuntimeError(msg)
513 del sel
514 if insitu:
515 self._assign(workscan)
516 else:
517 return workscan
518
519 #def stats(self, stat='stddev', mask=None):
520 def stats(self, stat='stddev', mask=None, form='3.3f'):
[1846]521 """\
[135]522 Determine the specified statistic of the current beam/if/pol.
[102]523 Takes a 'mask' as an optional parameter to specify which
524 channels should be excluded.
[1846]525
[102]526 Parameters:
[1846]527
[1819]528 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
529 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]530 mask: an optional mask specifying where the statistic
[102]531 should be determined.
[1819]532 form: format string to print statistic values
[1846]533
534 Example::
535
[113]536 scan.set_unit('channel')
[1118]537 msk = scan.create_mask([100, 200], [500, 600])
[135]538 scan.stats(stat='mean', mask=msk)
[1846]539
[102]540 """
[1593]541 mask = mask or []
[876]542 if not self._check_ifs():
[1118]543 raise ValueError("Cannot apply mask as the IFs have different "
544 "number of channels. Please use setselection() "
545 "to select individual IFs")
[1819]546 rtnabc = False
547 if stat.lower().endswith('_abc'): rtnabc = True
548 getchan = False
549 if stat.lower().startswith('min') or stat.lower().startswith('max'):
550 chan = self._math._minmaxchan(self, mask, stat)
551 getchan = True
552 statvals = []
553 if not rtnabc: statvals = self._math._stats(self, mask, stat)
[256]554
[1819]555 #def cb(i):
556 # return statvals[i]
[256]557
[1819]558 #return self._row_callback(cb, stat)
[102]559
[1819]560 label=stat
561 #callback=cb
562 out = ""
563 #outvec = []
564 sep = '-'*50
565 for i in range(self.nrow()):
566 refstr = ''
567 statunit= ''
568 if getchan:
569 qx, qy = self.chan2data(rowno=i, chan=chan[i])
570 if rtnabc:
571 statvals.append(qx['value'])
572 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
573 statunit= '['+qx['unit']+']'
574 else:
575 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
576
577 tm = self._gettime(i)
578 src = self._getsourcename(i)
579 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
580 out += 'Time[%s]:\n' % (tm)
581 if self.nbeam(-1) > 1:
582 out += ' Beam[%d] ' % (self.getbeam(i))
583 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
584 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
585 #outvec.append(callback(i))
586 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
587 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
588 out += sep+"\n"
589
590 if rcParams['verbose']:
591 import os
592 if os.environ.has_key( 'USER' ):
593 usr=os.environ['USER']
594 else:
595 import commands
596 usr=commands.getoutput( 'whoami' )
597 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
598 f=open(tmpfile,'w')
599 print >> f, sep
600 print >> f, ' %s %s' % (label, statunit)
601 print >> f, sep
602 print >> f, out
603 f.close()
604 f=open(tmpfile,'r')
605 x=f.readlines()
606 f.close()
607 blanc=''
608 asaplog.push(blanc.join(x), False)
609 #for xx in x:
610 # asaplog.push( xx, False )
611 print_log()
612 return statvals
613
614 def chan2data(self, rowno=0, chan=0):
[1846]615 """\
[1819]616 Returns channel/frequency/velocity and spectral value
617 at an arbitrary row and channel in the scantable.
[1846]618
[1819]619 Parameters:
[1846]620
[1819]621 rowno: a row number in the scantable. Default is the
622 first row, i.e. rowno=0
623 chan: a channel in the scantable. Default is the first
624 channel, i.e. pos=0
[1846]625
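 Example (a usage sketch; row and channel numbers are arbitrary)::

 qx, qy = scan.chan2data(rowno=0, chan=100)
 print qx['value'], qx['unit'], qy['value'], qy['unit']
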
[1819]626 """
627 if isinstance(rowno, int) and isinstance(chan, int):
628 qx = {'unit': self.get_unit(),
629 'value': self._getabcissa(rowno)[chan]}
630 qy = {'unit': self.get_fluxunit(),
631 'value': self._getspectrum(rowno)[chan]}
632 return qx, qy
633
[1118]634 def stddev(self, mask=None):
[1846]635 """\
[135]636 Determine the standard deviation of the current beam/if/pol
637 Takes a 'mask' as an optional parameter to specify which
638 channels should be excluded.
[1846]639
[135]640 Parameters:
[1846]641
[135]642 mask: an optional mask specifying where the standard
643 deviation should be determined.
644
[1846]645 Example::
646
[135]647 scan.set_unit('channel')
[1118]648 msk = scan.create_mask([100, 200], [500, 600])
[135]649 scan.stddev(mask=msk)
[1846]650
[135]651 """
[1118]652 return self.stats(stat='stddev', mask=mask);
[135]653
[1003]654
[1259]655 def get_column_names(self):
[1846]656 """\
[1003]657 Return a list of column names, which can be used for selection.
658 """
[1259]659 return list(Scantable.get_column_names(self))
[1003]660
[1730]661 def get_tsys(self, row=-1):
[1846]662 """\
[113]663 Return the System temperatures.
[1846]664
665 Parameters:
666
667 row: the rowno to get the information for. (default all rows)
668
[113]669 Returns:
[1846]670
[876]671 a list of Tsys values for the current selection
[1846]672
[113]673 """
[1730]674 if row > -1:
675 return self._get_column(self._gettsys, row)
[876]676 return self._row_callback(self._gettsys, "Tsys")
[256]677
[1730]678
679 def get_weather(self, row=-1):
[1846]680 """\
 681 Return the weather information.
682
683 Parameters:
684
685 row: the rowno to get the information for. (default all rows)
686
687 Returns:
688
 689 a dict or list of dicts of values for the current selection
690
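 Example (a usage sketch; row 0 is an arbitrary choice)::

 w = scan.get_weather(row=0)
 print w['temperature'], w['pressure'], w['humidity']
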
691 """
692
[1730]693 values = self._get_column(self._get_weather, row)
694 if row > -1:
695 return {'temperature': values[0],
696 'pressure': values[1], 'humidity' : values[2],
697 'windspeed' : values[3], 'windaz' : values[4]
698 }
699 else:
700 out = []
701 for r in values:
702
703 out.append({'temperature': r[0],
704 'pressure': r[1], 'humidity' : r[2],
705 'windspeed' : r[3], 'windaz' : r[4]
706 })
707 return out
708
[876]709 def _row_callback(self, callback, label):
710 out = ""
[1118]711 outvec = []
[1590]712 sep = '-'*50
[876]713 for i in range(self.nrow()):
714 tm = self._gettime(i)
715 src = self._getsourcename(i)
[1590]716 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]717 out += 'Time[%s]:\n' % (tm)
[1590]718 if self.nbeam(-1) > 1:
719 out += ' Beam[%d] ' % (self.getbeam(i))
720 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
721 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]722 outvec.append(callback(i))
723 out += '= %3.3f\n' % (outvec[i])
[1590]724 out += sep+'\n'
[876]725 if rcParams['verbose']:
[1819]726 asaplog.push(sep)
727 asaplog.push(" %s" % (label))
728 asaplog.push(sep)
729 asaplog.push(out)
730 print_log()
[1175]731 return outvec
[256]732
[1070]733 def _get_column(self, callback, row=-1):
734 """
735 """
736 if row == -1:
737 return [callback(i) for i in range(self.nrow())]
738 else:
[1819]739 if 0 <= row < self.nrow():
[1070]740 return callback(row)
[256]741
[1070]742
[1348]743 def get_time(self, row=-1, asdatetime=False):
[1846]744 """\
[113]745 Get a list of time stamps for the observations.
[1348]746 Return a datetime object for each integration time stamp in the scantable.
[1846]747
[113]748 Parameters:
[1846]749
[1348]750 row: row no of integration. Default -1 return all rows
751 asdatetime: return values as datetime objects rather than strings
[1846]752
[113]753 """
[1175]754 from time import strptime
755 from datetime import datetime
[1392]756 times = self._get_column(self._gettime, row)
[1348]757 if not asdatetime:
[1392]758 return times
[1175]759 format = "%Y/%m/%d/%H:%M:%S"
760 if isinstance(times, list):
761 return [datetime(*strptime(i, format)[:6]) for i in times]
762 else:
763 return datetime(*strptime(times, format)[:6])
[102]764
[1348]765
766 def get_inttime(self, row=-1):
[1846]767 """\
[1348]768 Get a list of integration times for the observations.
769 Return a time in seconds for each integration in the scantable.
[1846]770
[1348]771 Parameters:
[1846]772
[1348]773 row: row no of integration. Default -1 return all rows.
[1846]774
[1348]775 """
[1573]776 return self._get_column(self._getinttime, row)
[1348]777
[1573]778
[714]779 def get_sourcename(self, row=-1):
[1846]780 """\
[794]781 Get a list source names for the observations.
[714]782 Return a string for each integration in the scantable.
783 Parameters:
[1846]784
[1348]785 row: row no of integration. Default -1 return all rows.
[1846]786
[714]787 """
[1070]788 return self._get_column(self._getsourcename, row)
[714]789
[794]790 def get_elevation(self, row=-1):
[1846]791 """\
[794]792 Get a list of elevations for the observations.
793 Return a float for each integration in the scantable.
[1846]794
[794]795 Parameters:
[1846]796
[1348]797 row: row no of integration. Default -1 return all rows.
[1846]798
[794]799 """
[1070]800 return self._get_column(self._getelevation, row)
[794]801
802 def get_azimuth(self, row=-1):
[1846]803 """\
[794]804 Get a list of azimuths for the observations.
805 Return a float for each integration in the scantable.
[1846]806
[794]807 Parameters:
[1348]808 row: row no of integration. Default -1 return all rows.
[1846]809
[794]810 """
[1070]811 return self._get_column(self._getazimuth, row)
[794]812
813 def get_parangle(self, row=-1):
[1846]814 """\
[794]815 Get a list of parallactic angles for the observations.
816 Return a float for each integration in the scantable.
[1846]817
[794]818 Parameters:
[1846]819
[1348]820 row: row no of integration. Default -1 return all rows.
[1846]821
[794]822 """
[1070]823 return self._get_column(self._getparangle, row)
[794]824
[1070]825 def get_direction(self, row=-1):
826 """
827 Get a list of Positions on the sky (direction) for the observations.
[1594]828 Return a string for each integration in the scantable.
[1070]829 Parameters:
830 row: row no of integration. Default -1 return all rows
831 Example:
832 none
833 """
834 return self._get_column(self._getdirection, row)
835
[1391]836 def get_directionval(self, row=-1):
[1846]837 """\
[1391]838 Get a list of Positions on the sky (direction) for the observations.
839 Return a float for each integration in the scantable.
[1846]840
[1391]841 Parameters:
[1846]842
[1391]843 row: row no of integration. Default -1 return all rows
[1846]844
[1391]845 """
846 return self._get_column(self._getdirectionvec, row)
847
[1730]848 @print_log_dec
[102]849 def set_unit(self, unit='channel'):
[1846]850 """\
[102]851 Set the unit for all following operations on this scantable
[1846]852
[102]853 Parameters:
[1846]854
855 unit: optional unit, default is 'channel'. Use one of '*Hz',
856 'km/s', 'channel' or equivalent ''
857
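 Example (a usage sketch)::

 scan.set_unit('km/s')    # work in velocity from now on
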
[102]858 """
[484]859 varlist = vars()
[1118]860 if unit in ['', 'pixel', 'channel']:
[113]861 unit = ''
862 inf = list(self._getcoordinfo())
863 inf[0] = unit
864 self._setcoordinfo(inf)
[1118]865 self._add_history("set_unit", varlist)
[113]866
[1589]867 @print_log_dec
[484]868 def set_instrument(self, instr):
[1846]869 """\
[1348]870 Set the instrument for subsequent processing.
[1846]871
[358]872 Parameters:
[1846]873
[710]874 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]875 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]876
[358]877 """
878 self._setInstrument(instr)
[1118]879 self._add_history("set_instument", vars())
[1819]880 print_log()
[358]881
[1589]882 @print_log_dec
[1190]883 def set_feedtype(self, feedtype):
[1846]884 """\
[1190]885 Overwrite the feed type, which might not be set correctly.
[1846]886
[1190]887 Parameters:
[1846]888
[1190]889 feedtype: 'linear' or 'circular'
[1846]890
[1190]891 """
892 self._setfeedtype(feedtype)
893 self._add_history("set_feedtype", vars())
[1819]894 print_log()
[1190]895
[1589]896 @print_log_dec
[276]897 def set_doppler(self, doppler='RADIO'):
[1846]898 """\
[276]899 Set the doppler for all following operations on this scantable.
[1846]900
[276]901 Parameters:
[1846]902
[276]903 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]904
[276]905 """
[484]906 varlist = vars()
[276]907 inf = list(self._getcoordinfo())
908 inf[2] = doppler
909 self._setcoordinfo(inf)
[1118]910 self._add_history("set_doppler", vars())
[1819]911 print_log()
[710]912
[1589]913 @print_log_dec
[226]914 def set_freqframe(self, frame=None):
[1846]915 """\
[113]916 Set the frame type of the Spectral Axis.
[1846]917
[113]918 Parameters:
[1846]919
[591]920 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]921 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]922 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]923
924 Example::
925
[113]926 scan.set_freqframe('BARY')
[1846]927
[113]928 """
[1593]929 frame = frame or rcParams['scantable.freqframe']
[484]930 varlist = vars()
[1819]931 # "REST" is not implemented in casacore
932 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
933 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
934 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]935 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]936
[989]937 if frame in valid:
[113]938 inf = list(self._getcoordinfo())
939 inf[1] = frame
940 self._setcoordinfo(inf)
[1118]941 self._add_history("set_freqframe", varlist)
[102]942 else:
[1118]943 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]944 if rcParams['verbose']:
[1819]945 #print msg
946 asaplog.push( msg )
947 print_log( 'ERROR' )
[718]948 else:
949 raise TypeError(msg)
[1819]950 print_log()
[710]951
[989]952 def set_dirframe(self, frame=""):
[1846]953 """\
[989]954 Set the frame type of the Direction on the sky.
[1846]955
[989]956 Parameters:
[1846]957
[989]958 frame: an optional frame type, default ''. Valid frames are:
959 'J2000', 'B1950', 'GALACTIC'
[1846]960
 961 Example::
962
[989]963 scan.set_dirframe('GALACTIC')
[1846]964
[989]965 """
966 varlist = vars()
967 try:
968 Scantable.set_dirframe(self, frame)
[1118]969 except RuntimeError, msg:
[989]970 if rcParams['verbose']:
[1819]971 #print msg
972 print_log()
973 asaplog.push( str(msg) )
974 print_log( 'ERROR' )
[989]975 else:
976 raise
[1118]977 self._add_history("set_dirframe", varlist)
[989]978
[113]979 def get_unit(self):
[1846]980 """\
[113]981 Get the default unit set in this scantable
[1846]982
[113]983 Returns:
[1846]984
[113]985 A unit string
[1846]986
[113]987 """
988 inf = self._getcoordinfo()
989 unit = inf[0]
990 if unit == '': unit = 'channel'
991 return unit
[102]992
[158]993 def get_abcissa(self, rowno=0):
[1846]994 """\
[158]995 Get the abcissa in the current coordinate setup for the currently
[113]996 selected Beam/IF/Pol
[1846]997
[113]998 Parameters:
[1846]999
[226]1000 rowno: an optional row number in the scantable. Default is the
1001 first row, i.e. rowno=0
[1846]1002
[113]1003 Returns:
[1846]1004
[1348]1005 The abcissa values and their label string (as a tuple)
[1846]1006
[113]1007 """
[256]1008 abc = self._getabcissa(rowno)
[710]1009 lbl = self._getabcissalabel(rowno)
[1819]1010 print_log()
[158]1011 return abc, lbl
[113]1012
[1819]1013 def flag(self, mask=None, unflag=False):
[1846]1014 """\
[1001]1015 Flag the selected data using an optional channel mask.
[1846]1016
[1001]1017 Parameters:
[1846]1018
[1001]1019 mask: an optional channel mask, created with create_mask. Default
1020 (no mask) is all channels.
[1819]1021 unflag: if True, unflag the data
[1846]1022
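 Example (a usage sketch; the channel range is arbitrary)::

 # flag the channels selected by the mask (channels 0-100 here)
 msk = scan.create_mask([0, 100])
 scan.flag(msk)
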
[1001]1023 """
1024 varlist = vars()
[1593]1025 mask = mask or []
[1001]1026 try:
[1819]1027 self._flag(mask, unflag)
[1118]1028 except RuntimeError, msg:
[1001]1029 if rcParams['verbose']:
[1819]1030 #print msg
1031 print_log()
1032 asaplog.push( str(msg) )
1033 print_log( 'ERROR' )
[1001]1034 return
1035 else: raise
1036 self._add_history("flag", varlist)
1037
[1819]1038 def flag_row(self, rows=[], unflag=False):
[1846]1039 """\
[1819]1040 Flag the selected data in row-based manner.
[1846]1041
[1819]1042 Parameters:
[1846]1043
[1843]1044 rows: list of row numbers to be flagged. Default is no row
1045 (must be explicitly specified to execute row-based flagging).
[1819]1046 unflag: if True, unflag the data.
[1846]1047
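 Example (a usage sketch; the row numbers are arbitrary)::

 scan.flag_row([2, 5])    # flag rows 2 and 5 completely
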
[1819]1048 """
1049 varlist = vars()
1050 try:
1051 self._flag_row(rows, unflag)
1052 except RuntimeError, msg:
1053 if rcParams['verbose']:
1054 print_log()
1055 asaplog.push( str(msg) )
1056 print_log('ERROR')
1057 return
1058 else: raise
1059 self._add_history("flag_row", varlist)
1060
1061 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1062 """\
[1819]1063 Flag the selected data outside a specified range (channel-based)
[1846]1064
[1819]1065 Parameters:
[1846]1066
[1819]1067 uthres: upper threshold.
1068 dthres: lower threshold
[1846]1069
[1819]1070 clipoutside: True for flagging data outside the range [dthres:uthres].
 1071 False for flagging data inside the range.
[1846]1072 unflag: if True, unflag the data.
1073
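 Example (a usage sketch; the thresholds are arbitrary)::

 # flag everything outside the range [-1.0, 5.0]
 scan.clip(uthres=5.0, dthres=-1.0)
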
[1819]1074 """
1075 varlist = vars()
1076 try:
1077 self._clip(uthres, dthres, clipoutside, unflag)
1078 except RuntimeError, msg:
1079 if rcParams['verbose']:
1080 print_log()
1081 asaplog.push(str(msg))
1082 print_log('ERROR')
1083 return
1084 else: raise
1085 self._add_history("clip", varlist)
1086
[1589]1087 @print_log_dec
[1584]1088 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1089 """\
[1192]1090 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1091 Flagged data in the scantable gets interpolated over the region.
[1192]1092 No taper is applied.
[1846]1093
[1192]1094 Parameters:
[1846]1095
[1579]1096 start: the start frequency (really a period within the
1097 bandwidth) or period to remove
1098 end: the end frequency or period to remove
[1584]1099 unit: the frequency unit (default "MHz") or "" for
[1579]1100 explicit lag channels
[1846]1101
1102 *Notes*:
1103
[1579]1104 It is recommended to flag edges of the band or strong
[1348]1105 signals beforehand.
[1846]1106
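 Example (a usage sketch; the frequency range is an arbitrary placeholder)::

 # interpolate over a 0.5-1.5 MHz periodic pattern in lag space
 scan.lag_flag(0.5, 1.5, unit="MHz")
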
[1192]1107 """
1108 if insitu is None: insitu = rcParams['insitu']
1109 self._math._setinsitu(insitu)
1110 varlist = vars()
[1579]1111 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1112 if not (unit == "" or base.has_key(unit)):
[1192]1113 raise ValueError("%s is not a valid unit." % unit)
1114 try:
[1579]1115 if unit == "":
1116 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1117 else:
1118 s = scantable(self._math._lag_flag(self, start*base[unit],
1119 end*base[unit], "frequency"))
[1192]1120 except RuntimeError, msg:
1121 if rcParams['verbose']:
[1819]1122 #print msg
1123 print_log()
1124 asaplog.push( str(msg) )
1125 print_log( 'ERROR' )
[1192]1126 return
1127 else: raise
1128 s._add_history("lag_flag", varlist)
[1819]1129 print_log()
[1192]1130 if insitu:
1131 self._assign(s)
1132 else:
1133 return s
[1001]1134
[1589]1135 @print_log_dec
[113]1136 def create_mask(self, *args, **kwargs):
[1846]1137 """\
[1118]1138 Compute and return a mask based on [min, max] windows.
[189]1139 The specified windows are to be INCLUDED, when the mask is
[113]1140 applied.
[1846]1141
[102]1142 Parameters:
[1846]1143
[1118]1144 [min, max], [min2, max2], ...
[1024]1145 Pairs of start/end points (inclusive) specifying the regions
[102]1146 to be masked
[189]1147 invert: optional argument. If specified as True,
1148 return an inverted mask, i.e. the regions
1149 specified are EXCLUDED
[513]1150 row: create the mask using the specified row for
1151 unit conversions, default is row=0
1152 only necessary if frequency varies over rows.
[1846]1153
1154 Examples::
1155
[113]1156 scan.set_unit('channel')
[1846]1157 # a)
[1118]1158 msk = scan.create_mask([400, 500], [800, 900])
[189]1159 # masks everything outside 400 and 500
[113]1160 # and 800 and 900 in the unit 'channel'
1161
[1846]1162 # b)
[1118]1163 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1164 # masks the regions between 400 and 500
[113]1165 # and 800 and 900 in the unit 'channel'
[1846]1166
1167 # c)
1168 #mask only channel 400
[1554]1169 msk = scan.create_mask([400])
[1846]1170
[102]1171 """
[1554]1172 row = kwargs.get("row", 0)
[513]1173 data = self._getabcissa(row)
[113]1174 u = self._getcoordinfo()[0]
[718]1175 if rcParams['verbose']:
[113]1176 if u == "": u = "channel"
[718]1177 msg = "The current mask window unit is %s" % u
[1118]1178 i = self._check_ifs()
1179 if not i:
[876]1180 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]1181 asaplog.push(msg)
[102]1182 n = self.nchan()
[1295]1183 msk = _n_bools(n, False)
[710]1184 # test if args is a 'list' or a 'normal *args - UGLY!!!
1185
[1118]1186 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1187 and args or args[0]
[710]1188 for window in ws:
[1554]1189 if len(window) == 1:
1190 window = [window[0], window[0]]
1191 if len(window) == 0 or len(window) > 2:
1192 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1193 if window[0] > window[1]:
1194 tmp = window[0]
1195 window[0] = window[1]
1196 window[1] = tmp
[102]1197 for i in range(n):
[1024]1198 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1199 msk[i] = True
[113]1200 if kwargs.has_key('invert'):
1201 if kwargs.get('invert'):
[1295]1202 msk = mask_not(msk)
[1819]1203 print_log()
[102]1204 return msk
[710]1205
[1819]1206 def get_masklist(self, mask=None, row=0):
[1846]1207 """\
[1819]1208 Compute and return a list of mask windows, [min, max].
[1846]1209
[1819]1210 Parameters:
[1846]1211
[1819]1212 mask: channel mask, created with create_mask.
 1213 row: calculate the masklist using the specified row
1214 for unit conversions, default is row=0
1215 only necessary if frequency varies over rows.
[1846]1216
[1819]1217 Returns:
[1846]1218
[1819]1219 [min, max], [min2, max2], ...
 1220 Pairs of start/end points (inclusive) specifying
1221 the masked regions
[1846]1222
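 Example (a usage sketch; the window values are arbitrary)::

 msk = scan.create_mask([400, 500], [800, 900])
 windows = scan.get_masklist(msk)   # list of [min, max] pairs in the current unit
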
[1819]1223 """
1224 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1225 raise TypeError("The mask should be list or tuple.")
1226 if len(mask) < 2:
1227 raise TypeError("The mask elements should be > 1")
1228 if self.nchan() != len(mask):
1229 msg = "Number of channels in scantable != number of mask elements"
1230 raise TypeError(msg)
1231 data = self._getabcissa(row)
1232 u = self._getcoordinfo()[0]
1233 if rcParams['verbose']:
1234 if u == "": u = "channel"
1235 msg = "The current mask window unit is %s" % u
1236 i = self._check_ifs()
1237 if not i:
1238 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1239 asaplog.push(msg)
1240 masklist=[]
1241 ist, ien = None, None
1242 ist, ien=self.get_mask_indices(mask)
1243 if ist is not None and ien is not None:
1244 for i in xrange(len(ist)):
1245 range=[data[ist[i]],data[ien[i]]]
1246 range.sort()
1247 masklist.append([range[0],range[1]])
1248 return masklist
1249
1250 def get_mask_indices(self, mask=None):
[1846]1251 """\
[1819]1252 Compute and return lists of mask start indices and mask end indices.
1253 Parameters:
1254 mask: channel mask, created with create_mask.
[1846]1255
[1819]1256 Returns:
[1846]1257
[1819]1258 List of mask start indices and that of mask end indices,
1259 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1260
[1819]1261 """
1262 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1263 raise TypeError("The mask should be list or tuple.")
1264 if len(mask) < 2:
1265 raise TypeError("The mask elements should be > 1")
1266 istart=[]
1267 iend=[]
1268 if mask[0]: istart.append(0)
1269 for i in range(len(mask)-1):
1270 if not mask[i] and mask[i+1]:
1271 istart.append(i+1)
1272 elif mask[i] and not mask[i+1]:
1273 iend.append(i)
1274 if mask[len(mask)-1]: iend.append(len(mask)-1)
1275 if len(istart) != len(iend):
1276 raise RuntimeError("Numbers of mask start != mask end.")
1277 for i in range(len(istart)):
1278 if istart[i] > iend[i]:
1279 raise RuntimeError("Mask start index > mask end index")
1280 break
1281 return istart,iend
1282
1283# def get_restfreqs(self):
1284# """
1285# Get the restfrequency(s) stored in this scantable.
1286# The return value(s) are always of unit 'Hz'
1287# Parameters:
1288# none
1289# Returns:
1290# a list of doubles
1291# """
1292# return list(self._getrestfreqs())
1293
1294 def get_restfreqs(self, ids=None):
[1846]1295 """\
[256]1296 Get the restfrequency(s) stored in this scantable.
1297 The return value(s) are always of unit 'Hz'
[1846]1298
[256]1299 Parameters:
[1846]1300
[1819]1301 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1302 be retrieved
[1846]1303
[256]1304 Returns:
[1846]1305
[1819]1306 dictionary containing ids and a list of doubles for each id
[1846]1307
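 Example (a usage sketch)::

 rfreqs = scan.get_restfreqs()        # dict keyed by MOLECULE_ID
 rf_id0 = scan.get_restfreqs(ids=0)   # list of doubles for id 0
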
[256]1308 """
[1819]1309 if ids is None:
1310 rfreqs={}
1311 idlist = self.getmolnos()
1312 for i in idlist:
1313 rfreqs[i]=list(self._getrestfreqs(i))
1314 return rfreqs
1315 else:
1316 if type(ids)==list or type(ids)==tuple:
1317 rfreqs={}
1318 for i in ids:
1319 rfreqs[i]=list(self._getrestfreqs(i))
1320 return rfreqs
1321 else:
1322 return list(self._getrestfreqs(ids))
1323 #return list(self._getrestfreqs(ids))
[102]1324
[931]1325 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1326 """\
[931]1327 Set or replace the restfrequency(s).
1328 If the 'freqs' argument holds a scalar,
1329 then that rest frequency will be applied to all the selected
1330 data. If the 'freqs' argument holds
1331 a vector, then it MUST be of equal or smaller length than
1332 the number of IFs (and the available restfrequencies will be
1333 replaced by this vector). In this case, *all* data have
1334 the restfrequency set per IF according
1335 to the corresponding value you give in the 'freqs' vector.
[1118]1336 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1337 IF 1 gets restfreq 2e9.
[1846]1338
[1395]1339 You can also specify the frequencies via a linecatalog.
[1153]1340
[931]1341 Parameters:
[1846]1342
[931]1343 freqs: list of rest frequency values or string identifiers
1344 unit: unit for rest frequency (default 'Hz')
[402]1345
[1846]1346
1347 Example::
1348
[1819]1349 # set the given restfrequency for the all currently selected IFs
[931]1350 scan.set_restfreqs(freqs=1.4e9)
[1845]1351 # set restfrequencies for the n IFs (n > 1) in the order of the
1352 # list, i.e
1353 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1354 # len(list_of_restfreqs) == nIF
1355 # for nIF == 1 the following will set multiple restfrequency for
1356 # that IF
[1819]1357 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1358 # set multiple restfrequencies per IF. as a list of lists where
1359 # the outer list has nIF elements, the inner s arbitrary
1360 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1361
[1846]1362 *Note*:
[1845]1363
[931]1364 To do more sophisticated restfrequency setting, e.g. on a
1365 source and IF basis, use scantable.set_selection() before using
[1846]1366 this function::
[931]1367
[1846]1368 # provided your scantable is called scan
1369 selection = selector()
1370 selection.set_name("ORION*")
1371 selection.set_ifs([1])
1372 scan.set_selection(selection)
1373 scan.set_restfreqs(freqs=86.6e9)
1374
[931]1375 """
1376 varlist = vars()
[1157]1377 from asap import linecatalog
1378 # simple value
[1118]1379 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1380 self._setrestfreqs([freqs], [""], unit)
[1157]1381 # list of values
[1118]1382 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1383 # list values are scalars
[1118]1384 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1385 if len(freqs) == 1:
1386 self._setrestfreqs(freqs, [""], unit)
1387 else:
1388 # allow the 'old' mode of setting mulitple IFs
1389 sel = selector()
1390 savesel = self._getselection()
1391 iflist = self.getifnos()
1392 if len(freqs)>len(iflist):
1393 raise ValueError("number of elements in list of list "
1394 "exeeds the current IF selections")
1395 iflist = self.getifnos()
1396 for i, fval in enumerate(freqs):
1397 sel.set_ifs(iflist[i])
1398 self._setselection(sel)
1399 self._setrestfreqs([fval], [""], unit)
1400 self._setselection(savesel)
1401
1402 # list values are dict, {'value'=, 'name'=)
[1157]1403 elif isinstance(freqs[-1], dict):
[1845]1404 values = []
1405 names = []
1406 for d in freqs:
1407 values.append(d["value"])
1408 names.append(d["name"])
1409 self._setrestfreqs(values, names, unit)
[1819]1410 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1411 sel = selector()
1412 savesel = self._getselection()
[1322]1413 iflist = self.getifnos()
[1819]1414 if len(freqs)>len(iflist):
[1845]1415 raise ValueError("number of elements in list of list exeeds"
1416 " the current IF selections")
1417 for i, fval in enumerate(freqs):
[1322]1418 sel.set_ifs(iflist[i])
[1259]1419 self._setselection(sel)
[1845]1420 self._setrestfreqs(fval, [""], unit)
[1157]1421 self._setselection(savesel)
1422 # freqs are to be taken from a linecatalog
[1153]1423 elif isinstance(freqs, linecatalog):
1424 sel = selector()
 1425 savesel = self._getselection()
 iflist = self.getifnos()
[1153]1426 for i in xrange(freqs.nrow()):
[1322]1427 sel.set_ifs(iflist[i])
[1153]1428 self._setselection(sel)
[1845]1429 self._setrestfreqs([freqs.get_frequency(i)],
1430 [freqs.get_name(i)], "MHz")
[1153]1431 # ensure that we are not iterating past nIF
1432 if i == self.nif()-1: break
1433 self._setselection(savesel)
[931]1434 else:
1435 return
1436 self._add_history("set_restfreqs", varlist)
1437
[1360]1438 def shift_refpix(self, delta):
[1846]1439 """\
[1589]1440 Shift the reference pixel of the Spectral Coordinate by an
1441 integer amount.
[1846]1442
[1589]1443 Parameters:
[1846]1444
[1589]1445 delta: the amount to shift by
[1846]1446
1447 *Note*:
1448
[1589]1449 Be careful using this with broadband data.
[1846]1450
[1360]1451 """
[1731]1452 Scantable.shift_refpix(self, delta)
[931]1453
[1259]1454 def history(self, filename=None):
[1846]1455 """\
[1259]1456 Print the history. Optionally to a file.
[1846]1457
[1348]1458 Parameters:
[1846]1459
[1348]1460 filename: The name of the file to save the history to.
[1846]1461
[1259]1462 """
[484]1463 hist = list(self._gethistory())
[794]1464 out = "-"*80
[484]1465 for h in hist:
[489]1466 if h.startswith("---"):
[794]1467 out += "\n"+h
[489]1468 else:
1469 items = h.split("##")
1470 date = items[0]
1471 func = items[1]
1472 items = items[2:]
[794]1473 out += "\n"+date+"\n"
1474 out += "Function: %s\n Parameters:" % (func)
[489]1475 for i in items:
1476 s = i.split("=")
[1118]1477 out += "\n %s = %s" % (s[0], s[1])
[794]1478 out += "\n"+"-"*80
[1259]1479 if filename is not None:
1480 if filename is "":
1481 filename = 'scantable_history.txt'
1482 import os
1483 filename = os.path.expandvars(os.path.expanduser(filename))
1484 if not os.path.isdir(filename):
1485 data = open(filename, 'w')
1486 data.write(out)
1487 data.close()
1488 else:
1489 msg = "Illegal file name '%s'." % (filename)
1490 if rcParams['verbose']:
[1819]1491 #print msg
1492 asaplog.push( msg )
1493 print_log( 'ERROR' )
[1259]1494 else:
1495 raise IOError(msg)
1496 if rcParams['verbose']:
1497 try:
1498 from IPython.genutils import page as pager
1499 except ImportError:
1500 from pydoc import pager
1501 pager(out)
1502 else:
1503 return out
[484]1504 return
[513]1505 #
1506 # Maths business
1507 #
[1589]1508 @print_log_dec
[931]1509 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1510 """\
[1070]1511 Return the (time) weighted average of a scan.
[1846]1512
1513 *Note*:
1514
[1070]1515 in channels only - align if necessary
[1846]1516
[513]1517 Parameters:
[1846]1518
[513]1519 mask: an optional mask (only used for 'var' and 'tsys'
1520 weighting)
[558]1521 scanav: True averages each scan separately.
1522 False (default) averages all scans together,
[1099]1523 weight: Weighting scheme.
1524 'none' (mean no weight)
1525 'var' (1/var(spec) weighted)
1526 'tsys' (1/Tsys**2 weighted)
1527 'tint' (integration time weighted)
1528 'tintsys' (Tint/Tsys**2)
1529 'median' ( median averaging)
[535]1530 The default is 'tint'
[931]1531 align: align the spectra in velocity before averaging. It takes
1532 the time of the first spectrum as reference time.
[1846]1533
1534 Example::
1535
[513]1536 # time average the scantable without using a mask
[710]1537 newscan = scan.average_time()
[1846]1538
[513]1539 """
1540 varlist = vars()
[1593]1541 weight = weight or 'TINT'
1542 mask = mask or ()
1543 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1544 scan = (self, )
[989]1545 try:
[1118]1546 if align:
1547 scan = (self.freq_align(insitu=False), )
1548 s = None
1549 if weight.upper() == 'MEDIAN':
1550 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1551 scanav))
1552 else:
1553 s = scantable(self._math._average(scan, mask, weight.upper(),
1554 scanav))
1555 except RuntimeError, msg:
[989]1556 if rcParams['verbose']:
[1819]1557 #print msg
1558 print_log()
1559 asaplog.push( str(msg) )
1560 print_log( 'ERROR' )
[989]1561 return
1562 else: raise
[1099]1563 s._add_history("average_time", varlist)
[1819]1564 print_log()
[513]1565 return s
[710]1566
[1589]1567 @print_log_dec
[876]1568 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1569 """\
[513]1570 Return a scan where all spectra are converted to either
1571 Jansky or Kelvin depending upon the flux units of the scan table.
1572 By default the function tries to look the values up internally.
1573 If it can't find them (or if you want to over-ride), you must
1574 specify EITHER jyperk OR eta (and D which it will try to look up
1575 also if you don't set it). jyperk takes precedence if you set both.
[1846]1576
[513]1577 Parameters:
[1846]1578
[513]1579 jyperk: the Jy / K conversion factor
1580 eta: the aperture efficiency
 1581 d: the geometric diameter (metres)
1582 insitu: if False a new scantable is returned.
1583 Otherwise, the scaling is done in-situ
1584 The default is taken from .asaprc (False)
[1846]1585
[513]1586 """
1587 if insitu is None: insitu = rcParams['insitu']
[876]1588 self._math._setinsitu(insitu)
[513]1589 varlist = vars()
[1593]1590 jyperk = jyperk or -1.0
1591 d = d or -1.0
1592 eta = eta or -1.0
[876]1593 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1594 s._add_history("convert_flux", varlist)
[1819]1595 print_log()
[876]1596 if insitu: self._assign(s)
1597 else: return s
[513]1598
[1589]1599 @print_log_dec
[876]1600 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1601 """\
[513]1602 Return a scan after applying a gain-elevation correction.
1603 The correction can be made via either a polynomial or a
1604 table-based interpolation (and extrapolation if necessary).
1605 You specify polynomial coefficients, an ascii table or neither.
1606 If you specify neither, then a polynomial correction will be made
1607 with built in coefficients known for certain telescopes (an error
1608 will occur if the instrument is not known).
1609 The data and Tsys are *divided* by the scaling factors.
[1846]1610
[513]1611 Parameters:
[1846]1612
[513]1613 poly: Polynomial coefficients (default None) to compute a
1614 gain-elevation correction as a function of
1615 elevation (in degrees).
1616 filename: The name of an ascii file holding correction factors.
1617 The first row of the ascii file must give the column
1618 names and these MUST include columns
1619 "ELEVATION" (degrees) and "FACTOR" (multiply data
1620 by this) somewhere.
1621 The second row must give the data type of the
1622 column. Use 'R' for Real and 'I' for Integer.
1623 An example file would be
1624 (actual factors are arbitrary) :
1625
1626 TIME ELEVATION FACTOR
1627 R R R
1628 0.1 0 0.8
1629 0.2 20 0.85
1630 0.3 40 0.9
1631 0.4 60 0.85
1632 0.5 80 0.8
1633 0.6 90 0.75
1634 method: Interpolation method when correcting from a table.
1635 Values are "nearest", "linear" (default), "cubic"
1636 and "spline"
1637 insitu: if False a new scantable is returned.
1638 Otherwise, the scaling is done in-situ
1639 The default is taken from .asaprc (False)
[1846]1640
[513]1641 """
1642
1643 if insitu is None: insitu = rcParams['insitu']
[876]1644 self._math._setinsitu(insitu)
[513]1645 varlist = vars()
[1593]1646 poly = poly or ()
[513]1647 from os.path import expandvars
1648 filename = expandvars(filename)
[876]1649 s = scantable(self._math._gainel(self, poly, filename, method))
1650 s._add_history("gain_el", varlist)
[1819]1651 print_log()
[1593]1652 if insitu:
1653 self._assign(s)
1654 else:
1655 return s
[710]1656
[1589]1657 @print_log_dec
[931]1658 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1659 """\
[513]1660 Return a scan where all rows have been aligned in frequency/velocity.
1661 The alignment frequency frame (e.g. LSRK) is that set by function
1662 set_freqframe.
[1846]1663
[513]1664 Parameters:
1665 reftime: reference time to align at. By default, the time of
1666 the first row of data is used.
1667 method: Interpolation method for regridding the spectra.
1668 Choose from "nearest", "linear", "cubic" (default)
1669 and "spline"
1670 insitu: if False a new scantable is returned.
1671 Otherwise, the scaling is done in-situ
1672 The default is taken from .asaprc (False)
[1846]1673
[513]1674 """
[931]1675 if insitu is None: insitu = rcParams["insitu"]
[876]1676 self._math._setinsitu(insitu)
[513]1677 varlist = vars()
[1593]1678 reftime = reftime or ""
[931]1679 s = scantable(self._math._freq_align(self, reftime, method))
[876]1680 s._add_history("freq_align", varlist)
[1819]1681 print_log()
[876]1682 if insitu: self._assign(s)
1683 else: return s
[513]1684
[1589]1685 @print_log_dec
[1725]1686 def opacity(self, tau=None, insitu=None):
[1846]1687 """\
[513]1688 Apply an opacity correction. The data
1689 and Tsys are multiplied by the correction factor.
[1846]1690
[513]1691 Parameters:
[1689]1692 tau: (list of) opacity from which the correction factor is
[513]1693 exp(tau*ZD)
[1689]1694 where ZD is the zenith-distance.
1695 If a list is provided, it has to be of length nIF,
1696 nIF*nPol or 1 and in order of IF/POL, e.g.
1697 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1698 if tau is `None` the opacities are determined from a
1699 model.
[513]1700 insitu: if False a new scantable is returned.
1701 Otherwise, the scaling is done in-situ
1702 The default is taken from .asaprc (False)
[1846]1703
[513]1704 """
1705 if insitu is None: insitu = rcParams['insitu']
[876]1706 self._math._setinsitu(insitu)
[513]1707 varlist = vars()
[1689]1708 if not hasattr(tau, "__len__"):
1709 tau = [tau]
[876]1710 s = scantable(self._math._opacity(self, tau))
1711 s._add_history("opacity", varlist)
[1819]1712 print_log()
[876]1713 if insitu: self._assign(s)
1714 else: return s
[513]1715
[1589]1716 @print_log_dec
[513]1717 def bin(self, width=5, insitu=None):
[1846]1718 """\
[513]1719 Return a scan where all spectra have been binned up.
[1846]1720
[1348]1721 Parameters:
[1846]1722
[513]1723 width: The bin width (default=5) in pixels
1724 insitu: if False a new scantable is returned.
1725 Otherwise, the scaling is done in-situ
1726 The default is taken from .asaprc (False)
[1846]1727
[513]1728 """
1729 if insitu is None: insitu = rcParams['insitu']
[876]1730 self._math._setinsitu(insitu)
[513]1731 varlist = vars()
[876]1732 s = scantable(self._math._bin(self, width))
[1118]1733 s._add_history("bin", varlist)
[1819]1734 print_log()
[1589]1735 if insitu:
1736 self._assign(s)
1737 else:
1738 return s
[513]1739
[1589]1740 @print_log_dec
[513]1741 def resample(self, width=5, method='cubic', insitu=None):
[1846]1742 """\
[1348]1743 Return a scan where all spectra have been resampled.
[1573]1744
[1348]1745 Parameters:
[1846]1746
[513]1747 width: The bin width (default=5) in pixels
1748 method: Interpolation method when correcting from a table.
1749 Values are "nearest", "linear", "cubic" (default)
1750 and "spline"
1751 insitu: if False a new scantable is returned.
1752 Otherwise, the scaling is done in-situ
1753 The default is taken from .asaprc (False)
[1846]1754
[513]1755 """
1756 if insitu is None: insitu = rcParams['insitu']
[876]1757 self._math._setinsitu(insitu)
[513]1758 varlist = vars()
[876]1759 s = scantable(self._math._resample(self, method, width))
[1118]1760 s._add_history("resample", varlist)
[1819]1761 print_log()
[876]1762 if insitu: self._assign(s)
1763 else: return s
[513]1764
[1589]1765 @print_log_dec
[946]1766 def average_pol(self, mask=None, weight='none'):
[1846]1767 """\
[946]1768 Average the Polarisations together.
[1846]1769
[946]1770 Parameters:
[1846]1771
[946]1772 mask: An optional mask defining the region, where the
1773 averaging will be applied. The output will have all
1774 specified points masked.
1775 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1776 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1777
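 Example (a usage sketch)::

 # average the polarisations, weighting by 1/Tsys**2
 polaveraged = scan.average_pol(weight='tsys')
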
[946]1778 """
1779 varlist = vars()
[1593]1780 mask = mask or ()
[1010]1781 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1782 s._add_history("average_pol", varlist)
[1819]1783 print_log()
[992]1784 return s
[513]1785
[1589]1786 @print_log_dec
[1145]1787 def average_beam(self, mask=None, weight='none'):
[1846]1788 """\
[1145]1789 Average the Beams together.
[1846]1790
[1145]1791 Parameters:

 1792 mask: An optional mask defining the region where the
1793 averaging will be applied. The output will have all
1794 specified points masked.
1795 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1796 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1797
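Example (a minimal sketch)::

# average the beams together, weighting by 1/var(spec)
beamav = scan.average_beam(weight='var')
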
[1145]1798 """
1799 varlist = vars()
[1593]1800 mask = mask or ()
[1145]1801 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1802 s._add_history("average_beam", varlist)
[1819]1803 print_log()
[1145]1804 return s
1805
[1586]1806 def parallactify(self, pflag):
[1846]1807 """\
[1843]1808 Set a flag to indicate whether this data should be treated as having
[1617]1809 been 'parallactified' (total phase == 0.0)
[1846]1810
[1617]1811 Parameters:

[1843]1812 pflag: Bool indicating whether to turn this on (True) or
[1617]1813 off (False)
[1846]1814
[1617]1815 """
[1586]1816 varlist = vars()
1817 self._parallactify(pflag)
1818 self._add_history("parallactify", varlist)
1819
[1589]1820 @print_log_dec
[992]1821 def convert_pol(self, poltype=None):
[1846]1822 """\
[992]1823 Convert the data to a different polarisation type.
[1565]1824 Note that you will need cross-polarisation terms for most conversions.
[1846]1825
[992]1826 Parameters:

1827 poltype: The new polarisation type. Valid types are:
[1565]1828 "linear", "circular", "stokes" and "linpol"
[1846]1829
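Example (a minimal sketch; requires cross-polarisation terms in the data)::

# convert linear polarisation data to Stokes parameters
stokes = scan.convert_pol("stokes")
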
[992]1830 """
1831 varlist = vars()
1832 try:
1833 s = scantable(self._math._convertpol(self, poltype))
[1118]1834 except RuntimeError, msg:
[992]1835 if rcParams['verbose']:
[1819]1836 #print msg
1837 print_log()
1838 asaplog.push( str(msg) )
1839 print_log( 'ERROR' )
[1118]1840 return
[992]1841 else:
1842 raise
[1118]1843 s._add_history("convert_pol", varlist)
[1819]1844 print_log()
[992]1845 return s
1846
[1589]1847 @print_log_dec
[1819]1848 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[1846]1849 """\
[513]1850 Smooth the spectrum by the specified kernel (conserving flux).
[1846]1851
[513]1852 Parameters:
[1846]1853
[513]1854 kernel: The type of smoothing kernel. Select from
[1574]1855 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1856 or 'poly'
[513]1857 width: The width of the kernel in pixels. For hanning this is
 1858 ignored, otherwise it defaults to 5 pixels.
1859 For 'gaussian' it is the Full Width Half
1860 Maximum. For 'boxcar' it is the full width.
[1574]1861 For 'rmedian' and 'poly' it is the half width.
1862 order: Optional parameter for 'poly' kernel (default is 2), to
 1863 specify the order of the polynomial. Ignored by all other
1864 kernels.
[1819]1865 plot: plot the original and the smoothed spectra.
 1866 In this mode each individual spectrum has to be approved,
 1867 by typing 'y' or 'n'
[513]1868 insitu: if False a new scantable is returned.
1869 Otherwise, the scaling is done in-situ
1870 The default is taken from .asaprc (False)
[1846]1871
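Example (a minimal sketch; kernel and width are illustrative only)::

# smooth with a gaussian kernel of 10 channels FWHM,
# returning a new scantable
smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)
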
[513]1872 """
1873 if insitu is None: insitu = rcParams['insitu']
[876]1874 self._math._setinsitu(insitu)
[513]1875 varlist = vars()
[1819]1876
1877 if plot: orgscan = self.copy()
1878
[1574]1879 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1880 s._add_history("smooth", varlist)
[1819]1881
1882 if plot:
1883 if rcParams['plotter.gui']:
1884 from asap.asaplotgui import asaplotgui as asaplot
1885 else:
1886 from asap.asaplot import asaplot
1887 self._p=asaplot()
1888 self._p.set_panels()
1889 ylab=s._get_ordinate_label()
1890 #self._p.palette(0,["#777777","red"])
1891 for r in xrange(s.nrow()):
1892 xsm=s._getabcissa(r)
1893 ysm=s._getspectrum(r)
1894 xorg=orgscan._getabcissa(r)
1895 yorg=orgscan._getspectrum(r)
1896 self._p.clear()
1897 self._p.hold()
1898 self._p.set_axes('ylabel',ylab)
1899 self._p.set_axes('xlabel',s._getabcissalabel(r))
1900 self._p.set_axes('title',s._getsourcename(r))
1901 self._p.set_line(label='Original',color="#777777")
1902 self._p.plot(xorg,yorg)
1903 self._p.set_line(label='Smoothed',color="red")
1904 self._p.plot(xsm,ysm)
1905 ### Ugly part for legend
1906 for i in [0,1]:
1907 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1908 self._p.release()
1909 ### Ugly part for legend
1910 self._p.subplots[0]['lines']=[]
1911 res = raw_input("Accept smoothing ([y]/n): ")
1912 if res.upper() == 'N':
1913 s._setspectrum(yorg, r)
1914 self._p.unmap()
1915 self._p = None
1916 del orgscan
1917
1918 print_log()
[876]1919 if insitu: self._assign(s)
1920 else: return s
[513]1921
[1589]1922 @print_log_dec
[1617]1923 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1924 insitu=None):
[1846]1925 """\
[513]1926 Return a scan which has been baselined (all rows) by a polynomial.
[1846]1927
[513]1928 Parameters:
[1846]1929
[794]1930 mask: an optional mask
1931 order: the order of the polynomial (default is 0)
[1061]1932 plot: plot the fit and the residual. In this mode each
 1933 individual fit has to be approved, by typing 'y'
 1934 or 'n'
[1391]1935 uselin: use linear polynomial fit
[794]1936 insitu: if False a new scantable is returned.
1937 Otherwise, the scaling is done in-situ
1938 The default is taken from .asaprc (False)
[1846]1939
1940 Example::
1941
[513]1942 # return a scan baselined by a third order polynomial,
1943 # not using a mask
1944 bscan = scan.poly_baseline(order=3)
[1846]1945
[579]1946 """
[513]1947 if insitu is None: insitu = rcParams['insitu']
[1819]1948 if not insitu:
1949 workscan = self.copy()
1950 else:
1951 workscan = self
[513]1952 varlist = vars()
1953 if mask is None:
[1295]1954 mask = [True for i in xrange(self.nchan(-1))]
[1819]1955
[513]1956 from asap.asapfitter import fitter
[1217]1957 try:
1958 f = fitter()
[1391]1959 if uselin:
1960 f.set_function(lpoly=order)
1961 else:
1962 f.set_function(poly=order)
[1819]1963
1964 rows = range(workscan.nrow())
1965 if len(rows) > 0:
1966 self.blpars = []
1967
1968 for r in rows:
1969 # take into account flagtra info (CAS-1434)
1970 flagtra = workscan._getmask(r)
1971 actualmask = mask[:]
1972 if len(actualmask) == 0:
1973 actualmask = list(flagtra[:])
1974 else:
1975 if len(actualmask) != len(flagtra):
 1976 raise RuntimeError, "Mask and flagtra have different lengths"
1977 else:
1978 for i in range(0, len(actualmask)):
1979 actualmask[i] = actualmask[i] and flagtra[i]
1980 f.set_scan(workscan, actualmask)
1981 f.x = workscan._getabcissa(r)
1982 f.y = workscan._getspectrum(r)
1983 f.data = None
1984 f.fit()
1985 if plot:
1986 f.plot(residual=True)
1987 x = raw_input("Accept fit ( [y]/n ): ")
1988 if x.upper() == 'N':
1989 self.blpars.append(None)
1990 continue
1991 workscan._setspectrum(f.fitter.getresidual(), r)
1992 self.blpars.append(f.get_parameters())
1993
1994 if plot:
1995 f._p.unmap()
1996 f._p = None
1997 workscan._add_history("poly_baseline", varlist)
1998 print_log()
1999 if insitu: self._assign(workscan)
2000 else: return workscan
[1217]2001 except RuntimeError:
2002 msg = "The fit failed, possibly because it didn't converge."
2003 if rcParams['verbose']:
[1819]2004 #print msg
2005 print_log()
2006 asaplog.push( str(msg) )
2007 print_log( 'ERROR' )
[1217]2008 return
2009 else:
2010 raise RuntimeError(msg)
[513]2011
[1819]2012
[1118]2013 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]2014 threshold=3, chan_avg_limit=1, plot=False,
2015 insitu=None):
[1846]2016 """\
[880]2017 Return a scan which has been baselined (all rows) by a polynomial.
2018 Spectral lines are detected first using linefinder and masked out
2019 to avoid them affecting the baseline solution.
2020
2021 Parameters:
[1846]2022
[880]2023 mask: an optional mask retrieved from scantable
[1846]2024
 2025 edge: an optional number of channels to drop at the edge of
 2026 the spectrum. If only one value is
[880]2027 specified, the same number will be dropped from
2028 both sides of the spectrum. Default is to keep
[907]2029 all channels. Nested tuples represent individual
[976]2030 edge selection for different IFs (the number of spectral
 2031 channels can differ between IFs)
[1846]2032
[880]2033 order: the order of the polynomial (default is 0)
[1846]2034
[880]2035 threshold: the threshold used by line finder. It is better to
2036 keep it large as only strong lines affect the
2037 baseline solution.
[1846]2038
[1280]2039 chan_avg_limit:
 2040 a maximum number of consecutive spectral channels to
2041 average during the search of weak and broad lines.
2042 The default is no averaging (and no search for weak
2043 lines). If such lines can affect the fitted baseline
2044 (e.g. a high order polynomial is fitted), increase this
2045 parameter (usually values up to 8 are reasonable). Most
2046 users of this method should find the default value
2047 sufficient.
[1846]2048
[1061]2049 plot: plot the fit and the residual. In this mode each
 2050 individual fit has to be approved, by typing 'y'
2051 or 'n'
[1846]2052
[880]2053 insitu: if False a new scantable is returned.
2054 Otherwise, the scaling is done in-situ
2055 The default is taken from .asaprc (False)
2056
[1846]2057
2058 Example::
2059
2060 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2061
[880]2062 """
2063 if insitu is None: insitu = rcParams['insitu']
2064 varlist = vars()
2065 from asap.asapfitter import fitter
2066 from asap.asaplinefind import linefinder
2067 from asap import _is_sequence_or_number as _is_valid
2068
[976]2069 # check whether edge is set up for each IF individually
[1118]2070 individualedge = False;
2071 if len(edge) > 1:
2072 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2073 individualedge = True;
[907]2074
[1118]2075 if not _is_valid(edge, int) and not individualedge:
[909]2076 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2077 pair of integers specified as a tuple. Nested tuples are allowed \
2078 to make individual selection for different IFs."
[919]2079
[1118]2080 curedge = (0, 0)
2081 if individualedge:
2082 for edgepar in edge:
2083 if not _is_valid(edgepar, int):
2084 raise ValueError, "Each element of the 'edge' tuple has \
2085 to be a pair of integers or an integer."
[907]2086 else:
[1118]2087 curedge = edge;
[880]2088
2089 # setup fitter
2090 f = fitter()
2091 f.set_function(poly=order)
2092
2093 # setup line finder
[1118]2094 fl = linefinder()
[1268]2095 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2096
2097 if not insitu:
[1118]2098 workscan = self.copy()
[880]2099 else:
[1118]2100 workscan = self
[880]2101
[907]2102 fl.set_scan(workscan)
2103
[1118]2104 rows = range(workscan.nrow())
[1819]2105 # Save parameters of baseline fits & masklists as a class attribute.
2106 # NOTICE: It does not reflect changes in scantable!
2107 if len(rows) > 0:
2108 self.blpars=[]
2109 self.masklists=[]
[880]2110 asaplog.push("Processing:")
2111 for r in rows:
[1118]2112 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2113 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2114 workscan.getpol(r), workscan.getcycle(r))
[880]2115 asaplog.push(msg, False)
[907]2116
[976]2117 # figure out edge parameter
[1118]2118 if individualedge:
 2119 if len(edge) <= workscan.getif(r):
 2120 raise RuntimeError, "Number of edge elements appears to " \
 2121 "be less than the number of IFs"
2122 curedge = edge[workscan.getif(r)]
[919]2123
[1819]2124 # take into account flagtra info (CAS-1434)
2125 flagtra = workscan._getmask(r)
2126 actualmask = mask[:]
2127 if len(actualmask) == 0:
2128 actualmask = list(flagtra[:])
2129 else:
2130 if len(actualmask) != len(flagtra):
 2131 raise RuntimeError, "Mask and flagtra have different lengths"
2132 else:
2133 for i in range(0, len(actualmask)):
2134 actualmask[i] = actualmask[i] and flagtra[i]
2135
[976]2136 # setup line finder
[1819]2137 fl.find_lines(r, actualmask, curedge)
2138 outmask=fl.get_mask()
2139 f.set_scan(workscan, fl.get_mask())
2140 f.x = workscan._getabcissa(r)
2141 f.y = workscan._getspectrum(r)
2142 f.data = None
[880]2143 f.fit()
[1819]2144
2145 # Show mask list
2146 masklist=workscan.get_masklist(fl.get_mask(),row=r)
2147 msg = "mask range: "+str(masklist)
2148 asaplog.push(msg, False)
2149
[1061]2150 if plot:
2151 f.plot(residual=True)
2152 x = raw_input("Accept fit ( [y]/n ): ")
2153 if x.upper() == 'N':
[1819]2154 self.blpars.append(None)
2155 self.masklists.append(None)
[1061]2156 continue
[1819]2157
[880]2158 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2159 self.blpars.append(f.get_parameters())
2160 self.masklists.append(masklist)
[1061]2161 if plot:
2162 f._p.unmap()
2163 f._p = None
2164 workscan._add_history("auto_poly_baseline", varlist)
[880]2165 if insitu:
2166 self._assign(workscan)
2167 else:
2168 return workscan
2169
[1589]2170 @print_log_dec
[914]2171 def rotate_linpolphase(self, angle):
[1846]2172 """\
[914]2173 Rotate the phase of the complex polarization O=Q+iU correlation.
2174 This is always done in situ in the raw data. So if you call this
2175 function more than once then each call rotates the phase further.
[1846]2176
[914]2177 Parameters:
[1846]2178
[914]2179 angle: The angle (degrees) to rotate (add) by.
[1846]2180
2181 Example::
2182
[914]2183 scan.rotate_linpolphase(2.3)
[1846]2184
[914]2185 """
2186 varlist = vars()
[936]2187 self._math._rotate_linpolphase(self, angle)
[914]2188 self._add_history("rotate_linpolphase", varlist)
[1819]2189 print_log()
[914]2190 return
[710]2191
[1589]2192 @print_log_dec
[914]2193 def rotate_xyphase(self, angle):
[1846]2194 """\
[914]2195 Rotate the phase of the XY correlation. This is always done in situ
2196 in the data. So if you call this function more than once
2197 then each call rotates the phase further.
[1846]2198
[914]2199 Parameters:
[1846]2200
[914]2201 angle: The angle (degrees) to rotate (add) by.
[1846]2202
2203 Example::
2204
[914]2205 scan.rotate_xyphase(2.3)
[1846]2206
[914]2207 """
2208 varlist = vars()
[936]2209 self._math._rotate_xyphase(self, angle)
[914]2210 self._add_history("rotate_xyphase", varlist)
[1819]2211 print_log()
[914]2212 return
2213
[1589]2214 @print_log_dec
[914]2215 def swap_linears(self):
[1846]2216 """\
[1573]2217 Swap the linear polarisations XX and YY, or more generally the first two
[1348]2218 polarisations, as this also works for circulars.
[914]2219 """
2220 varlist = vars()
[936]2221 self._math._swap_linears(self)
[914]2222 self._add_history("swap_linears", varlist)
[1819]2223 print_log()
[914]2224 return
2225
[1589]2226 @print_log_dec
[914]2227 def invert_phase(self):
[1846]2228 """\
[914]2229 Invert the phase of the complex polarisation.
2230 """
2231 varlist = vars()
[936]2232 self._math._invert_phase(self)
[914]2233 self._add_history("invert_phase", varlist)
2234 return
2235
[1589]2236 @print_log_dec
[876]2237 def add(self, offset, insitu=None):
[1846]2238 """\
[513]2239 Return a scan where all spectra have the offset added
[1846]2240
[513]2241 Parameters:
[1846]2242
[513]2243 offset: the offset
2244 insitu: if False a new scantable is returned.
2245 Otherwise, the scaling is done in-situ
2246 The default is taken from .asaprc (False)
[1846]2247
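Example (a minimal sketch; the offset is illustrative only)::

# add a constant offset of 0.05 to every spectrum
shifted = scan.add(0.05, insitu=False)
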
[513]2248 """
2249 if insitu is None: insitu = rcParams['insitu']
[876]2250 self._math._setinsitu(insitu)
[513]2251 varlist = vars()
[876]2252 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2253 s._add_history("add", varlist)
[876]2254 if insitu:
2255 self._assign(s)
2256 else:
[513]2257 return s
2258
[1589]2259 @print_log_dec
[1308]2260 def scale(self, factor, tsys=True, insitu=None):
[1846]2261 """\
2262
[513]2263 Return a scan where all spectra are scaled by the given 'factor'
[1846]2264
[513]2265 Parameters:
[1846]2266
[1819]2267 factor: the scaling factor (float or 1D float list)
[513]2268 insitu: if False a new scantable is returned.
2269 Otherwise, the scaling is done in-situ
2270 The default is taken from .asaprc (False)
2271 tsys: if True (default) then apply the operation to Tsys
2272 as well as the data
[1846]2273
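Example (a minimal sketch; the factor is illustrative only)::

# scale the spectra and Tsys by 1.3, returning a new scantable
scaled = scan.scale(1.3, tsys=True, insitu=False)
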
[513]2274 """
2275 if insitu is None: insitu = rcParams['insitu']
[876]2276 self._math._setinsitu(insitu)
[513]2277 varlist = vars()
[1819]2278 s = None
2279 import numpy
2280 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2281 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2282 from asapmath import _array2dOp
2283 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2284 else:
2285 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2286 else:
2287 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2288 s._add_history("scale", varlist)
[1819]2289 print_log()
[876]2290 if insitu:
2291 self._assign(s)
2292 else:
[513]2293 return s
2294
[1504]2295 def set_sourcetype(self, match, matchtype="pattern",
2296 sourcetype="reference"):
[1846]2297 """\
[1502]2298 Set the type of the source to be a source or reference scan
[1846]2299 using the provided pattern.
2300
[1502]2301 Parameters:
[1846]2302
[1504]2303 match: a Unix style pattern, regular expression or selector
2304 matchtype: 'pattern' (default) UNIX style pattern or
2305 'regex' regular expression
[1502]2306 sourcetype: the type of the source to use (source/reference)
[1846]2307
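Example (a minimal sketch; the pattern is illustrative only)::

# mark all scans whose source name ends in '_R' as reference scans
scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")
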
[1502]2308 """
2309 varlist = vars()
2310 basesel = self.get_selection()
2311 stype = -1
2312 if sourcetype.lower().startswith("r"):
2313 stype = 1
2314 elif sourcetype.lower().startswith("s"):
2315 stype = 0
[1504]2316 else:
[1502]2317 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2318 if matchtype.lower().startswith("p"):
2319 matchtype = "pattern"
2320 elif matchtype.lower().startswith("r"):
2321 matchtype = "regex"
2322 else:
2323 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2324 sel = selector()
2325 if isinstance(match, selector):
2326 sel = match
2327 else:
[1504]2328 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2329 self.set_selection(basesel+sel)
2330 self._setsourcetype(stype)
2331 self.set_selection(basesel)
[1573]2332 self._add_history("set_sourcetype", varlist)
[1502]2333
[1589]2334 @print_log_dec
[1819]2335 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2336 """\
[670]2337 This function allows you to build quotients automatically.
[1819]2338 It assumes the observation to have the same number of
[670]2339 "ons" and "offs".
[1846]2340
[670]2341 Parameters:
[1846]2342
[710]2343 preserve: you can preserve (default) the continuum or
2344 remove it. The equations used are
[670]2345 preserve: Output = Toff * (on/off) - Toff
[1070]2346 remove: Output = Toff * (on/off) - Ton
[1573]2347 mode: the on/off detection mode
[1348]2348 'paired' (default)
2349 identifies 'off' scans by the
2350 trailing '_R' (Mopra/Parkes) or
2351 '_e'/'_w' (Tid) and matches
2352 on/off pairs from the observing pattern
[1502]2353 'time'
2354 finds the closest off in time
[1348]2355
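Example (a minimal sketch)::

# build quotients from the matched on/off pairs, removing the continuum
q = scan.auto_quotient(preserve=False, mode='paired')
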
[670]2356 """
[1348]2357 modes = ["time", "paired"]
[670]2358 if not mode in modes:
[876]2359 msg = "please provide valid mode. Valid modes are %s" % (modes)
2360 raise ValueError(msg)
2361 varlist = vars()
[1348]2362 s = None
2363 if mode.lower() == "paired":
2364 basesel = self.get_selection()
[1356]2365 sel = selector()+basesel
2366 sel.set_query("SRCTYPE==1")
2367 self.set_selection(sel)
[1348]2368 offs = self.copy()
2369 sel.set_query("SRCTYPE==0")
[1356]2370 self.set_selection(sel)
[1348]2371 ons = self.copy()
2372 s = scantable(self._math._quotient(ons, offs, preserve))
2373 self.set_selection(basesel)
2374 elif mode.lower() == "time":
2375 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2376 s._add_history("auto_quotient", varlist)
[1819]2377 print_log()
[876]2378 return s
[710]2379
[1589]2380 @print_log_dec
[1145]2381 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2382 """\
[1143]2383 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2384
[1143]2385 Parameters:
[1846]2386
[1145]2387 mask: an optional mask to be used when weight == 'stddev'
[1143]2388 weight: How to average the off beams. Default is 'median'.
[1145]2389 preserve: you can preserve (default) the continuum or
2390 remove it. The equations used are
2391 preserve: Output = Toff * (on/off) - Toff
2392 remove: Output = Toff * (on/off) - Ton
[1846]2393
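Example (a minimal sketch)::

# form the MX quotient, averaging the off beams with a median
q = scan.mx_quotient(weight='median', preserve=True)
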
[1217]2394 """
[1593]2395 mask = mask or ()
[1141]2396 varlist = vars()
2397 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2398 preoff = scantable(self._math._mx_extract(self, 'off'))
2399 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2400 from asapmath import quotient
[1145]2401 q = quotient(on, off, preserve)
[1143]2402 q._add_history("mx_quotient", varlist)
[1819]2403 print_log()
[1217]2404 return q
[513]2405
[1589]2406 @print_log_dec
[718]2407 def freq_switch(self, insitu=None):
[1846]2408 """\
[718]2409 Apply frequency switching to the data.
[1846]2410
[718]2411 Parameters:
[1846]2412
[718]2413 insitu: if False a new scantable is returned.
 2414 Otherwise, the switching is done in-situ
2415 The default is taken from .asaprc (False)
[1846]2416
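Example (a minimal sketch)::

# apply frequency switching, returning a new scantable
fscan = scan.freq_switch(insitu=False)
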
[718]2417 """
2418 if insitu is None: insitu = rcParams['insitu']
[876]2419 self._math._setinsitu(insitu)
[718]2420 varlist = vars()
[876]2421 s = scantable(self._math._freqswitch(self))
[1118]2422 s._add_history("freq_switch", varlist)
[1819]2423 print_log()
[876]2424 if insitu: self._assign(s)
2425 else: return s
[718]2426
[1589]2427 @print_log_dec
[780]2428 def recalc_azel(self):
[1846]2429 """Recalculate the azimuth and elevation for each position."""
[780]2430 varlist = vars()
[876]2431 self._recalcazel()
[780]2432 self._add_history("recalc_azel", varlist)
[1819]2433 print_log()
[780]2434 return
2435
[1589]2436 @print_log_dec
[513]2437 def __add__(self, other):
2438 varlist = vars()
2439 s = None
2440 if isinstance(other, scantable):
[1573]2441 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2442 elif isinstance(other, float):
[876]2443 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2444 else:
[718]2445 raise TypeError("Other input is not a scantable or float value")
[513]2446 s._add_history("operator +", varlist)
2447 return s
2448
[1589]2449 @print_log_dec
[513]2450 def __sub__(self, other):
2451 """
2452 implicit on all axes and on Tsys
2453 """
2454 varlist = vars()
2455 s = None
2456 if isinstance(other, scantable):
[1588]2457 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2458 elif isinstance(other, float):
[876]2459 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2460 else:
[718]2461 raise TypeError("Other input is not a scantable or float value")
[513]2462 s._add_history("operator -", varlist)
2463 return s
[710]2464
[1589]2465 @print_log_dec
[513]2466 def __mul__(self, other):
2467 """
2468 implicit on all axes and on Tsys
2469 """
2470 varlist = vars()
2471 s = None
2472 if isinstance(other, scantable):
[1588]2473 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2474 elif isinstance(other, float):
[876]2475 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2476 else:
[718]2477 raise TypeError("Other input is not a scantable or float value")
[513]2478 s._add_history("operator *", varlist)
2479 return s
2480
[710]2481
[1589]2482 @print_log_dec
[513]2483 def __div__(self, other):
2484 """
2485 implicit on all axes and on Tsys
2486 """
2487 varlist = vars()
2488 s = None
2489 if isinstance(other, scantable):
[1589]2490 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2491 elif isinstance(other, float):
2492 if other == 0.0:
[718]2493 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2494 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2495 else:
[718]2496 raise TypeError("Other input is not a scantable or float value")
[513]2497 s._add_history("operator /", varlist)
2498 return s
2499
[530]2500 def get_fit(self, row=0):
[1846]2501 """\
[530]2502 Print or return the stored fits for a row in the scantable
[1846]2503
[530]2504 Parameters:
[1846]2505
[530]2506 row: the row to which the fit has been applied.
[1846]2507
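Example (a minimal sketch; a dict is returned only when rcParams['verbose'] is False)::

# retrieve the stored fit for the first row
fitinfo = scan.get_fit(row=0)
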
[530]2508 """
 2509 if row >= self.nrow():
2510 return
[976]2511 from asap.asapfit import asapfit
[530]2512 fit = asapfit(self._getfit(row))
[718]2513 if rcParams['verbose']:
[1819]2514 #print fit
2515 asaplog.push( '%s' %(fit) )
2516 print_log()
[530]2517 return
2518 else:
2519 return fit.as_dict()
2520
[1483]2521 def flag_nans(self):
[1846]2522 """\
[1483]2523 Utility function to flag NaN values in the scantable.
2524 """
2525 import numpy
2526 basesel = self.get_selection()
2527 for i in range(self.nrow()):
[1589]2528 sel = self.get_row_selector(i)
2529 self.set_selection(basesel+sel)
[1483]2530 nans = numpy.isnan(self._getspectrum(0))
2531 if numpy.any(nans):
2532 bnans = [ bool(v) for v in nans]
2533 self.flag(bnans)
2534 self.set_selection(basesel)
2535
[1588]2536 def get_row_selector(self, rowno):
2537 return selector(beams=self.getbeam(rowno),
2538 ifs=self.getif(rowno),
2539 pols=self.getpol(rowno),
2540 scans=self.getscan(rowno),
2541 cycles=self.getcycle(rowno))
[1573]2542
[484]2543 def _add_history(self, funcname, parameters):
[1435]2544 if not rcParams['scantable.history']:
2545 return
[484]2546 # create date
2547 sep = "##"
2548 from datetime import datetime
2549 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2550 hist = dstr+sep
2551 hist += funcname+sep#cdate+sep
2552 if parameters.has_key('self'): del parameters['self']
[1118]2553 for k, v in parameters.iteritems():
[484]2554 if type(v) is dict:
[1118]2555 for k2, v2 in v.iteritems():
[484]2556 hist += k2
2557 hist += "="
[1118]2558 if isinstance(v2, scantable):
[484]2559 hist += 'scantable'
2560 elif k2 == 'mask':
[1118]2561 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2562 hist += str(self._zip_mask(v2))
2563 else:
2564 hist += str(v2)
[484]2565 else:
[513]2566 hist += str(v2)
[484]2567 else:
2568 hist += k
2569 hist += "="
[1118]2570 if isinstance(v, scantable):
[484]2571 hist += 'scantable'
2572 elif k == 'mask':
[1118]2573 if isinstance(v, list) or isinstance(v, tuple):
[513]2574 hist += str(self._zip_mask(v))
2575 else:
2576 hist += str(v)
[484]2577 else:
2578 hist += str(v)
2579 hist += sep
2580 hist = hist[:-2] # remove trailing '##'
2581 self._addhistory(hist)
2582
[710]2583
[484]2584 def _zip_mask(self, mask):
2585 mask = list(mask)
2586 i = 0
2587 segments = []
2588 while mask[i:].count(1):
2589 i += mask[i:].index(1)
2590 if mask[i:].count(0):
2591 j = i + mask[i:].index(0)
2592 else:
[710]2593 j = len(mask)
[1118]2594 segments.append([i, j])
[710]2595 i = j
[484]2596 return segments
[714]2597
[626]2598 def _get_ordinate_label(self):
2599 fu = "("+self.get_fluxunit()+")"
2600 import re
2601 lbl = "Intensity"
[1118]2602 if re.match(".K.", fu):
[626]2603 lbl = "Brightness Temperature "+ fu
[1118]2604 elif re.match(".Jy.", fu):
[626]2605 lbl = "Flux density "+ fu
2606 return lbl
[710]2607
[876]2608 def _check_ifs(self):
2609 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2610 nchans = filter(lambda t: t > 0, nchans)
[876]2611 return (sum(nchans)/len(nchans) == nchans[0])
[976]2612
[1819]2613 def _fill(self, names, unit, average, getpt, antenna):
[976]2614 first = True
2615 fullnames = []
2616 for name in names:
2617 name = os.path.expandvars(name)
2618 name = os.path.expanduser(name)
2619 if not os.path.exists(name):
[976]2620 msg = "File '%s' does not exist" % (name)
2621 if rcParams['verbose']:
2622 asaplog.push(msg)
[1819]2623 print_log( 'ERROR' )
[976]2624 return
2625 raise IOError(msg)
2626 fullnames.append(name)
2627 if average:
2628 asaplog.push('Auto averaging integrations')
[1079]2629 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2630 for name in fullnames:
[1073]2631 tbl = Scantable(stype)
[1843]2632 r = filler(tbl)
[1504]2633 rx = rcParams['scantable.reference']
[1843]2634 r.setreferenceexpr(rx)
[976]2635 msg = "Importing %s..." % (name)
[1118]2636 asaplog.push(msg, False)
[976]2637 print_log()
[1843]2638 r.open(name)# antenna, -1, -1, getpt)
2639 r.fill()
[976]2640 if average:
[1118]2641 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2642 if not first:
2643 tbl = self._math._merge([self, tbl])
2644 Scantable.__init__(self, tbl)
[1843]2645 r.close()
[1118]2646 del r, tbl
[976]2647 first = False
2648 if unit is not None:
2649 self.set_fluxunit(unit)
[1824]2650 if not is_casapy():
2651 self.set_freqframe(rcParams['scantable.freqframe'])
[976]2652
[1402]2653 def __getitem__(self, key):
2654 if key < 0:
2655 key += self.nrow()
2656 if key >= self.nrow():
2657 raise IndexError("Row index out of range.")
2658 return self._getspectrum(key)
2659
2660 def __setitem__(self, key, value):
2661 if key < 0:
2662 key += self.nrow()
2663 if key >= self.nrow():
2664 raise IndexError("Row index out of range.")
2665 if not hasattr(value, "__len__") or \
2666 len(value) > self.nchan(self.getif(key)):
2667 raise ValueError("Spectrum length doesn't match.")
2668 return self._setspectrum(value, key)
2669
2670 def __len__(self):
2671 return self.nrow()
2672
2673 def __iter__(self):
2674 for i in range(len(self)):
2675 yield self[i]