source: trunk/python/scantable.py@ 1840

Last change on this file since 1840 was 1824, checked in by Malte Marquarding, 14 years ago

Refactoring of init.py. Moved functionality into separate modules. Some minor fixes to make unit test work under 'standard asap'.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.6 KB
RevLine 
[1697]1import os
[1691]2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
[1824]7from asap.env import is_casapy
[876]8from asap._asap import Scantable
[1824]9from asap.parameters import rcParams
10from asap.logging import asaplog, print_log, print_log_dec
11from asap.selector import selector
12from asap.linecatalog import linecatalog
[1600]13from asap.coordinate import coordinate
[1824]14from asap.utils import _n_bools, mask_not, mask_and, mask_or
[102]15
[1689]16
17def preserve_selection(func):
[1691]18 @wraps_dec(func)
[1689]19 def wrap(obj, *args, **kw):
20 basesel = obj.get_selection()
21 val = func(obj, *args, **kw)
22 obj.set_selection(basesel)
23 return val
24 return wrap
25
26
[1697]27def is_scantable(filename):
28 return (os.path.isdir(filename)
29 and not os.path.exists(filename+'/table.f1')
30 and os.path.exists(filename+'/table.info'))
31
32
[876]33class scantable(Scantable):
[102]34 """
35 The ASAP container for scans
36 """
[1819]37
[1589]38 @print_log_dec
[1824]39 def __init__(self, filename, average=None, unit=None, getpt=None,
40 antenna=None, parallactify=None):
[102]41 """
42 Create a scantable from a saved one or make a reference
43 Parameters:
[181]44 filename: the name of an asap table on disk
45 or
46 the name of a rpfits/sdfits/ms file
47 (integrations within scans are auto averaged
48 and the whole file is read)
49 or
50 [advanced] a reference to an existing
[102]51 scantable
[484]52 average: average all integrations withinb a scan on read.
53 The default (True) is taken from .asaprc.
54 unit: brightness unit; must be consistent with K or Jy.
[340]55 Over-rides the default selected by the reader
56 (input rpfits/sdfits/ms) or replaces the value
57 in existing scantables
[1819]58 getpt: for MeasurementSet input data only:
59 If True, all pointing data are filled.
60 The deafult is False, which makes time to load
61 the MS data faster in some cases.
62 antenna: Antenna selection. integer (id) or string (name
63 or id).
[1586]64 parallactify: Indcicate that the data had been parallatified.
65 Default is taken form rc file.
[710]66 """
[976]67 if average is None:
[710]68 average = rcParams['scantable.autoaverage']
[1819]69 if getpt is None:
70 getpt = True
71 if antenna is None:
72 antenna = ''
73 elif type(antenna) == int:
74 antenna = '%s'%antenna
75 elif type(antenna) == list:
76 tmpstr = ''
77 for i in range( len(antenna) ):
78 if type(antenna[i]) == int:
79 tmpstr = tmpstr + ('%s,'%(antenna[i]))
80 elif type(antenna[i]) == str:
81 tmpstr=tmpstr+antenna[i]+','
82 else:
83 asaplog.push('Bad antenna selection.')
84 print_log('ERROR')
85 return
86 antenna = tmpstr.rstrip(',')
[1593]87 parallactify = parallactify or rcParams['scantable.parallactify']
[1259]88 varlist = vars()
[876]89 from asap._asap import stmath
[1819]90 self._math = stmath( rcParams['insitu'] )
[876]91 if isinstance(filename, Scantable):
92 Scantable.__init__(self, filename)
[181]93 else:
[1697]94 if isinstance(filename, str):
[976]95 filename = os.path.expandvars(filename)
96 filename = os.path.expanduser(filename)
97 if not os.path.exists(filename):
98 s = "File '%s' not found." % (filename)
[718]99 if rcParams['verbose']:
[976]100 asaplog.push(s)
[1819]101 print_log('ERROR')
[718]102 return
[976]103 raise IOError(s)
[1697]104 if is_scantable(filename):
105 ondisk = rcParams['scantable.storage'] == 'disk'
106 Scantable.__init__(self, filename, ondisk)
107 if unit is not None:
108 self.set_fluxunit(unit)
[1819]109 # do not reset to the default freqframe
110 #self.set_freqframe(rcParams['scantable.freqframe'])
111 elif os.path.isdir(filename) \
112 and not os.path.exists(filename+'/table.f1'):
113 msg = "The given file '%s'is not a valid " \
114 "asap table." % (filename)
115 if rcParams['verbose']:
116 #print msg
117 asaplog.push( msg )
118 print_log( 'ERROR' )
119 return
120 else:
121 raise IOError(msg)
[226]122 else:
[1819]123 self._fill([filename], unit, average, getpt, antenna)
[1118]124 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]125 and isinstance(filename[-1], str):
[1819]126 self._fill(filename, unit, average, getpt, antenna)
[1586]127 self.parallactify(parallactify)
[1259]128 self._add_history("scantable", varlist)
[1819]129 print_log()
[102]130
[1589]131 @print_log_dec
[876]132 def save(self, name=None, format=None, overwrite=False):
[116]133 """
[1280]134 Store the scantable on disk. This can be an asap (aips++) Table,
135 SDFITS or MS2 format.
[116]136 Parameters:
[1093]137 name: the name of the outputfile. For format "ASCII"
138 this is the root file name (data in 'name'.txt
[497]139 and header in 'name'_header.txt)
[116]140 format: an optional file format. Default is ASAP.
[280]141 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]142 'SDFITS' (save as SDFITS file)
[200]143 'ASCII' (saves as ascii text file)
[226]144 'MS2' (saves as an aips++
145 MeasurementSet V2)
[1573]146 'FITS' (save as image FITS - not
[1443]147 readable by class)
148 'CLASS' (save as FITS readable by CLASS)
[411]149 overwrite: If the file should be overwritten if it exists.
[256]150 The default False is to return with warning
[411]151 without writing the output. USE WITH CARE.
[116]152 Example:
153 scan.save('myscan.asap')
[1118]154 scan.save('myscan.sdfits', 'SDFITS')
[116]155 """
[411]156 from os import path
[1593]157 format = format or rcParams['scantable.save']
[256]158 suffix = '.'+format.lower()
[1118]159 if name is None or name == "":
[256]160 name = 'scantable'+suffix
[718]161 msg = "No filename given. Using default name %s..." % name
162 asaplog.push(msg)
[411]163 name = path.expandvars(name)
[256]164 if path.isfile(name) or path.isdir(name):
165 if not overwrite:
[718]166 msg = "File %s exists." % name
167 if rcParams['verbose']:
[1819]168 #print msg
169 asaplog.push( msg )
170 print_log( 'ERROR' )
[718]171 return
172 else:
173 raise IOError(msg)
[451]174 format2 = format.upper()
175 if format2 == 'ASAP':
[116]176 self._save(name)
177 else:
[989]178 from asap._asap import stwriter as stw
[1118]179 writer = stw(format2)
180 writer.write(self, name)
[1819]181 print_log()
[116]182 return
183
[102]184 def copy(self):
185 """
186 Return a copy of this scantable.
[1348]187 Note:
188 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]189 Parameters:
[113]190 none
[102]191 Example:
192 copiedscan = scan.copy()
193 """
[876]194 sd = scantable(Scantable._copy(self))
[113]195 return sd
196
[1093]197 def drop_scan(self, scanid=None):
198 """
199 Return a new scantable where the specified scan number(s) has(have)
200 been dropped.
201 Parameters:
202 scanid: a (list of) scan number(s)
203 """
204 from asap import _is_sequence_or_number as _is_valid
205 from asap import _to_list
206 from asap import unique
207 if not _is_valid(scanid):
208 if rcParams['verbose']:
[1819]209 #print "Please specify a scanno to drop from the scantable"
210 asaplog.push( 'Please specify a scanno to drop from the scantable' )
211 print_log( 'ERROR' )
[1093]212 return
213 else:
214 raise RuntimeError("No scan given")
215 try:
216 scanid = _to_list(scanid)
217 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
218 for sid in scanid: allscans.remove(sid)
[1118]219 if len(allscans) == 0:
220 raise ValueError("Can't remove all scans")
[1093]221 except ValueError:
222 if rcParams['verbose']:
[1819]223 #print "Couldn't find any match."
224 print_log()
225 asaplog.push( "Couldn't find any match." )
226 print_log( 'ERROR' )
[1093]227 return
228 else: raise
229 try:
[1593]230 sel = selector(scans=allscans)
[1594]231 return self._select_copy(sel)
[1093]232 except RuntimeError:
[1118]233 if rcParams['verbose']:
[1819]234 #print "Couldn't find any match."
235 print_log()
236 asaplog.push( "Couldn't find any match." )
237 print_log( 'ERROR' )
[1118]238 else:
239 raise
[1093]240
[1594]241 def _select_copy(self, selection):
242 orig = self.get_selection()
243 self.set_selection(orig+selection)
244 cp = self.copy()
245 self.set_selection(orig)
246 return cp
247
[102]248 def get_scan(self, scanid=None):
249 """
250 Return a specific scan (by scanno) or collection of scans (by
251 source name) in a new scantable.
[1348]252 Note:
253 See scantable.drop_scan() for the inverse operation.
[102]254 Parameters:
[513]255 scanid: a (list of) scanno or a source name, unix-style
256 patterns are accepted for source name matching, e.g.
257 '*_R' gets all 'ref scans
[102]258 Example:
[513]259 # get all scans containing the source '323p459'
260 newscan = scan.get_scan('323p459')
261 # get all 'off' scans
262 refscans = scan.get_scan('*_R')
263 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]264 newscan = scan.get_scan([0, 2, 7, 10])
[102]265 """
266 if scanid is None:
[718]267 if rcParams['verbose']:
[1819]268 #print "Please specify a scan no or name to " \
269 # "retrieve from the scantable"
270 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
271 print_log( 'ERROR' )
[718]272 return
273 else:
274 raise RuntimeError("No scan given")
275
[102]276 try:
[946]277 bsel = self.get_selection()
278 sel = selector()
[102]279 if type(scanid) is str:
[946]280 sel.set_name(scanid)
[1594]281 return self._select_copy(sel)
[102]282 elif type(scanid) is int:
[946]283 sel.set_scans([scanid])
[1594]284 return self._select_copy(sel)
[381]285 elif type(scanid) is list:
[946]286 sel.set_scans(scanid)
[1594]287 return self._select_copy(sel)
[381]288 else:
[718]289 msg = "Illegal scanid type, use 'int' or 'list' if ints."
290 if rcParams['verbose']:
[1819]291 #print msg
292 asaplog.push( msg )
293 print_log( 'ERROR' )
[718]294 else:
295 raise TypeError(msg)
[102]296 except RuntimeError:
[1819]297 if rcParams['verbose']:
298 #print "Couldn't find any match."
299 print_log()
300 asaplog.push( "Couldn't find any match." )
301 print_log( 'ERROR' )
[718]302 else: raise
[102]303
304 def __str__(self):
[1118]305 return Scantable._summary(self, True)
[102]306
[976]307 def summary(self, filename=None):
[102]308 """
309 Print a summary of the contents of this scantable.
310 Parameters:
311 filename: the name of a file to write the putput to
312 Default - no file output
313 """
[976]314 info = Scantable._summary(self, True)
[102]315 if filename is not None:
[256]316 if filename is "":
317 filename = 'scantable_summary.txt'
[415]318 from os.path import expandvars, isdir
[411]319 filename = expandvars(filename)
[415]320 if not isdir(filename):
[413]321 data = open(filename, 'w')
322 data.write(info)
323 data.close()
324 else:
[718]325 msg = "Illegal file name '%s'." % (filename)
326 if rcParams['verbose']:
[1819]327 #print msg
328 asaplog.push( msg )
329 print_log( 'ERROR' )
[718]330 else:
331 raise IOError(msg)
332 if rcParams['verbose']:
[794]333 try:
334 from IPython.genutils import page as pager
335 except ImportError:
336 from pydoc import pager
337 pager(info)
[718]338 else:
339 return info
[710]340
[1512]341 def get_spectrum(self, rowno):
[1471]342 """Return the spectrum for the current row in the scantable as a list.
343 Parameters:
[1573]344 rowno: the row number to retrieve the spectrum from
[1471]345 """
346 return self._getspectrum(rowno)
[946]347
[1471]348 def get_mask(self, rowno):
349 """Return the mask for the current row in the scantable as a list.
350 Parameters:
[1573]351 rowno: the row number to retrieve the mask from
[1471]352 """
353 return self._getmask(rowno)
354
355 def set_spectrum(self, spec, rowno):
356 """Return the spectrum for the current row in the scantable as a list.
357 Parameters:
358 spec: the spectrum
[1573]359 rowno: the row number to set the spectrum for
[1471]360 """
361 assert(len(spec) == self.nchan())
362 return self._setspectrum(spec, rowno)
363
[1600]364 def get_coordinate(self, rowno):
365 """Return the (spectral) coordinate for a a given 'rowno'.
366 NOTE:
367 * This coordinate is only valid until a scantable method modifies
368 the frequency axis.
369 * This coordinate does contain the original frequency set-up
370 NOT the new frame. The conversions however are done using the user
371 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
372 use scantable.freq_align first. Without it there is no closure,
373 i.e.
374 c = myscan.get_coordinate(0)
375 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
376
377 Parameters:
378 rowno: the row number for the spectral coordinate
379
380 """
381 return coordinate(Scantable.get_coordinate(self, rowno))
382
[946]383 def get_selection(self):
384 """
[1005]385 Get the selection object currently set on this scantable.
386 Parameters:
387 none
388 Example:
389 sel = scan.get_selection()
390 sel.set_ifs(0) # select IF 0
391 scan.set_selection(sel) # apply modified selection
[946]392 """
393 return selector(self._getselection())
394
[1576]395 def set_selection(self, selection=None, **kw):
[946]396 """
[1005]397 Select a subset of the data. All following operations on this scantable
398 are only applied to thi selection.
399 Parameters:
[1697]400 selection: a selector object (default unset the selection),
401
402 or
403
404 any combination of
405 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
406
[1005]407 Examples:
408 sel = selector() # create a selection object
[1118]409 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]410 scan.set_selection(sel) # set the selection
411 scan.summary() # will only print summary of scanno 0 an 3
412 scan.set_selection() # unset the selection
[1697]413 # or the equivalent
414 scan.set_selection(scans=[0,3])
415 scan.summary() # will only print summary of scanno 0 an 3
416 scan.set_selection() # unset the selection
[946]417 """
[1576]418 if selection is None:
419 # reset
420 if len(kw) == 0:
421 selection = selector()
422 else:
423 # try keywords
424 for k in kw:
425 if k not in selector.fields:
426 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
427 selection = selector(**kw)
[946]428 self._setselection(selection)
429
[1819]430 def get_row(self, row=0, insitu=None):
[102]431 """
[1819]432 Select a row in the scantable.
433 Return a scantable with single row.
434 Parameters:
435 row: row no of integration, default is 0.
436 insitu: if False a new scantable is returned.
437 Otherwise, the scaling is done in-situ
438 The default is taken from .asaprc (False)
439 """
440 if insitu is None: insitu = rcParams['insitu']
441 if not insitu:
442 workscan = self.copy()
443 else:
444 workscan = self
445 # Select a row
446 sel=selector()
447 sel.set_scans([workscan.getscan(row)])
448 sel.set_cycles([workscan.getcycle(row)])
449 sel.set_beams([workscan.getbeam(row)])
450 sel.set_ifs([workscan.getif(row)])
451 sel.set_polarisations([workscan.getpol(row)])
452 sel.set_name(workscan._getsourcename(row))
453 workscan.set_selection(sel)
454 if not workscan.nrow() == 1:
455 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
456 raise RuntimeError(msg)
457 del sel
458 if insitu:
459 self._assign(workscan)
460 else:
461 return workscan
462
463 #def stats(self, stat='stddev', mask=None):
464 def stats(self, stat='stddev', mask=None, form='3.3f'):
465 """
[135]466 Determine the specified statistic of the current beam/if/pol
[102]467 Takes a 'mask' as an optional parameter to specify which
468 channels should be excluded.
469 Parameters:
[1819]470 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
471 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]472 mask: an optional mask specifying where the statistic
[102]473 should be determined.
[1819]474 form: format string to print statistic values
[102]475 Example:
[113]476 scan.set_unit('channel')
[1118]477 msk = scan.create_mask([100, 200], [500, 600])
[135]478 scan.stats(stat='mean', mask=m)
[102]479 """
[1593]480 mask = mask or []
[876]481 if not self._check_ifs():
[1118]482 raise ValueError("Cannot apply mask as the IFs have different "
483 "number of channels. Please use setselection() "
484 "to select individual IFs")
[1819]485 rtnabc = False
486 if stat.lower().endswith('_abc'): rtnabc = True
487 getchan = False
488 if stat.lower().startswith('min') or stat.lower().startswith('max'):
489 chan = self._math._minmaxchan(self, mask, stat)
490 getchan = True
491 statvals = []
492 if not rtnabc: statvals = self._math._stats(self, mask, stat)
[256]493
[1819]494 #def cb(i):
495 # return statvals[i]
[256]496
[1819]497 #return self._row_callback(cb, stat)
[102]498
[1819]499 label=stat
500 #callback=cb
501 out = ""
502 #outvec = []
503 sep = '-'*50
504 for i in range(self.nrow()):
505 refstr = ''
506 statunit= ''
507 if getchan:
508 qx, qy = self.chan2data(rowno=i, chan=chan[i])
509 if rtnabc:
510 statvals.append(qx['value'])
511 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
512 statunit= '['+qx['unit']+']'
513 else:
514 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
515
516 tm = self._gettime(i)
517 src = self._getsourcename(i)
518 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
519 out += 'Time[%s]:\n' % (tm)
520 if self.nbeam(-1) > 1:
521 out += ' Beam[%d] ' % (self.getbeam(i))
522 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
523 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
524 #outvec.append(callback(i))
525 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
526 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
527 out += sep+"\n"
528
529 if rcParams['verbose']:
530 import os
531 if os.environ.has_key( 'USER' ):
532 usr=os.environ['USER']
533 else:
534 import commands
535 usr=commands.getoutput( 'whoami' )
536 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
537 f=open(tmpfile,'w')
538 print >> f, sep
539 print >> f, ' %s %s' % (label, statunit)
540 print >> f, sep
541 print >> f, out
542 f.close()
543 f=open(tmpfile,'r')
544 x=f.readlines()
545 f.close()
546 blanc=''
547 asaplog.push(blanc.join(x), False)
548 #for xx in x:
549 # asaplog.push( xx, False )
550 print_log()
551 return statvals
552
553 def chan2data(self, rowno=0, chan=0):
554 """
555 Returns channel/frequency/velocity and spectral value
556 at an arbitrary row and channel in the scantable.
557 Parameters:
558 rowno: a row number in the scantable. Default is the
559 first row, i.e. rowno=0
560 chan: a channel in the scantable. Default is the first
561 channel, i.e. pos=0
562 """
563 if isinstance(rowno, int) and isinstance(chan, int):
564 qx = {'unit': self.get_unit(),
565 'value': self._getabcissa(rowno)[chan]}
566 qy = {'unit': self.get_fluxunit(),
567 'value': self._getspectrum(rowno)[chan]}
568 return qx, qy
569
[1118]570 def stddev(self, mask=None):
[135]571 """
572 Determine the standard deviation of the current beam/if/pol
573 Takes a 'mask' as an optional parameter to specify which
574 channels should be excluded.
575 Parameters:
576 mask: an optional mask specifying where the standard
577 deviation should be determined.
578
579 Example:
580 scan.set_unit('channel')
[1118]581 msk = scan.create_mask([100, 200], [500, 600])
[135]582 scan.stddev(mask=m)
583 """
[1118]584 return self.stats(stat='stddev', mask=mask);
[135]585
[1003]586
[1259]587 def get_column_names(self):
[1003]588 """
589 Return a list of column names, which can be used for selection.
590 """
[1259]591 return list(Scantable.get_column_names(self))
[1003]592
[1730]593 def get_tsys(self, row=-1):
[113]594 """
595 Return the System temperatures.
596 Returns:
[876]597 a list of Tsys values for the current selection
[113]598 """
[1730]599 if row > -1:
600 return self._get_column(self._gettsys, row)
[876]601 return self._row_callback(self._gettsys, "Tsys")
[256]602
[1730]603
604 def get_weather(self, row=-1):
605 values = self._get_column(self._get_weather, row)
606 if row > -1:
607 return {'temperature': values[0],
608 'pressure': values[1], 'humidity' : values[2],
609 'windspeed' : values[3], 'windaz' : values[4]
610 }
611 else:
612 out = []
613 for r in values:
614
615 out.append({'temperature': r[0],
616 'pressure': r[1], 'humidity' : r[2],
617 'windspeed' : r[3], 'windaz' : r[4]
618 })
619 return out
620
[876]621 def _row_callback(self, callback, label):
622 out = ""
[1118]623 outvec = []
[1590]624 sep = '-'*50
[876]625 for i in range(self.nrow()):
626 tm = self._gettime(i)
627 src = self._getsourcename(i)
[1590]628 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]629 out += 'Time[%s]:\n' % (tm)
[1590]630 if self.nbeam(-1) > 1:
631 out += ' Beam[%d] ' % (self.getbeam(i))
632 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
633 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]634 outvec.append(callback(i))
635 out += '= %3.3f\n' % (outvec[i])
[1590]636 out += sep+'\n'
[876]637 if rcParams['verbose']:
[1819]638 asaplog.push(sep)
639 asaplog.push(" %s" % (label))
640 asaplog.push(sep)
641 asaplog.push(out)
642 print_log()
[1175]643 return outvec
[256]644
[1070]645 def _get_column(self, callback, row=-1):
646 """
647 """
648 if row == -1:
649 return [callback(i) for i in range(self.nrow())]
650 else:
[1819]651 if 0 <= row < self.nrow():
[1070]652 return callback(row)
[256]653
[1070]654
[1348]655 def get_time(self, row=-1, asdatetime=False):
[113]656 """
657 Get a list of time stamps for the observations.
[1348]658 Return a datetime object for each integration time stamp in the scantable.
[113]659 Parameters:
[1348]660 row: row no of integration. Default -1 return all rows
661 asdatetime: return values as datetime objects rather than strings
[113]662 Example:
663 none
664 """
[1175]665 from time import strptime
666 from datetime import datetime
[1392]667 times = self._get_column(self._gettime, row)
[1348]668 if not asdatetime:
[1392]669 return times
[1175]670 format = "%Y/%m/%d/%H:%M:%S"
671 if isinstance(times, list):
672 return [datetime(*strptime(i, format)[:6]) for i in times]
673 else:
674 return datetime(*strptime(times, format)[:6])
[102]675
[1348]676
677 def get_inttime(self, row=-1):
678 """
679 Get a list of integration times for the observations.
680 Return a time in seconds for each integration in the scantable.
681 Parameters:
682 row: row no of integration. Default -1 return all rows.
683 Example:
684 none
685 """
[1573]686 return self._get_column(self._getinttime, row)
[1348]687
[1573]688
[714]689 def get_sourcename(self, row=-1):
690 """
[794]691 Get a list source names for the observations.
[714]692 Return a string for each integration in the scantable.
693 Parameters:
[1348]694 row: row no of integration. Default -1 return all rows.
[714]695 Example:
696 none
697 """
[1070]698 return self._get_column(self._getsourcename, row)
[714]699
[794]700 def get_elevation(self, row=-1):
701 """
702 Get a list of elevations for the observations.
703 Return a float for each integration in the scantable.
704 Parameters:
[1348]705 row: row no of integration. Default -1 return all rows.
[794]706 Example:
707 none
708 """
[1070]709 return self._get_column(self._getelevation, row)
[794]710
711 def get_azimuth(self, row=-1):
712 """
713 Get a list of azimuths for the observations.
714 Return a float for each integration in the scantable.
715 Parameters:
[1348]716 row: row no of integration. Default -1 return all rows.
[794]717 Example:
718 none
719 """
[1070]720 return self._get_column(self._getazimuth, row)
[794]721
722 def get_parangle(self, row=-1):
723 """
724 Get a list of parallactic angles for the observations.
725 Return a float for each integration in the scantable.
726 Parameters:
[1348]727 row: row no of integration. Default -1 return all rows.
[794]728 Example:
729 none
730 """
[1070]731 return self._get_column(self._getparangle, row)
[794]732
[1070]733 def get_direction(self, row=-1):
734 """
735 Get a list of Positions on the sky (direction) for the observations.
[1594]736 Return a string for each integration in the scantable.
[1070]737 Parameters:
738 row: row no of integration. Default -1 return all rows
739 Example:
740 none
741 """
742 return self._get_column(self._getdirection, row)
743
[1391]744 def get_directionval(self, row=-1):
745 """
746 Get a list of Positions on the sky (direction) for the observations.
747 Return a float for each integration in the scantable.
748 Parameters:
749 row: row no of integration. Default -1 return all rows
750 Example:
751 none
752 """
753 return self._get_column(self._getdirectionvec, row)
754
[1730]755 @print_log_dec
[102]756 def set_unit(self, unit='channel'):
757 """
758 Set the unit for all following operations on this scantable
759 Parameters:
760 unit: optional unit, default is 'channel'
[1118]761 one of '*Hz', 'km/s', 'channel', ''
[102]762 """
[484]763 varlist = vars()
[1118]764 if unit in ['', 'pixel', 'channel']:
[113]765 unit = ''
766 inf = list(self._getcoordinfo())
767 inf[0] = unit
768 self._setcoordinfo(inf)
[1118]769 self._add_history("set_unit", varlist)
[113]770
[1589]771 @print_log_dec
[484]772 def set_instrument(self, instr):
[358]773 """
[1348]774 Set the instrument for subsequent processing.
[358]775 Parameters:
[710]776 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]777 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]778 """
779 self._setInstrument(instr)
[1118]780 self._add_history("set_instument", vars())
[1819]781 print_log()
[358]782
[1589]783 @print_log_dec
[1190]784 def set_feedtype(self, feedtype):
785 """
786 Overwrite the feed type, which might not be set correctly.
787 Parameters:
788 feedtype: 'linear' or 'circular'
789 """
790 self._setfeedtype(feedtype)
791 self._add_history("set_feedtype", vars())
[1819]792 print_log()
[1190]793
[1589]794 @print_log_dec
[276]795 def set_doppler(self, doppler='RADIO'):
796 """
797 Set the doppler for all following operations on this scantable.
798 Parameters:
799 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
800 """
[484]801 varlist = vars()
[276]802 inf = list(self._getcoordinfo())
803 inf[2] = doppler
804 self._setcoordinfo(inf)
[1118]805 self._add_history("set_doppler", vars())
[1819]806 print_log()
[710]807
[1589]808 @print_log_dec
[226]809 def set_freqframe(self, frame=None):
[113]810 """
811 Set the frame type of the Spectral Axis.
812 Parameters:
[591]813 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]814 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]815 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]816 Examples:
817 scan.set_freqframe('BARY')
818 """
[1593]819 frame = frame or rcParams['scantable.freqframe']
[484]820 varlist = vars()
[1819]821 # "REST" is not implemented in casacore
822 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
823 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
824 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]825 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]826
[989]827 if frame in valid:
[113]828 inf = list(self._getcoordinfo())
829 inf[1] = frame
830 self._setcoordinfo(inf)
[1118]831 self._add_history("set_freqframe", varlist)
[102]832 else:
[1118]833 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]834 if rcParams['verbose']:
[1819]835 #print msg
836 asaplog.push( msg )
837 print_log( 'ERROR' )
[718]838 else:
839 raise TypeError(msg)
[1819]840 print_log()
[710]841
[989]842 def set_dirframe(self, frame=""):
843 """
844 Set the frame type of the Direction on the sky.
845 Parameters:
846 frame: an optional frame type, default ''. Valid frames are:
847 'J2000', 'B1950', 'GALACTIC'
848 Examples:
849 scan.set_dirframe('GALACTIC')
850 """
851 varlist = vars()
852 try:
853 Scantable.set_dirframe(self, frame)
[1118]854 except RuntimeError, msg:
[989]855 if rcParams['verbose']:
[1819]856 #print msg
857 print_log()
858 asaplog.push( str(msg) )
859 print_log( 'ERROR' )
[989]860 else:
861 raise
[1118]862 self._add_history("set_dirframe", varlist)
[989]863
[113]864 def get_unit(self):
865 """
866 Get the default unit set in this scantable
867 Returns:
868 A unit string
869 """
870 inf = self._getcoordinfo()
871 unit = inf[0]
872 if unit == '': unit = 'channel'
873 return unit
[102]874
[158]875 def get_abcissa(self, rowno=0):
[102]876 """
[158]877 Get the abcissa in the current coordinate setup for the currently
[113]878 selected Beam/IF/Pol
879 Parameters:
[226]880 rowno: an optional row number in the scantable. Default is the
881 first row, i.e. rowno=0
[113]882 Returns:
[1348]883 The abcissa values and the format string (as a dictionary)
[113]884 """
[256]885 abc = self._getabcissa(rowno)
[710]886 lbl = self._getabcissalabel(rowno)
[1819]887 print_log()
[158]888 return abc, lbl
[113]889
[1819]890 def flag(self, mask=None, unflag=False):
[1001]891 """
892 Flag the selected data using an optional channel mask.
893 Parameters:
894 mask: an optional channel mask, created with create_mask. Default
895 (no mask) is all channels.
[1819]896 unflag: if True, unflag the data
[1001]897 """
898 varlist = vars()
[1593]899 mask = mask or []
[1001]900 try:
[1819]901 self._flag(mask, unflag)
[1118]902 except RuntimeError, msg:
[1001]903 if rcParams['verbose']:
[1819]904 #print msg
905 print_log()
906 asaplog.push( str(msg) )
907 print_log( 'ERROR' )
[1001]908 return
909 else: raise
910 self._add_history("flag", varlist)
911
[1819]912 def flag_row(self, rows=[], unflag=False):
913 """
914 Flag the selected data in row-based manner.
915 Parameters:
916 rows: list of row numbers to be flagged. Default is no row (must be explicitly specified to execute row-based flagging).
917 unflag: if True, unflag the data.
918 """
919 varlist = vars()
920 try:
921 self._flag_row(rows, unflag)
922 except RuntimeError, msg:
923 if rcParams['verbose']:
924 print_log()
925 asaplog.push( str(msg) )
926 print_log('ERROR')
927 return
928 else: raise
929 self._add_history("flag_row", varlist)
930
931 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
932 """
933 Flag the selected data outside a specified range (in channel-base)
934 Parameters:
935 uthres: upper threshold.
936 dthres: lower threshold
937 clipoutside: True for flagging data outside the range [dthres:uthres].
938 False for glagging data inside the range.
939 unflag : if True, unflag the data.
940 """
941 varlist = vars()
942 try:
943 self._clip(uthres, dthres, clipoutside, unflag)
944 except RuntimeError, msg:
945 if rcParams['verbose']:
946 print_log()
947 asaplog.push(str(msg))
948 print_log('ERROR')
949 return
950 else: raise
951 self._add_history("clip", varlist)
952
[1589]953 @print_log_dec
[1584]954 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1819]955 #def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]956 """
957 Flag the data in 'lag' space by providing a frequency to remove.
[1584]958 Flagged data in the scantable gets interpolated over the region.
[1192]959 No taper is applied.
960 Parameters:
[1579]961 start: the start frequency (really a period within the
962 bandwidth) or period to remove
963 end: the end frequency or period to remove
[1584]964 unit: the frequency unit (default "MHz") or "" for
[1579]965 explicit lag channels
[1203]966 Notes:
[1579]967 It is recommended to flag edges of the band or strong
[1348]968 signals beforehand.
[1192]969 """
970 if insitu is None: insitu = rcParams['insitu']
971 self._math._setinsitu(insitu)
972 varlist = vars()
[1579]973 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
974 if not (unit == "" or base.has_key(unit)):
[1192]975 raise ValueError("%s is not a valid unit." % unit)
976 try:
[1579]977 if unit == "":
978 s = scantable(self._math._lag_flag(self, start, end, "lags"))
979 else:
980 s = scantable(self._math._lag_flag(self, start*base[unit],
981 end*base[unit], "frequency"))
[1192]982 except RuntimeError, msg:
983 if rcParams['verbose']:
[1819]984 #print msg
985 print_log()
986 asaplog.push( str(msg) )
987 print_log( 'ERROR' )
[1192]988 return
989 else: raise
990 s._add_history("lag_flag", varlist)
[1819]991 print_log()
[1192]992 if insitu:
993 self._assign(s)
994 else:
995 return s
[1001]996
[1589]997 @print_log_dec
[113]998 def create_mask(self, *args, **kwargs):
999 """
[1118]1000 Compute and return a mask based on [min, max] windows.
[189]1001 The specified windows are to be INCLUDED, when the mask is
[113]1002 applied.
[102]1003 Parameters:
[1118]1004 [min, max], [min2, max2], ...
[1024]1005 Pairs of start/end points (inclusive)specifying the regions
[102]1006 to be masked
[189]1007 invert: optional argument. If specified as True,
1008 return an inverted mask, i.e. the regions
1009 specified are EXCLUDED
[513]1010 row: create the mask using the specified row for
1011 unit conversions, default is row=0
1012 only necessary if frequency varies over rows.
[102]1013 Example:
[113]1014 scan.set_unit('channel')
1015 a)
[1118]1016 msk = scan.create_mask([400, 500], [800, 900])
[189]1017 # masks everything outside 400 and 500
[113]1018 # and 800 and 900 in the unit 'channel'
1019
1020 b)
[1118]1021 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1022 # masks the regions between 400 and 500
[113]1023 # and 800 and 900 in the unit 'channel'
[1024]1024 c)
1025 mask only channel 400
[1554]1026 msk = scan.create_mask([400])
[102]1027 """
[1554]1028 row = kwargs.get("row", 0)
[513]1029 data = self._getabcissa(row)
[113]1030 u = self._getcoordinfo()[0]
[718]1031 if rcParams['verbose']:
[113]1032 if u == "": u = "channel"
[718]1033 msg = "The current mask window unit is %s" % u
[1118]1034 i = self._check_ifs()
1035 if not i:
[876]1036 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]1037 asaplog.push(msg)
[102]1038 n = self.nchan()
[1295]1039 msk = _n_bools(n, False)
[710]1040 # test if args is a 'list' or a 'normal *args - UGLY!!!
1041
[1118]1042 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1043 and args or args[0]
[710]1044 for window in ws:
[1554]1045 if len(window) == 1:
1046 window = [window[0], window[0]]
1047 if len(window) == 0 or len(window) > 2:
1048 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1049 if window[0] > window[1]:
1050 tmp = window[0]
1051 window[0] = window[1]
1052 window[1] = tmp
[102]1053 for i in range(n):
[1024]1054 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1055 msk[i] = True
[113]1056 if kwargs.has_key('invert'):
1057 if kwargs.get('invert'):
[1295]1058 msk = mask_not(msk)
[1819]1059 print_log()
[102]1060 return msk
[710]1061
[1819]1062 def get_masklist(self, mask=None, row=0):
[256]1063 """
[1819]1064 Compute and return a list of mask windows, [min, max].
1065 Parameters:
1066 mask: channel mask, created with create_mask.
1067 row: calcutate the masklist using the specified row
1068 for unit conversions, default is row=0
1069 only necessary if frequency varies over rows.
1070 Returns:
1071 [min, max], [min2, max2], ...
1072 Pairs of start/end points (inclusive)specifying
1073 the masked regions
1074 """
1075 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1076 raise TypeError("The mask should be list or tuple.")
1077 if len(mask) < 2:
1078 raise TypeError("The mask elements should be > 1")
1079 if self.nchan() != len(mask):
1080 msg = "Number of channels in scantable != number of mask elements"
1081 raise TypeError(msg)
1082 data = self._getabcissa(row)
1083 u = self._getcoordinfo()[0]
1084 if rcParams['verbose']:
1085 if u == "": u = "channel"
1086 msg = "The current mask window unit is %s" % u
1087 i = self._check_ifs()
1088 if not i:
1089 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1090 asaplog.push(msg)
1091 masklist=[]
1092 ist, ien = None, None
1093 ist, ien=self.get_mask_indices(mask)
1094 if ist is not None and ien is not None:
1095 for i in xrange(len(ist)):
1096 range=[data[ist[i]],data[ien[i]]]
1097 range.sort()
1098 masklist.append([range[0],range[1]])
1099 return masklist
1100
1101 def get_mask_indices(self, mask=None):
1102 """
1103 Compute and Return lists of mask start indices and mask end indices.
1104 Parameters:
1105 mask: channel mask, created with create_mask.
1106 Returns:
1107 List of mask start indices and that of mask end indices,
1108 i.e., [istart1,istart2,....], [iend1,iend2,....].
1109 """
1110 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1111 raise TypeError("The mask should be list or tuple.")
1112 if len(mask) < 2:
1113 raise TypeError("The mask elements should be > 1")
1114 istart=[]
1115 iend=[]
1116 if mask[0]: istart.append(0)
1117 for i in range(len(mask)-1):
1118 if not mask[i] and mask[i+1]:
1119 istart.append(i+1)
1120 elif mask[i] and not mask[i+1]:
1121 iend.append(i)
1122 if mask[len(mask)-1]: iend.append(len(mask)-1)
1123 if len(istart) != len(iend):
1124 raise RuntimeError("Numbers of mask start != mask end.")
1125 for i in range(len(istart)):
1126 if istart[i] > iend[i]:
1127 raise RuntimeError("Mask start index > mask end index")
1128 break
1129 return istart,iend
1130
1131# def get_restfreqs(self):
1132# """
1133# Get the restfrequency(s) stored in this scantable.
1134# The return value(s) are always of unit 'Hz'
1135# Parameters:
1136# none
1137# Returns:
1138# a list of doubles
1139# """
1140# return list(self._getrestfreqs())
1141
1142 def get_restfreqs(self, ids=None):
1143 """
[256]1144 Get the restfrequency(s) stored in this scantable.
1145 The return value(s) are always of unit 'Hz'
1146 Parameters:
[1819]1147 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1148 be retrieved
[256]1149 Returns:
[1819]1150 dictionary containing ids and a list of doubles for each id
[256]1151 """
[1819]1152 if ids is None:
1153 rfreqs={}
1154 idlist = self.getmolnos()
1155 for i in idlist:
1156 rfreqs[i]=list(self._getrestfreqs(i))
1157 return rfreqs
1158 else:
1159 if type(ids)==list or type(ids)==tuple:
1160 rfreqs={}
1161 for i in ids:
1162 rfreqs[i]=list(self._getrestfreqs(i))
1163 return rfreqs
1164 else:
1165 return list(self._getrestfreqs(ids))
1166 #return list(self._getrestfreqs(ids))
[102]1167
[931]1168 def set_restfreqs(self, freqs=None, unit='Hz'):
1169 """
[1819]1170 ********NEED TO BE UPDATED begin************
[931]1171 Set or replace the restfrequency specified and
1172 If the 'freqs' argument holds a scalar,
1173 then that rest frequency will be applied to all the selected
1174 data. If the 'freqs' argument holds
1175 a vector, then it MUST be of equal or smaller length than
1176 the number of IFs (and the available restfrequencies will be
1177 replaced by this vector). In this case, *all* data have
1178 the restfrequency set per IF according
1179 to the corresponding value you give in the 'freqs' vector.
[1118]1180 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1181 IF 1 gets restfreq 2e9.
[1819]1182 ********NEED TO BE UPDATED end************
[1395]1183 You can also specify the frequencies via a linecatalog.
[1153]1184
[931]1185 Parameters:
1186 freqs: list of rest frequency values or string idenitfiers
1187 unit: unit for rest frequency (default 'Hz')
[402]1188
[931]1189 Example:
[1819]1190 # set the given restfrequency for the all currently selected IFs
[931]1191 scan.set_restfreqs(freqs=1.4e9)
[1819]1192 # set multiple restfrequencies to all the selected data
1193 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1194 # If the number of IFs in the data is >= 2 the IF0 gets the first
1195 # value IF1 the second... NOTE that freqs needs to be
1196 # specified in list of list (e.g. [[],[],...] ).
1197 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]1198 #set the given restfrequency for the whole table (by name)
1199 scan.set_restfreqs(freqs="OH1667")
[391]1200
[931]1201 Note:
1202 To do more sophisticate Restfrequency setting, e.g. on a
1203 source and IF basis, use scantable.set_selection() before using
1204 this function.
[1593]1205 # provide your scantable is called scan
[931]1206 selection = selector()
1207 selection.set_name("ORION*")
1208 selection.set_ifs([1])
1209 scan.set_selection(selection)
1210 scan.set_restfreqs(freqs=86.6e9)
1211
1212 """
1213 varlist = vars()
[1157]1214 from asap import linecatalog
1215 # simple value
[1118]1216 if isinstance(freqs, int) or isinstance(freqs, float):
[1819]1217 # TT mod
1218 #self._setrestfreqs(freqs, "",unit)
1219 self._setrestfreqs([freqs], [""],unit)
[1157]1220 # list of values
[1118]1221 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1222 # list values are scalars
[1118]1223 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1819]1224 self._setrestfreqs(freqs, [""], unit)
[1157]1225 # list values are tuples, (value, name)
1226 elif isinstance(freqs[-1], dict):
[1819]1227 #sel = selector()
1228 #savesel = self._getselection()
1229 #iflist = self.getifnos()
1230 #for i in xrange(len(freqs)):
1231 # sel.set_ifs(iflist[i])
1232 # self._setselection(sel)
1233 # self._setrestfreqs(freqs[i], "",unit)
1234 #self._setselection(savesel)
1235 self._setrestfreqs(freqs["value"],
1236 freqs["name"], unit)
1237 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1238 sel = selector()
1239 savesel = self._getselection()
[1322]1240 iflist = self.getifnos()
[1819]1241 if len(freqs)>len(iflist):
1242 raise ValueError("number of elements in list of list exeeds the current IF selections")
[1157]1243 for i in xrange(len(freqs)):
[1322]1244 sel.set_ifs(iflist[i])
[1259]1245 self._setselection(sel)
[1819]1246 self._setrestfreqs(freqs[i], [""], unit)
[1157]1247 self._setselection(savesel)
1248 # freqs are to be taken from a linecatalog
[1153]1249 elif isinstance(freqs, linecatalog):
1250 sel = selector()
1251 savesel = self._getselection()
1252 for i in xrange(freqs.nrow()):
[1322]1253 sel.set_ifs(iflist[i])
[1153]1254 self._setselection(sel)
1255 self._setrestfreqs(freqs.get_frequency(i),
1256 freqs.get_name(i), "MHz")
1257 # ensure that we are not iterating past nIF
1258 if i == self.nif()-1: break
1259 self._setselection(savesel)
[931]1260 else:
1261 return
1262 self._add_history("set_restfreqs", varlist)
1263
[1360]1264 def shift_refpix(self, delta):
[1589]1265 """
1266 Shift the reference pixel of the Spectra Coordinate by an
1267 integer amount.
1268 Parameters:
1269 delta: the amount to shift by
[1360]1270 Note:
[1589]1271 Be careful using this with broadband data.
[1360]1272 """
[1731]1273 Scantable.shift_refpix(self, delta)
[931]1274
[1259]1275 def history(self, filename=None):
1276 """
1277 Print the history. Optionally to a file.
[1348]1278 Parameters:
1279 filename: The name of the file to save the history to.
[1259]1280 """
[484]1281 hist = list(self._gethistory())
[794]1282 out = "-"*80
[484]1283 for h in hist:
[489]1284 if h.startswith("---"):
[794]1285 out += "\n"+h
[489]1286 else:
1287 items = h.split("##")
1288 date = items[0]
1289 func = items[1]
1290 items = items[2:]
[794]1291 out += "\n"+date+"\n"
1292 out += "Function: %s\n Parameters:" % (func)
[489]1293 for i in items:
1294 s = i.split("=")
[1118]1295 out += "\n %s = %s" % (s[0], s[1])
[794]1296 out += "\n"+"-"*80
[1259]1297 if filename is not None:
1298 if filename is "":
1299 filename = 'scantable_history.txt'
1300 import os
1301 filename = os.path.expandvars(os.path.expanduser(filename))
1302 if not os.path.isdir(filename):
1303 data = open(filename, 'w')
1304 data.write(out)
1305 data.close()
1306 else:
1307 msg = "Illegal file name '%s'." % (filename)
1308 if rcParams['verbose']:
[1819]1309 #print msg
1310 asaplog.push( msg )
1311 print_log( 'ERROR' )
[1259]1312 else:
1313 raise IOError(msg)
1314 if rcParams['verbose']:
1315 try:
1316 from IPython.genutils import page as pager
1317 except ImportError:
1318 from pydoc import pager
1319 pager(out)
1320 else:
1321 return out
[484]1322 return
[513]1323 #
1324 # Maths business
1325 #
[1589]1326 @print_log_dec
[931]1327 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1328 """
[1070]1329 Return the (time) weighted average of a scan.
[513]1330 Note:
[1070]1331 in channels only - align if necessary
[513]1332 Parameters:
1333 mask: an optional mask (only used for 'var' and 'tsys'
1334 weighting)
[558]1335 scanav: True averages each scan separately
1336 False (default) averages all scans together,
[1099]1337 weight: Weighting scheme.
1338 'none' (mean no weight)
1339 'var' (1/var(spec) weighted)
1340 'tsys' (1/Tsys**2 weighted)
1341 'tint' (integration time weighted)
1342 'tintsys' (Tint/Tsys**2)
1343 'median' ( median averaging)
[535]1344 The default is 'tint'
[931]1345 align: align the spectra in velocity before averaging. It takes
1346 the time of the first spectrum as reference time.
[513]1347 Example:
1348 # time average the scantable without using a mask
[710]1349 newscan = scan.average_time()
[513]1350 """
1351 varlist = vars()
[1593]1352 weight = weight or 'TINT'
1353 mask = mask or ()
1354 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1355 scan = (self, )
[989]1356 try:
[1118]1357 if align:
1358 scan = (self.freq_align(insitu=False), )
1359 s = None
1360 if weight.upper() == 'MEDIAN':
1361 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1362 scanav))
1363 else:
1364 s = scantable(self._math._average(scan, mask, weight.upper(),
1365 scanav))
1366 except RuntimeError, msg:
[989]1367 if rcParams['verbose']:
[1819]1368 #print msg
1369 print_log()
1370 asaplog.push( str(msg) )
1371 print_log( 'ERROR' )
[989]1372 return
1373 else: raise
[1099]1374 s._add_history("average_time", varlist)
[1819]1375 print_log()
[513]1376 return s
[710]1377
[1589]1378 @print_log_dec
[876]1379 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1380 """
1381 Return a scan where all spectra are converted to either
1382 Jansky or Kelvin depending upon the flux units of the scan table.
1383 By default the function tries to look the values up internally.
1384 If it can't find them (or if you want to over-ride), you must
1385 specify EITHER jyperk OR eta (and D which it will try to look up
1386 also if you don't set it). jyperk takes precedence if you set both.
1387 Parameters:
1388 jyperk: the Jy / K conversion factor
1389 eta: the aperture efficiency
1390 d: the geomtric diameter (metres)
1391 insitu: if False a new scantable is returned.
1392 Otherwise, the scaling is done in-situ
1393 The default is taken from .asaprc (False)
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
[876]1396 self._math._setinsitu(insitu)
[513]1397 varlist = vars()
[1593]1398 jyperk = jyperk or -1.0
1399 d = d or -1.0
1400 eta = eta or -1.0
[876]1401 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1402 s._add_history("convert_flux", varlist)
[1819]1403 print_log()
[876]1404 if insitu: self._assign(s)
1405 else: return s
[513]1406
[1589]1407 @print_log_dec
[876]1408 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1409 """
1410 Return a scan after applying a gain-elevation correction.
1411 The correction can be made via either a polynomial or a
1412 table-based interpolation (and extrapolation if necessary).
1413 You specify polynomial coefficients, an ascii table or neither.
1414 If you specify neither, then a polynomial correction will be made
1415 with built in coefficients known for certain telescopes (an error
1416 will occur if the instrument is not known).
1417 The data and Tsys are *divided* by the scaling factors.
1418 Parameters:
1419 poly: Polynomial coefficients (default None) to compute a
1420 gain-elevation correction as a function of
1421 elevation (in degrees).
1422 filename: The name of an ascii file holding correction factors.
1423 The first row of the ascii file must give the column
1424 names and these MUST include columns
1425 "ELEVATION" (degrees) and "FACTOR" (multiply data
1426 by this) somewhere.
1427 The second row must give the data type of the
1428 column. Use 'R' for Real and 'I' for Integer.
1429 An example file would be
1430 (actual factors are arbitrary) :
1431
1432 TIME ELEVATION FACTOR
1433 R R R
1434 0.1 0 0.8
1435 0.2 20 0.85
1436 0.3 40 0.9
1437 0.4 60 0.85
1438 0.5 80 0.8
1439 0.6 90 0.75
1440 method: Interpolation method when correcting from a table.
1441 Values are "nearest", "linear" (default), "cubic"
1442 and "spline"
1443 insitu: if False a new scantable is returned.
1444 Otherwise, the scaling is done in-situ
1445 The default is taken from .asaprc (False)
1446 """
1447
1448 if insitu is None: insitu = rcParams['insitu']
[876]1449 self._math._setinsitu(insitu)
[513]1450 varlist = vars()
[1593]1451 poly = poly or ()
[513]1452 from os.path import expandvars
1453 filename = expandvars(filename)
[876]1454 s = scantable(self._math._gainel(self, poly, filename, method))
1455 s._add_history("gain_el", varlist)
[1819]1456 print_log()
[1593]1457 if insitu:
1458 self._assign(s)
1459 else:
1460 return s
[710]1461
[1589]1462 @print_log_dec
[931]1463 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1464 """
1465 Return a scan where all rows have been aligned in frequency/velocity.
1466 The alignment frequency frame (e.g. LSRK) is that set by function
1467 set_freqframe.
1468 Parameters:
1469 reftime: reference time to align at. By default, the time of
1470 the first row of data is used.
1471 method: Interpolation method for regridding the spectra.
1472 Choose from "nearest", "linear", "cubic" (default)
1473 and "spline"
1474 insitu: if False a new scantable is returned.
1475 Otherwise, the scaling is done in-situ
1476 The default is taken from .asaprc (False)
1477 """
[931]1478 if insitu is None: insitu = rcParams["insitu"]
[876]1479 self._math._setinsitu(insitu)
[513]1480 varlist = vars()
[1593]1481 reftime = reftime or ""
[931]1482 s = scantable(self._math._freq_align(self, reftime, method))
[876]1483 s._add_history("freq_align", varlist)
[1819]1484 print_log()
[876]1485 if insitu: self._assign(s)
1486 else: return s
[513]1487
[1589]1488 @print_log_dec
[1725]1489 def opacity(self, tau=None, insitu=None):
[513]1490 """
1491 Apply an opacity correction. The data
1492 and Tsys are multiplied by the correction factor.
1493 Parameters:
[1689]1494 tau: (list of) opacity from which the correction factor is
[513]1495 exp(tau*ZD)
[1689]1496 where ZD is the zenith-distance.
1497 If a list is provided, it has to be of length nIF,
1498 nIF*nPol or 1 and in order of IF/POL, e.g.
1499 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1500 if tau is `None` the opacities are determined from a
1501 model.
[513]1502 insitu: if False a new scantable is returned.
1503 Otherwise, the scaling is done in-situ
1504 The default is taken from .asaprc (False)
1505 """
1506 if insitu is None: insitu = rcParams['insitu']
[876]1507 self._math._setinsitu(insitu)
[513]1508 varlist = vars()
[1689]1509 if not hasattr(tau, "__len__"):
1510 tau = [tau]
[876]1511 s = scantable(self._math._opacity(self, tau))
1512 s._add_history("opacity", varlist)
[1819]1513 print_log()
[876]1514 if insitu: self._assign(s)
1515 else: return s
[513]1516
[1589]1517 @print_log_dec
[513]1518 def bin(self, width=5, insitu=None):
1519 """
1520 Return a scan where all spectra have been binned up.
[1348]1521 Parameters:
[513]1522 width: The bin width (default=5) in pixels
1523 insitu: if False a new scantable is returned.
1524 Otherwise, the scaling is done in-situ
1525 The default is taken from .asaprc (False)
1526 """
1527 if insitu is None: insitu = rcParams['insitu']
[876]1528 self._math._setinsitu(insitu)
[513]1529 varlist = vars()
[876]1530 s = scantable(self._math._bin(self, width))
[1118]1531 s._add_history("bin", varlist)
[1819]1532 print_log()
[1589]1533 if insitu:
1534 self._assign(s)
1535 else:
1536 return s
[513]1537
[1589]1538 @print_log_dec
[513]1539 def resample(self, width=5, method='cubic', insitu=None):
1540 """
[1348]1541 Return a scan where all spectra have been binned up.
[1573]1542
[1348]1543 Parameters:
[513]1544 width: The bin width (default=5) in pixels
1545 method: Interpolation method when correcting from a table.
1546 Values are "nearest", "linear", "cubic" (default)
1547 and "spline"
1548 insitu: if False a new scantable is returned.
1549 Otherwise, the scaling is done in-situ
1550 The default is taken from .asaprc (False)
1551 """
1552 if insitu is None: insitu = rcParams['insitu']
[876]1553 self._math._setinsitu(insitu)
[513]1554 varlist = vars()
[876]1555 s = scantable(self._math._resample(self, method, width))
[1118]1556 s._add_history("resample", varlist)
[1819]1557 print_log()
[876]1558 if insitu: self._assign(s)
1559 else: return s
[513]1560
[1589]1561 @print_log_dec
[946]1562 def average_pol(self, mask=None, weight='none'):
1563 """
1564 Average the Polarisations together.
1565 Parameters:
1566 mask: An optional mask defining the region, where the
1567 averaging will be applied. The output will have all
1568 specified points masked.
1569 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1570 weighted), or 'tsys' (1/Tsys**2 weighted)
1571 """
1572 varlist = vars()
[1593]1573 mask = mask or ()
[1010]1574 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1575 s._add_history("average_pol", varlist)
[1819]1576 print_log()
[992]1577 return s
[513]1578
[1589]1579 @print_log_dec
[1145]1580 def average_beam(self, mask=None, weight='none'):
1581 """
1582 Average the Beams together.
1583 Parameters:
1584 mask: An optional mask defining the region, where the
1585 averaging will be applied. The output will have all
1586 specified points masked.
1587 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1588 weighted), or 'tsys' (1/Tsys**2 weighted)
1589 """
1590 varlist = vars()
[1593]1591 mask = mask or ()
[1145]1592 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1593 s._add_history("average_beam", varlist)
[1819]1594 print_log()
[1145]1595 return s
1596
[1586]1597 def parallactify(self, pflag):
[1617]1598 """
1599 Set a flag to inidcate whether this data should be treated as having
1600 been 'parallactified' (total phase == 0.0)
1601 Parameters:
1602 pflag: Bool inidcating whether to turn this on (True) or
1603 off (False)
1604 """
[1586]1605 varlist = vars()
1606 self._parallactify(pflag)
1607 self._add_history("parallactify", varlist)
1608
[1589]1609 @print_log_dec
[992]1610 def convert_pol(self, poltype=None):
1611 """
1612 Convert the data to a different polarisation type.
[1565]1613 Note that you will need cross-polarisation terms for most conversions.
[992]1614 Parameters:
1615 poltype: The new polarisation type. Valid types are:
[1565]1616 "linear", "circular", "stokes" and "linpol"
[992]1617 """
1618 varlist = vars()
1619 try:
1620 s = scantable(self._math._convertpol(self, poltype))
[1118]1621 except RuntimeError, msg:
[992]1622 if rcParams['verbose']:
[1819]1623 #print msg
1624 print_log()
1625 asaplog.push( str(msg) )
1626 print_log( 'ERROR' )
[1118]1627 return
[992]1628 else:
1629 raise
[1118]1630 s._add_history("convert_pol", varlist)
[1819]1631 print_log()
[992]1632 return s
1633
[1589]1634 @print_log_dec
[1819]1635 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[513]1636 """
1637 Smooth the spectrum by the specified kernel (conserving flux).
1638 Parameters:
1639 kernel: The type of smoothing kernel. Select from
[1574]1640 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1641 or 'poly'
[513]1642 width: The width of the kernel in pixels. For hanning this is
1643 ignored otherwise it defauls to 5 pixels.
1644 For 'gaussian' it is the Full Width Half
1645 Maximum. For 'boxcar' it is the full width.
[1574]1646 For 'rmedian' and 'poly' it is the half width.
1647 order: Optional parameter for 'poly' kernel (default is 2), to
1648 specify the order of the polnomial. Ignored by all other
1649 kernels.
[1819]1650 plot: plot the original and the smoothed spectra.
1651 In this each indivual fit has to be approved, by
1652 typing 'y' or 'n'
[513]1653 insitu: if False a new scantable is returned.
1654 Otherwise, the scaling is done in-situ
1655 The default is taken from .asaprc (False)
1656 Example:
1657 none
1658 """
1659 if insitu is None: insitu = rcParams['insitu']
[876]1660 self._math._setinsitu(insitu)
[513]1661 varlist = vars()
[1819]1662
1663 if plot: orgscan = self.copy()
1664
[1574]1665 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1666 s._add_history("smooth", varlist)
[1819]1667
1668 if plot:
1669 if rcParams['plotter.gui']:
1670 from asap.asaplotgui import asaplotgui as asaplot
1671 else:
1672 from asap.asaplot import asaplot
1673 self._p=asaplot()
1674 self._p.set_panels()
1675 ylab=s._get_ordinate_label()
1676 #self._p.palette(0,["#777777","red"])
1677 for r in xrange(s.nrow()):
1678 xsm=s._getabcissa(r)
1679 ysm=s._getspectrum(r)
1680 xorg=orgscan._getabcissa(r)
1681 yorg=orgscan._getspectrum(r)
1682 self._p.clear()
1683 self._p.hold()
1684 self._p.set_axes('ylabel',ylab)
1685 self._p.set_axes('xlabel',s._getabcissalabel(r))
1686 self._p.set_axes('title',s._getsourcename(r))
1687 self._p.set_line(label='Original',color="#777777")
1688 self._p.plot(xorg,yorg)
1689 self._p.set_line(label='Smoothed',color="red")
1690 self._p.plot(xsm,ysm)
1691 ### Ugly part for legend
1692 for i in [0,1]:
1693 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1694 self._p.release()
1695 ### Ugly part for legend
1696 self._p.subplots[0]['lines']=[]
1697 res = raw_input("Accept smoothing ([y]/n): ")
1698 if res.upper() == 'N':
1699 s._setspectrum(yorg, r)
1700 self._p.unmap()
1701 self._p = None
1702 del orgscan
1703
1704 print_log()
[876]1705 if insitu: self._assign(s)
1706 else: return s
[513]1707
[1589]1708 @print_log_dec
[1617]1709 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1710 insitu=None):
[513]1711 """
1712 Return a scan which has been baselined (all rows) by a polynomial.
1713 Parameters:
[794]1714 mask: an optional mask
1715 order: the order of the polynomial (default is 0)
[1061]1716 plot: plot the fit and the residual. In this each
1717 indivual fit has to be approved, by typing 'y'
1718 or 'n'
[1391]1719 uselin: use linear polynomial fit
[794]1720 insitu: if False a new scantable is returned.
1721 Otherwise, the scaling is done in-situ
1722 The default is taken from .asaprc (False)
[513]1723 Example:
1724 # return a scan baselined by a third order polynomial,
1725 # not using a mask
1726 bscan = scan.poly_baseline(order=3)
[579]1727 """
[513]1728 if insitu is None: insitu = rcParams['insitu']
[1819]1729 if not insitu:
1730 workscan = self.copy()
1731 else:
1732 workscan = self
[513]1733 varlist = vars()
1734 if mask is None:
[1295]1735 mask = [True for i in xrange(self.nchan(-1))]
[1819]1736
[513]1737 from asap.asapfitter import fitter
[1217]1738 try:
1739 f = fitter()
[1391]1740 if uselin:
1741 f.set_function(lpoly=order)
1742 else:
1743 f.set_function(poly=order)
[1819]1744
1745 rows = range(workscan.nrow())
1746 if len(rows) > 0:
1747 self.blpars = []
1748
1749 for r in rows:
1750 # take into account flagtra info (CAS-1434)
1751 flagtra = workscan._getmask(r)
1752 actualmask = mask[:]
1753 if len(actualmask) == 0:
1754 actualmask = list(flagtra[:])
1755 else:
1756 if len(actualmask) != len(flagtra):
1757 raise RuntimeError, "Mask and flagtra have different length"
1758 else:
1759 for i in range(0, len(actualmask)):
1760 actualmask[i] = actualmask[i] and flagtra[i]
1761 f.set_scan(workscan, actualmask)
1762 f.x = workscan._getabcissa(r)
1763 f.y = workscan._getspectrum(r)
1764 f.data = None
1765 f.fit()
1766 if plot:
1767 f.plot(residual=True)
1768 x = raw_input("Accept fit ( [y]/n ): ")
1769 if x.upper() == 'N':
1770 self.blpars.append(None)
1771 continue
1772 workscan._setspectrum(f.fitter.getresidual(), r)
1773 self.blpars.append(f.get_parameters())
1774
1775 if plot:
1776 f._p.unmap()
1777 f._p = None
1778 workscan._add_history("poly_baseline", varlist)
1779 print_log()
1780 if insitu: self._assign(workscan)
1781 else: return workscan
[1217]1782 except RuntimeError:
1783 msg = "The fit failed, possibly because it didn't converge."
1784 if rcParams['verbose']:
[1819]1785 #print msg
1786 print_log()
1787 asaplog.push( str(msg) )
1788 print_log( 'ERROR' )
[1217]1789 return
1790 else:
1791 raise RuntimeError(msg)
[513]1792
[1819]1793
[1118]1794 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1795 threshold=3, chan_avg_limit=1, plot=False,
1796 insitu=None):
[880]1797 """
1798 Return a scan which has been baselined (all rows) by a polynomial.
1799 Spectral lines are detected first using linefinder and masked out
1800 to avoid them affecting the baseline solution.
1801
1802 Parameters:
1803 mask: an optional mask retreived from scantable
1804 edge: an optional number of channel to drop at
1805 the edge of spectrum. If only one value is
1806 specified, the same number will be dropped from
1807 both sides of the spectrum. Default is to keep
[907]1808 all channels. Nested tuples represent individual
[976]1809 edge selection for different IFs (a number of spectral
1810 channels can be different)
[880]1811 order: the order of the polynomial (default is 0)
1812 threshold: the threshold used by line finder. It is better to
1813 keep it large as only strong lines affect the
1814 baseline solution.
[1280]1815 chan_avg_limit:
1816 a maximum number of consequtive spectral channels to
1817 average during the search of weak and broad lines.
1818 The default is no averaging (and no search for weak
1819 lines). If such lines can affect the fitted baseline
1820 (e.g. a high order polynomial is fitted), increase this
1821 parameter (usually values up to 8 are reasonable). Most
1822 users of this method should find the default value
1823 sufficient.
[1061]1824 plot: plot the fit and the residual. In this each
1825 indivual fit has to be approved, by typing 'y'
1826 or 'n'
[880]1827 insitu: if False a new scantable is returned.
1828 Otherwise, the scaling is done in-situ
1829 The default is taken from .asaprc (False)
1830
1831 Example:
1832 scan2=scan.auto_poly_baseline(order=7)
1833 """
1834 if insitu is None: insitu = rcParams['insitu']
1835 varlist = vars()
1836 from asap.asapfitter import fitter
1837 from asap.asaplinefind import linefinder
1838 from asap import _is_sequence_or_number as _is_valid
1839
[976]1840 # check whether edge is set up for each IF individually
[1118]1841 individualedge = False;
1842 if len(edge) > 1:
1843 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1844 individualedge = True;
[907]1845
[1118]1846 if not _is_valid(edge, int) and not individualedge:
[909]1847 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1848 pair of integers specified as a tuple. Nested tuples are allowed \
1849 to make individual selection for different IFs."
[919]1850
[1118]1851 curedge = (0, 0)
1852 if individualedge:
1853 for edgepar in edge:
1854 if not _is_valid(edgepar, int):
1855 raise ValueError, "Each element of the 'edge' tuple has \
1856 to be a pair of integers or an integer."
[907]1857 else:
[1118]1858 curedge = edge;
[880]1859
1860 # setup fitter
1861 f = fitter()
1862 f.set_function(poly=order)
1863
1864 # setup line finder
[1118]1865 fl = linefinder()
[1268]1866 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1867
1868 if not insitu:
[1118]1869 workscan = self.copy()
[880]1870 else:
[1118]1871 workscan = self
[880]1872
[907]1873 fl.set_scan(workscan)
1874
[1118]1875 rows = range(workscan.nrow())
[1819]1876 # Save parameters of baseline fits & masklists as a class attribute.
1877 # NOTICE: It does not reflect changes in scantable!
1878 if len(rows) > 0:
1879 self.blpars=[]
1880 self.masklists=[]
[880]1881 asaplog.push("Processing:")
1882 for r in rows:
[1118]1883 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1884 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1885 workscan.getpol(r), workscan.getcycle(r))
[880]1886 asaplog.push(msg, False)
[907]1887
[976]1888 # figure out edge parameter
[1118]1889 if individualedge:
1890 if len(edge) >= workscan.getif(r):
1891 raise RuntimeError, "Number of edge elements appear to " \
1892 "be less than the number of IFs"
1893 curedge = edge[workscan.getif(r)]
[919]1894
[1819]1895 # take into account flagtra info (CAS-1434)
1896 flagtra = workscan._getmask(r)
1897 actualmask = mask[:]
1898 if len(actualmask) == 0:
1899 actualmask = list(flagtra[:])
1900 else:
1901 if len(actualmask) != len(flagtra):
1902 raise RuntimeError, "Mask and flagtra have different length"
1903 else:
1904 for i in range(0, len(actualmask)):
1905 actualmask[i] = actualmask[i] and flagtra[i]
1906
[976]1907 # setup line finder
[1819]1908 fl.find_lines(r, actualmask, curedge)
1909 outmask=fl.get_mask()
1910 f.set_scan(workscan, fl.get_mask())
1911 f.x = workscan._getabcissa(r)
1912 f.y = workscan._getspectrum(r)
1913 f.data = None
[880]1914 f.fit()
[1819]1915
1916 # Show mask list
1917 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1918 msg = "mask range: "+str(masklist)
1919 asaplog.push(msg, False)
1920
[1061]1921 if plot:
1922 f.plot(residual=True)
1923 x = raw_input("Accept fit ( [y]/n ): ")
1924 if x.upper() == 'N':
[1819]1925 self.blpars.append(None)
1926 self.masklists.append(None)
[1061]1927 continue
[1819]1928
[880]1929 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]1930 self.blpars.append(f.get_parameters())
1931 self.masklists.append(masklist)
[1061]1932 if plot:
1933 f._p.unmap()
1934 f._p = None
1935 workscan._add_history("auto_poly_baseline", varlist)
[880]1936 if insitu:
1937 self._assign(workscan)
1938 else:
1939 return workscan
1940
[1589]1941 @print_log_dec
[914]1942 def rotate_linpolphase(self, angle):
1943 """
1944 Rotate the phase of the complex polarization O=Q+iU correlation.
1945 This is always done in situ in the raw data. So if you call this
1946 function more than once then each call rotates the phase further.
1947 Parameters:
1948 angle: The angle (degrees) to rotate (add) by.
1949 Examples:
1950 scan.rotate_linpolphase(2.3)
1951 """
1952 varlist = vars()
[936]1953 self._math._rotate_linpolphase(self, angle)
[914]1954 self._add_history("rotate_linpolphase", varlist)
[1819]1955 print_log()
[914]1956 return
[710]1957
[1589]1958 @print_log_dec
[914]1959 def rotate_xyphase(self, angle):
1960 """
1961 Rotate the phase of the XY correlation. This is always done in situ
1962 in the data. So if you call this function more than once
1963 then each call rotates the phase further.
1964 Parameters:
1965 angle: The angle (degrees) to rotate (add) by.
1966 Examples:
1967 scan.rotate_xyphase(2.3)
1968 """
1969 varlist = vars()
[936]1970 self._math._rotate_xyphase(self, angle)
[914]1971 self._add_history("rotate_xyphase", varlist)
[1819]1972 print_log()
[914]1973 return
1974
[1589]1975 @print_log_dec
[914]1976 def swap_linears(self):
1977 """
[1573]1978 Swap the linear polarisations XX and YY, or better the first two
[1348]1979 polarisations as this also works for ciculars.
[914]1980 """
1981 varlist = vars()
[936]1982 self._math._swap_linears(self)
[914]1983 self._add_history("swap_linears", varlist)
[1819]1984 print_log()
[914]1985 return
1986
[1589]1987 @print_log_dec
[914]1988 def invert_phase(self):
1989 """
1990 Invert the phase of the complex polarisation
1991 """
1992 varlist = vars()
[936]1993 self._math._invert_phase(self)
[914]1994 self._add_history("invert_phase", varlist)
[1819]1995 print_log()
[914]1996 return
1997
[1589]1998 @print_log_dec
[876]1999 def add(self, offset, insitu=None):
[513]2000 """
2001 Return a scan where all spectra have the offset added
2002 Parameters:
2003 offset: the offset
2004 insitu: if False a new scantable is returned.
2005 Otherwise, the scaling is done in-situ
2006 The default is taken from .asaprc (False)
2007 """
2008 if insitu is None: insitu = rcParams['insitu']
[876]2009 self._math._setinsitu(insitu)
[513]2010 varlist = vars()
[876]2011 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2012 s._add_history("add", varlist)
[1819]2013 print_log()
[876]2014 if insitu:
2015 self._assign(s)
2016 else:
[513]2017 return s
2018
[1589]2019 @print_log_dec
[1308]2020 def scale(self, factor, tsys=True, insitu=None):
[513]2021 """
2022 Return a scan where all spectra are scaled by the give 'factor'
2023 Parameters:
[1819]2024 factor: the scaling factor (float or 1D float list)
[513]2025 insitu: if False a new scantable is returned.
2026 Otherwise, the scaling is done in-situ
2027 The default is taken from .asaprc (False)
2028 tsys: if True (default) then apply the operation to Tsys
2029 as well as the data
2030 """
2031 if insitu is None: insitu = rcParams['insitu']
[876]2032 self._math._setinsitu(insitu)
[513]2033 varlist = vars()
[1819]2034 s = None
2035 import numpy
2036 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2037 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2038 from asapmath import _array2dOp
2039 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2040 else:
2041 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2042 else:
2043 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2044 s._add_history("scale", varlist)
[1819]2045 print_log()
[876]2046 if insitu:
2047 self._assign(s)
2048 else:
[513]2049 return s
2050
[1504]2051 def set_sourcetype(self, match, matchtype="pattern",
2052 sourcetype="reference"):
[1502]2053 """
2054 Set the type of the source to be an source or reference scan
2055 using the provided pattern:
2056 Parameters:
[1504]2057 match: a Unix style pattern, regular expression or selector
2058 matchtype: 'pattern' (default) UNIX style pattern or
2059 'regex' regular expression
[1502]2060 sourcetype: the type of the source to use (source/reference)
2061 """
2062 varlist = vars()
2063 basesel = self.get_selection()
2064 stype = -1
2065 if sourcetype.lower().startswith("r"):
2066 stype = 1
2067 elif sourcetype.lower().startswith("s"):
2068 stype = 0
[1504]2069 else:
[1502]2070 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2071 if matchtype.lower().startswith("p"):
2072 matchtype = "pattern"
2073 elif matchtype.lower().startswith("r"):
2074 matchtype = "regex"
2075 else:
2076 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2077 sel = selector()
2078 if isinstance(match, selector):
2079 sel = match
2080 else:
[1504]2081 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2082 self.set_selection(basesel+sel)
2083 self._setsourcetype(stype)
2084 self.set_selection(basesel)
[1573]2085 self._add_history("set_sourcetype", varlist)
[1502]2086
[1589]2087 @print_log_dec
[1819]2088 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[670]2089 """
2090 This function allows to build quotients automatically.
[1819]2091 It assumes the observation to have the same number of
[670]2092 "ons" and "offs"
2093 Parameters:
[710]2094 preserve: you can preserve (default) the continuum or
2095 remove it. The equations used are
[670]2096 preserve: Output = Toff * (on/off) - Toff
[1070]2097 remove: Output = Toff * (on/off) - Ton
[1573]2098 mode: the on/off detection mode
[1348]2099 'paired' (default)
2100 identifies 'off' scans by the
2101 trailing '_R' (Mopra/Parkes) or
2102 '_e'/'_w' (Tid) and matches
2103 on/off pairs from the observing pattern
[1502]2104 'time'
2105 finds the closest off in time
[1348]2106
[670]2107 """
[1348]2108 modes = ["time", "paired"]
[670]2109 if not mode in modes:
[876]2110 msg = "please provide valid mode. Valid modes are %s" % (modes)
2111 raise ValueError(msg)
2112 varlist = vars()
[1348]2113 s = None
2114 if mode.lower() == "paired":
2115 basesel = self.get_selection()
[1356]2116 sel = selector()+basesel
2117 sel.set_query("SRCTYPE==1")
2118 self.set_selection(sel)
[1348]2119 offs = self.copy()
2120 sel.set_query("SRCTYPE==0")
[1356]2121 self.set_selection(sel)
[1348]2122 ons = self.copy()
2123 s = scantable(self._math._quotient(ons, offs, preserve))
2124 self.set_selection(basesel)
2125 elif mode.lower() == "time":
2126 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2127 s._add_history("auto_quotient", varlist)
[1819]2128 print_log()
[876]2129 return s
[710]2130
[1589]2131 @print_log_dec
[1145]2132 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]2133 """
[1143]2134 Form a quotient using "off" beams when observing in "MX" mode.
2135 Parameters:
[1145]2136 mask: an optional mask to be used when weight == 'stddev'
[1143]2137 weight: How to average the off beams. Default is 'median'.
[1145]2138 preserve: you can preserve (default) the continuum or
2139 remove it. The equations used are
2140 preserve: Output = Toff * (on/off) - Toff
2141 remove: Output = Toff * (on/off) - Ton
[1217]2142 """
[1593]2143 mask = mask or ()
[1141]2144 varlist = vars()
2145 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2146 preoff = scantable(self._math._mx_extract(self, 'off'))
2147 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2148 from asapmath import quotient
[1145]2149 q = quotient(on, off, preserve)
[1143]2150 q._add_history("mx_quotient", varlist)
[1819]2151 print_log()
[1217]2152 return q
[513]2153
[1589]2154 @print_log_dec
[718]2155 def freq_switch(self, insitu=None):
2156 """
2157 Apply frequency switching to the data.
2158 Parameters:
2159 insitu: if False a new scantable is returned.
2160 Otherwise, the swictching is done in-situ
2161 The default is taken from .asaprc (False)
2162 Example:
2163 none
2164 """
2165 if insitu is None: insitu = rcParams['insitu']
[876]2166 self._math._setinsitu(insitu)
[718]2167 varlist = vars()
[876]2168 s = scantable(self._math._freqswitch(self))
[1118]2169 s._add_history("freq_switch", varlist)
[1819]2170 print_log()
[876]2171 if insitu: self._assign(s)
2172 else: return s
[718]2173
[1589]2174 @print_log_dec
[780]2175 def recalc_azel(self):
2176 """
2177 Recalculate the azimuth and elevation for each position.
2178 Parameters:
2179 none
2180 Example:
2181 """
2182 varlist = vars()
[876]2183 self._recalcazel()
[780]2184 self._add_history("recalc_azel", varlist)
[1819]2185 print_log()
[780]2186 return
2187
[1589]2188 @print_log_dec
[513]2189 def __add__(self, other):
2190 varlist = vars()
2191 s = None
2192 if isinstance(other, scantable):
[1573]2193 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2194 elif isinstance(other, float):
[876]2195 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2196 else:
[718]2197 raise TypeError("Other input is not a scantable or float value")
[513]2198 s._add_history("operator +", varlist)
2199 return s
2200
[1589]2201 @print_log_dec
[513]2202 def __sub__(self, other):
2203 """
2204 implicit on all axes and on Tsys
2205 """
2206 varlist = vars()
2207 s = None
2208 if isinstance(other, scantable):
[1588]2209 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2210 elif isinstance(other, float):
[876]2211 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2212 else:
[718]2213 raise TypeError("Other input is not a scantable or float value")
[513]2214 s._add_history("operator -", varlist)
2215 return s
[710]2216
[1589]2217 @print_log_dec
[513]2218 def __mul__(self, other):
2219 """
2220 implicit on all axes and on Tsys
2221 """
2222 varlist = vars()
2223 s = None
2224 if isinstance(other, scantable):
[1588]2225 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2226 elif isinstance(other, float):
[876]2227 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2228 else:
[718]2229 raise TypeError("Other input is not a scantable or float value")
[513]2230 s._add_history("operator *", varlist)
2231 return s
2232
[710]2233
[1589]2234 @print_log_dec
[513]2235 def __div__(self, other):
2236 """
2237 implicit on all axes and on Tsys
2238 """
2239 varlist = vars()
2240 s = None
2241 if isinstance(other, scantable):
[1589]2242 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2243 elif isinstance(other, float):
2244 if other == 0.0:
[718]2245 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2246 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2247 else:
[718]2248 raise TypeError("Other input is not a scantable or float value")
[513]2249 s._add_history("operator /", varlist)
2250 return s
2251
[530]2252 def get_fit(self, row=0):
2253 """
2254 Print or return the stored fits for a row in the scantable
2255 Parameters:
2256 row: the row which the fit has been applied to.
2257 """
2258 if row > self.nrow():
2259 return
[976]2260 from asap.asapfit import asapfit
[530]2261 fit = asapfit(self._getfit(row))
[718]2262 if rcParams['verbose']:
[1819]2263 #print fit
2264 asaplog.push( '%s' %(fit) )
2265 print_log()
[530]2266 return
2267 else:
2268 return fit.as_dict()
2269
[1483]2270 def flag_nans(self):
2271 """
2272 Utility function to flag NaN values in the scantable.
2273 """
2274 import numpy
2275 basesel = self.get_selection()
2276 for i in range(self.nrow()):
[1589]2277 sel = self.get_row_selector(i)
2278 self.set_selection(basesel+sel)
[1483]2279 nans = numpy.isnan(self._getspectrum(0))
2280 if numpy.any(nans):
2281 bnans = [ bool(v) for v in nans]
2282 self.flag(bnans)
2283 self.set_selection(basesel)
2284
[1588]2285 def get_row_selector(self, rowno):
2286 return selector(beams=self.getbeam(rowno),
2287 ifs=self.getif(rowno),
2288 pols=self.getpol(rowno),
2289 scans=self.getscan(rowno),
2290 cycles=self.getcycle(rowno))
[1573]2291
[484]2292 def _add_history(self, funcname, parameters):
[1435]2293 if not rcParams['scantable.history']:
2294 return
[484]2295 # create date
2296 sep = "##"
2297 from datetime import datetime
2298 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2299 hist = dstr+sep
2300 hist += funcname+sep#cdate+sep
2301 if parameters.has_key('self'): del parameters['self']
[1118]2302 for k, v in parameters.iteritems():
[484]2303 if type(v) is dict:
[1118]2304 for k2, v2 in v.iteritems():
[484]2305 hist += k2
2306 hist += "="
[1118]2307 if isinstance(v2, scantable):
[484]2308 hist += 'scantable'
2309 elif k2 == 'mask':
[1118]2310 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2311 hist += str(self._zip_mask(v2))
2312 else:
2313 hist += str(v2)
[484]2314 else:
[513]2315 hist += str(v2)
[484]2316 else:
2317 hist += k
2318 hist += "="
[1118]2319 if isinstance(v, scantable):
[484]2320 hist += 'scantable'
2321 elif k == 'mask':
[1118]2322 if isinstance(v, list) or isinstance(v, tuple):
[513]2323 hist += str(self._zip_mask(v))
2324 else:
2325 hist += str(v)
[484]2326 else:
2327 hist += str(v)
2328 hist += sep
2329 hist = hist[:-2] # remove trailing '##'
2330 self._addhistory(hist)
2331
[710]2332
[484]2333 def _zip_mask(self, mask):
2334 mask = list(mask)
2335 i = 0
2336 segments = []
2337 while mask[i:].count(1):
2338 i += mask[i:].index(1)
2339 if mask[i:].count(0):
2340 j = i + mask[i:].index(0)
2341 else:
[710]2342 j = len(mask)
[1118]2343 segments.append([i, j])
[710]2344 i = j
[484]2345 return segments
[714]2346
[626]2347 def _get_ordinate_label(self):
2348 fu = "("+self.get_fluxunit()+")"
2349 import re
2350 lbl = "Intensity"
[1118]2351 if re.match(".K.", fu):
[626]2352 lbl = "Brightness Temperature "+ fu
[1118]2353 elif re.match(".Jy.", fu):
[626]2354 lbl = "Flux density "+ fu
2355 return lbl
[710]2356
[876]2357 def _check_ifs(self):
2358 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2359 nchans = filter(lambda t: t > 0, nchans)
[876]2360 return (sum(nchans)/len(nchans) == nchans[0])
[976]2361
[1819]2362 def _fill(self, names, unit, average, getpt, antenna):
[976]2363 import os
2364 from asap._asap import stfiller
2365 first = True
2366 fullnames = []
2367 for name in names:
2368 name = os.path.expandvars(name)
2369 name = os.path.expanduser(name)
2370 if not os.path.exists(name):
2371 msg = "File '%s' does not exists" % (name)
2372 if rcParams['verbose']:
2373 asaplog.push(msg)
[1819]2374 #print asaplog.pop().strip()
2375 print_log( 'ERROR' )
[976]2376 return
2377 raise IOError(msg)
2378 fullnames.append(name)
2379 if average:
2380 asaplog.push('Auto averaging integrations')
[1079]2381 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2382 for name in fullnames:
[1073]2383 tbl = Scantable(stype)
2384 r = stfiller(tbl)
[1504]2385 rx = rcParams['scantable.reference']
2386 r._setreferenceexpr(rx)
[976]2387 msg = "Importing %s..." % (name)
[1118]2388 asaplog.push(msg, False)
[976]2389 print_log()
[1819]2390 r._open(name, antenna, -1, -1, getpt)
[976]2391 r._read()
2392 if average:
[1118]2393 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2394 if not first:
2395 tbl = self._math._merge([self, tbl])
2396 Scantable.__init__(self, tbl)
2397 r._close()
[1118]2398 del r, tbl
[976]2399 first = False
2400 if unit is not None:
2401 self.set_fluxunit(unit)
[1824]2402 if not is_casapy():
2403 self.set_freqframe(rcParams['scantable.freqframe'])
[976]2404
[1402]2405 def __getitem__(self, key):
2406 if key < 0:
2407 key += self.nrow()
2408 if key >= self.nrow():
2409 raise IndexError("Row index out of range.")
2410 return self._getspectrum(key)
2411
2412 def __setitem__(self, key, value):
2413 if key < 0:
2414 key += self.nrow()
2415 if key >= self.nrow():
2416 raise IndexError("Row index out of range.")
2417 if not hasattr(value, "__len__") or \
2418 len(value) > self.nchan(self.getif(key)):
2419 raise ValueError("Spectrum length doesn't match.")
2420 return self._setspectrum(value, key)
2421
2422 def __len__(self):
2423 return self.nrow()
2424
2425 def __iter__(self):
2426 for i in range(len(self)):
2427 yield self[i]
Note: See TracBrowser for help on using the repository browser.