source: trunk/python/scantable.py@ 1865

Last change on this file since 1865 was 1862, checked in by Malte Marquarding, 14 years ago

renamed print_log_dec to more explicit asaplog_post_dec

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 85.6 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[102]18
[1689]19
def preserve_selection(func):
    """Decorator restoring the scantable's selection after *func* runs.

    The wrapped method may modify the selection freely; whatever
    selection was active before the call is re-applied afterwards,
    even if the call raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always restore, even on error
            obj.set_selection(saved)
        return result
    return wrap
30
def is_scantable(filename):
    """Is the given file a scantable?

    A scantable is a directory that contains 'table.info' but
    no 'table.f1'.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not os.path.isdir(filename):
        return False
    if os.path.exists(filename + '/table.f1'):
        return False
    return os.path.exists(filename + '/table.info')
42
43
[876]44class scantable(Scantable):
[1846]45 """\
46 The ASAP container for scans (single-dish data).
[102]47 """
[1819]48
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, getpt=None,
                 antenna=None, parallactify=None):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        if getpt is None:
            getpt = True
        # antenna selection is not supported by the current filler;
        # warn and fall through to the default
        if antenna is not None:
            asaplog.push("Antenna selection currently unsupported."
                         "Using '0'")
            asaplog.post('WARN')
        # normalise 'antenna' to a comma-separated string
        if antenna is None:
            antenna = ''
        elif type(antenna) == int:
            antenna = '%s' % antenna
        elif type(antenna) == list:
            tmpstr = ''
            for i in range( len(antenna) ):
                if type(antenna[i]) == int:
                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
                elif type(antenna[i]) == str:
                    tmpstr=tmpstr+antenna[i]+','
                else:
                    raise TypeError('Bad antenna selection.')
            antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the call arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # open an existing on-disk scantable
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif os.path.isdir(filename) \
                         and not os.path.exists(filename+'/table.f1'):
                    # a directory, but not a valid scantable layout
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
                else:
                    # rpfits/sdfits/ms input: run it through the filler
                    self._fill([filename], unit, average, getpt, antenna)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names to fill
                self._fill(filename, unit, average, getpt, antenna)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]137
[1862]138 @asaplog_post_dec
[876]139 def save(self, name=None, format=None, overwrite=False):
[1846]140 """\
[1280]141 Store the scantable on disk. This can be an asap (aips++) Table,
142 SDFITS or MS2 format.
[1846]143
[116]144 Parameters:
[1846]145
[1093]146 name: the name of the outputfile. For format "ASCII"
147 this is the root file name (data in 'name'.txt
[497]148 and header in 'name'_header.txt)
[1855]149
[116]150 format: an optional file format. Default is ASAP.
[1855]151 Allowed are:
152
153 * 'ASAP' (save as ASAP [aips++] Table),
154 * 'SDFITS' (save as SDFITS file)
155 * 'ASCII' (saves as ascii text file)
156 * 'MS2' (saves as an casacore MeasurementSet V2)
157 * 'FITS' (save as image FITS - not readable by class)
158 * 'CLASS' (save as FITS readable by CLASS)
159
[411]160 overwrite: If the file should be overwritten if it exists.
[256]161 The default False is to return with warning
[411]162 without writing the output. USE WITH CARE.
[1855]163
[1846]164 Example::
165
[116]166 scan.save('myscan.asap')
[1118]167 scan.save('myscan.sdfits', 'SDFITS')
[1846]168
[116]169 """
[411]170 from os import path
[1593]171 format = format or rcParams['scantable.save']
[256]172 suffix = '.'+format.lower()
[1118]173 if name is None or name == "":
[256]174 name = 'scantable'+suffix
[718]175 msg = "No filename given. Using default name %s..." % name
176 asaplog.push(msg)
[411]177 name = path.expandvars(name)
[256]178 if path.isfile(name) or path.isdir(name):
179 if not overwrite:
[718]180 msg = "File %s exists." % name
[1859]181 raise IOError(msg)
[451]182 format2 = format.upper()
183 if format2 == 'ASAP':
[116]184 self._save(name)
185 else:
[989]186 from asap._asap import stwriter as stw
[1118]187 writer = stw(format2)
188 writer.write(self, name)
[116]189 return
190
[102]191 def copy(self):
[1846]192 """Return a copy of this scantable.
193
194 *Note*:
195
[1348]196 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]197
198 Example::
199
[102]200 copiedscan = scan.copy()
[1846]201
[102]202 """
[876]203 sd = scantable(Scantable._copy(self))
[113]204 return sd
205
[1093]206 def drop_scan(self, scanid=None):
[1846]207 """\
[1093]208 Return a new scantable where the specified scan number(s) has(have)
209 been dropped.
[1846]210
[1093]211 Parameters:
[1846]212
[1093]213 scanid: a (list of) scan number(s)
[1846]214
[1093]215 """
216 from asap import _is_sequence_or_number as _is_valid
217 from asap import _to_list
218 from asap import unique
219 if not _is_valid(scanid):
[1859]220 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
221 scanid = _to_list(scanid)
222 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
223 for sid in scanid: allscans.remove(sid)
224 if len(allscans) == 0:
225 raise ValueError("Can't remove all scans")
226 sel = selector(scans=allscans)
227 return self._select_copy(sel)
[1093]228
[1594]229 def _select_copy(self, selection):
230 orig = self.get_selection()
231 self.set_selection(orig+selection)
232 cp = self.copy()
233 self.set_selection(orig)
234 return cp
235
[102]236 def get_scan(self, scanid=None):
[1855]237 """\
[102]238 Return a specific scan (by scanno) or collection of scans (by
239 source name) in a new scantable.
[1846]240
241 *Note*:
242
[1348]243 See scantable.drop_scan() for the inverse operation.
[1846]244
[102]245 Parameters:
[1846]246
[513]247 scanid: a (list of) scanno or a source name, unix-style
248 patterns are accepted for source name matching, e.g.
249 '*_R' gets all 'ref scans
[1846]250
251 Example::
252
[513]253 # get all scans containing the source '323p459'
254 newscan = scan.get_scan('323p459')
255 # get all 'off' scans
256 refscans = scan.get_scan('*_R')
257 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]258 newscan = scan.get_scan([0, 2, 7, 10])
[1846]259
[102]260 """
261 if scanid is None:
[1859]262 raise RuntimeError( 'Please specify a scan no or name to '
263 'retrieve from the scantable' )
[102]264 try:
[946]265 bsel = self.get_selection()
266 sel = selector()
[102]267 if type(scanid) is str:
[946]268 sel.set_name(scanid)
[1594]269 return self._select_copy(sel)
[102]270 elif type(scanid) is int:
[946]271 sel.set_scans([scanid])
[1594]272 return self._select_copy(sel)
[381]273 elif type(scanid) is list:
[946]274 sel.set_scans(scanid)
[1594]275 return self._select_copy(sel)
[381]276 else:
[718]277 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]278 raise TypeError(msg)
[102]279 except RuntimeError:
[1859]280 raise
[102]281
    def __str__(self):
        # printable form: delegate to the C++ summary (verbose=True)
        return Scantable._summary(self, True)
[102]284
[976]285 def summary(self, filename=None):
[1846]286 """\
[102]287 Print a summary of the contents of this scantable.
[1846]288
[102]289 Parameters:
[1846]290
[102]291 filename: the name of a file to write the putput to
292 Default - no file output
[1846]293
[102]294 """
[976]295 info = Scantable._summary(self, True)
[102]296 if filename is not None:
[256]297 if filename is "":
298 filename = 'scantable_summary.txt'
[415]299 from os.path import expandvars, isdir
[411]300 filename = expandvars(filename)
[415]301 if not isdir(filename):
[413]302 data = open(filename, 'w')
303 data.write(info)
304 data.close()
305 else:
[718]306 msg = "Illegal file name '%s'." % (filename)
[1859]307 raise IOError(msg)
308 return page(info)
[710]309
    def get_spectrum(self, rowno):
        """Return the spectrum for the given row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]319
    def get_mask(self, rowno):
        """Return the mask for the given row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the mask from

        """
        return self._getmask(rowno)
329
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the given row in the scantable.

        Parameters:

            spec:  the new spectrum

            rowno: the row number to set the spectrum for

        """
        # the new spectrum must have exactly nchan() values
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
342
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno: the row number for the spectral coordinate

        """
        # wrap the base-class coordinate in the python-side class
        return coordinate(Scantable.get_coordinate(self, rowno))
365
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0) # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        # wrap the low-level selection in a python selector object
        return selector(self._getselection())
378
[1576]379 def set_selection(self, selection=None, **kw):
[1846]380 """\
[1005]381 Select a subset of the data. All following operations on this scantable
382 are only applied to thi selection.
[1846]383
[1005]384 Parameters:
[1697]385
[1846]386 selection: a selector object (default unset the selection), or
387 any combination of "pols", "ifs", "beams", "scans",
388 "cycles", "name", "query"
[1697]389
[1846]390 Examples::
[1697]391
[1005]392 sel = selector() # create a selection object
[1118]393 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]394 scan.set_selection(sel) # set the selection
395 scan.summary() # will only print summary of scanno 0 an 3
396 scan.set_selection() # unset the selection
[1697]397 # or the equivalent
398 scan.set_selection(scans=[0,3])
399 scan.summary() # will only print summary of scanno 0 an 3
400 scan.set_selection() # unset the selection
[1846]401
[946]402 """
[1576]403 if selection is None:
404 # reset
405 if len(kw) == 0:
406 selection = selector()
407 else:
408 # try keywords
409 for k in kw:
410 if k not in selector.fields:
411 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
412 selection = selector(**kw)
[946]413 self._setselection(selection)
414
[1819]415 def get_row(self, row=0, insitu=None):
[1846]416 """\
[1819]417 Select a row in the scantable.
418 Return a scantable with single row.
[1846]419
[1819]420 Parameters:
[1846]421
422 row: row no of integration, default is 0.
423 insitu: if False a new scantable is returned. Otherwise, the
424 scaling is done in-situ. The default is taken from .asaprc
425 (False)
426
[1819]427 """
428 if insitu is None: insitu = rcParams['insitu']
429 if not insitu:
430 workscan = self.copy()
431 else:
432 workscan = self
433 # Select a row
434 sel=selector()
435 sel.set_scans([workscan.getscan(row)])
436 sel.set_cycles([workscan.getcycle(row)])
437 sel.set_beams([workscan.getbeam(row)])
438 sel.set_ifs([workscan.getif(row)])
439 sel.set_polarisations([workscan.getpol(row)])
440 sel.set_name(workscan._getsourcename(row))
441 workscan.set_selection(sel)
442 if not workscan.nrow() == 1:
443 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
444 raise RuntimeError(msg)
445 del sel
446 if insitu:
447 self._assign(workscan)
448 else:
449 return workscan
450
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f'):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                  'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask: an optional mask specifying where the statistic
                  should be determined.

            form: format string to print statistic values

        Example::

            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abcissa value at the extremum
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max statistics also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc: statvals = self._math._stats(self, mask, stat)

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            refstr = ''
            statunit= ''
            if getchan:
                # abcissa/spectrum value at the extremum channel
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # report the abcissa value as the statistic itself
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only label axes that are not degenerate
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
            out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # Route the report through a per-user temp file and the asap
        # logger. NOTE(review): predictable /tmp path and Python-2-only
        # constructs (has_key, commands, print >>) — candidates for
        # tempfile/getpass on a modernisation pass.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
544
    def chan2data(self, rowno=0, chan=0):
        """\
        Returns channel/frequency/velocity and spectral value
        at an arbitrary row and channel in the scantable.

        Parameters:

            rowno: a row number in the scantable. Default is the
                   first row, i.e. rowno=0

            chan:  a channel in the scantable. Default is the first
                   channel, i.e. pos=0

        Returns:

            a pair of {'unit': ..., 'value': ...} dicts for the abcissa
            and the spectrum; implicitly None when either argument is
            not an int.

        """
        if isinstance(rowno, int) and isinstance(chan, int):
            qx = {'unit': self.get_unit(),
                  'value': self._getabcissa(rowno)[chan]}
            qy = {'unit': self.get_fluxunit(),
                  'value': self._getspectrum(rowno)[chan]}
            return qx, qy
565
[1118]566 def stddev(self, mask=None):
[1846]567 """\
[135]568 Determine the standard deviation of the current beam/if/pol
569 Takes a 'mask' as an optional parameter to specify which
570 channels should be excluded.
[1846]571
[135]572 Parameters:
[1846]573
[135]574 mask: an optional mask specifying where the standard
575 deviation should be determined.
576
[1846]577 Example::
578
[135]579 scan.set_unit('channel')
[1118]580 msk = scan.create_mask([100, 200], [500, 600])
[135]581 scan.stddev(mask=m)
[1846]582
[135]583 """
[1118]584 return self.stats(stat='stddev', mask=mask);
[135]585
[1003]586
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the low-level sequence to a plain python list
        return list(Scantable.get_column_names(self))
[1003]592
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row: the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # default: report per-row Tsys through the logging callback helper
        return self._row_callback(self._gettsys, "Tsys")
[256]609
[1730]610
611 def get_weather(self, row=-1):
[1846]612 """\
613 Return the weather informations.
614
615 Parameters:
616
617 row: the rowno to get the information for. (default all rows)
618
619 Returns:
620
621 a dict or list of of dicts of values for the current selection
622
623 """
624
[1730]625 values = self._get_column(self._get_weather, row)
626 if row > -1:
627 return {'temperature': values[0],
628 'pressure': values[1], 'humidity' : values[2],
629 'windspeed' : values[3], 'windaz' : values[4]
630 }
631 else:
632 out = []
633 for r in values:
634
635 out.append({'temperature': r[0],
636 'pressure': r[1], 'humidity' : r[2],
637 'windspeed' : r[3], 'windaz' : r[4]
638 })
639 return out
640
    def _row_callback(self, callback, label):
        # Internal helper: evaluate callback(i) for every row, build a
        # formatted per-row report (scan/time and any non-degenerate
        # beam/IF/pol labels) and post it to the asap logger under the
        # given label. Returns the list of callback values.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only label axes that have more than one element
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]664
    def _get_column(self, callback, row=-1):
        """Apply *callback* to a single row, or to every row for row=-1.

        Returns callback(row) for a valid row, a list of per-row results
        for row=-1, and (implicitly) None for an out-of-range row.
        """
        if row == -1:
            return [callback(i) for i in range(self.nrow())]
        else:
            if 0 <= row < self.nrow():
                return callback(row)
[256]673
[1070]674
[1348]675 def get_time(self, row=-1, asdatetime=False):
[1846]676 """\
[113]677 Get a list of time stamps for the observations.
[1348]678 Return a datetime object for each integration time stamp in the scantable.
[1846]679
[113]680 Parameters:
[1846]681
[1348]682 row: row no of integration. Default -1 return all rows
[1855]683
[1348]684 asdatetime: return values as datetime objects rather than strings
[1846]685
[113]686 """
[1175]687 from time import strptime
688 from datetime import datetime
[1392]689 times = self._get_column(self._gettime, row)
[1348]690 if not asdatetime:
[1392]691 return times
[1175]692 format = "%Y/%m/%d/%H:%M:%S"
693 if isinstance(times, list):
694 return [datetime(*strptime(i, format)[:6]) for i in times]
695 else:
696 return datetime(*strptime(times, format)[:6])
[102]697
[1348]698
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]710
[1573]711
    def get_sourcename(self, row=-1):
        """\
        Get a list of source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]722
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]734
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]745
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]757
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
769
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
781
[1862]782 @asaplog_post_dec
[102]783 def set_unit(self, unit='channel'):
[1846]784 """\
[102]785 Set the unit for all following operations on this scantable
[1846]786
[102]787 Parameters:
[1846]788
789 unit: optional unit, default is 'channel'. Use one of '*Hz',
790 'km/s', 'channel' or equivalent ''
791
[102]792 """
[484]793 varlist = vars()
[1118]794 if unit in ['', 'pixel', 'channel']:
[113]795 unit = ''
796 inf = list(self._getcoordinfo())
797 inf[0] = unit
798 self._setcoordinfo(inf)
[1118]799 self._add_history("set_unit", varlist)
[113]800
[1862]801 @asaplog_post_dec
[484]802 def set_instrument(self, instr):
[1846]803 """\
[1348]804 Set the instrument for subsequent processing.
[1846]805
[358]806 Parameters:
[1846]807
[710]808 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]809 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]810
[358]811 """
812 self._setInstrument(instr)
[1118]813 self._add_history("set_instument", vars())
[358]814
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype: 'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
827
[1862]828 @asaplog_post_dec
[276]829 def set_doppler(self, doppler='RADIO'):
[1846]830 """\
[276]831 Set the doppler for all following operations on this scantable.
[1846]832
[276]833 Parameters:
[1846]834
[276]835 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]836
[276]837 """
[484]838 varlist = vars()
[276]839 inf = list(self._getcoordinfo())
840 inf[2] = doppler
841 self._setcoordinfo(inf)
[1118]842 self._add_history("set_doppler", vars())
[710]843
[1862]844 @asaplog_post_dec
[226]845 def set_freqframe(self, frame=None):
[1846]846 """\
[113]847 Set the frame type of the Spectral Axis.
[1846]848
[113]849 Parameters:
[1846]850
[591]851 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]852 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]853 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]854
855 Example::
856
[113]857 scan.set_freqframe('BARY')
[1846]858
[113]859 """
[1593]860 frame = frame or rcParams['scantable.freqframe']
[484]861 varlist = vars()
[1819]862 # "REST" is not implemented in casacore
863 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
864 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
865 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]866 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]867
[989]868 if frame in valid:
[113]869 inf = list(self._getcoordinfo())
870 inf[1] = frame
871 self._setcoordinfo(inf)
[1118]872 self._add_history("set_freqframe", varlist)
[102]873 else:
[1118]874 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]875 raise TypeError(msg)
[710]876
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame: an optional frame type, default ''. Valid frames are:
                   'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # validation and conversion are handled by the C++ base class
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]895
[113]896 def get_unit(self):
[1846]897 """\
[113]898 Get the default unit set in this scantable
[1846]899
[113]900 Returns:
[1846]901
[113]902 A unit string
[1846]903
[113]904 """
905 inf = self._getcoordinfo()
906 unit = inf[0]
907 if unit == '': unit = 'channel'
908 return unit
[102]909
    @asaplog_post_dec
    def get_abcissa(self, rowno=0):
        """\
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol

        Parameters:

            rowno: an optional row number in the scantable. Default is the
                   first row, i.e. rowno=0

        Returns:

            The abcissa values and the format string (as a dictionary)

        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        return abc, lbl
[113]929
[1862]930 @asaplog_post_dec
[1819]931 def flag(self, mask=None, unflag=False):
[1846]932 """\
[1001]933 Flag the selected data using an optional channel mask.
[1846]934
[1001]935 Parameters:
[1846]936
[1001]937 mask: an optional channel mask, created with create_mask. Default
938 (no mask) is all channels.
[1855]939
[1819]940 unflag: if True, unflag the data
[1846]941
[1001]942 """
943 varlist = vars()
[1593]944 mask = mask or []
[1859]945 self._flag(mask, unflag)
[1001]946 self._add_history("flag", varlist)
947
[1862]948 @asaplog_post_dec
[1819]949 def flag_row(self, rows=[], unflag=False):
[1846]950 """\
[1819]951 Flag the selected data in row-based manner.
[1846]952
[1819]953 Parameters:
[1846]954
[1843]955 rows: list of row numbers to be flagged. Default is no row
956 (must be explicitly specified to execute row-based flagging).
[1855]957
[1819]958 unflag: if True, unflag the data.
[1846]959
[1819]960 """
961 varlist = vars()
[1859]962 self._flag_row(rows, unflag)
[1819]963 self._add_history("flag_row", varlist)
964
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres:      upper threshold.

            dthres:      lower threshold

            clipoutside: True for flagging data outside the range
                         [dthres:uthres]. False for flagging data inside
                         the range.

            unflag:      if True, unflag the data.

        """
        varlist = vars()
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
985
[1862]986 @asaplog_post_dec
[1584]987 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]988 """\
[1192]989 Flag the data in 'lag' space by providing a frequency to remove.
[1584]990 Flagged data in the scantable gets interpolated over the region.
[1192]991 No taper is applied.
[1846]992
[1192]993 Parameters:
[1846]994
[1579]995 start: the start frequency (really a period within the
996 bandwidth) or period to remove
[1855]997
[1579]998 end: the end frequency or period to remove
[1855]999
[1584]1000 unit: the frequency unit (default "MHz") or "" for
[1579]1001 explicit lag channels
[1846]1002
1003 *Notes*:
1004
[1579]1005 It is recommended to flag edges of the band or strong
[1348]1006 signals beforehand.
[1846]1007
[1192]1008 """
1009 if insitu is None: insitu = rcParams['insitu']
1010 self._math._setinsitu(insitu)
1011 varlist = vars()
[1579]1012 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1013 if not (unit == "" or base.has_key(unit)):
[1192]1014 raise ValueError("%s is not a valid unit." % unit)
[1859]1015 if unit == "":
1016 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1017 else:
1018 s = scantable(self._math._lag_flag(self, start*base[unit],
1019 end*base[unit], "frequency"))
[1192]1020 s._add_history("lag_flag", varlist)
1021 if insitu:
1022 self._assign(s)
1023 else:
1024 return s
[1001]1025
[1862]1026 @asaplog_post_dec
[113]1027 def create_mask(self, *args, **kwargs):
[1846]1028 """\
[1118]1029 Compute and return a mask based on [min, max] windows.
[189]1030 The specified windows are to be INCLUDED, when the mask is
[113]1031 applied.
[1846]1032
[102]1033 Parameters:
[1846]1034
[1118]1035 [min, max], [min2, max2], ...
[1024]1036 Pairs of start/end points (inclusive)specifying the regions
[102]1037 to be masked
[1855]1038
[189]1039 invert: optional argument. If specified as True,
1040 return an inverted mask, i.e. the regions
1041 specified are EXCLUDED
[1855]1042
[513]1043 row: create the mask using the specified row for
1044 unit conversions, default is row=0
1045 only necessary if frequency varies over rows.
[1846]1046
1047 Examples::
1048
[113]1049 scan.set_unit('channel')
[1846]1050 # a)
[1118]1051 msk = scan.create_mask([400, 500], [800, 900])
[189]1052 # masks everything outside 400 and 500
[113]1053 # and 800 and 900 in the unit 'channel'
1054
[1846]1055 # b)
[1118]1056 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1057 # masks the regions between 400 and 500
[113]1058 # and 800 and 900 in the unit 'channel'
[1846]1059
1060 # c)
1061 #mask only channel 400
[1554]1062 msk = scan.create_mask([400])
[1846]1063
[102]1064 """
[1554]1065 row = kwargs.get("row", 0)
[513]1066 data = self._getabcissa(row)
[113]1067 u = self._getcoordinfo()[0]
[1859]1068 if u == "":
1069 u = "channel"
1070 msg = "The current mask window unit is %s" % u
1071 i = self._check_ifs()
1072 if not i:
1073 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1074 asaplog.push(msg)
[102]1075 n = self.nchan()
[1295]1076 msk = _n_bools(n, False)
[710]1077 # test if args is a 'list' or a 'normal *args - UGLY!!!
1078
[1118]1079 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1080 and args or args[0]
[710]1081 for window in ws:
[1554]1082 if len(window) == 1:
1083 window = [window[0], window[0]]
1084 if len(window) == 0 or len(window) > 2:
1085 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1086 if window[0] > window[1]:
1087 tmp = window[0]
1088 window[0] = window[1]
1089 window[1] = tmp
[102]1090 for i in range(n):
[1024]1091 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1092 msk[i] = True
[113]1093 if kwargs.has_key('invert'):
1094 if kwargs.get('invert'):
[1295]1095 msk = mask_not(msk)
[102]1096 return msk
[710]1097
[1819]1098 def get_masklist(self, mask=None, row=0):
[1846]1099 """\
[1819]1100 Compute and return a list of mask windows, [min, max].
[1846]1101
[1819]1102 Parameters:
[1846]1103
[1819]1104 mask: channel mask, created with create_mask.
[1855]1105
[1819]1106 row: calcutate the masklist using the specified row
1107 for unit conversions, default is row=0
1108 only necessary if frequency varies over rows.
[1846]1109
[1819]1110 Returns:
[1846]1111
[1819]1112 [min, max], [min2, max2], ...
1113 Pairs of start/end points (inclusive)specifying
1114 the masked regions
[1846]1115
[1819]1116 """
1117 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1118 raise TypeError("The mask should be list or tuple.")
1119 if len(mask) < 2:
1120 raise TypeError("The mask elements should be > 1")
1121 if self.nchan() != len(mask):
1122 msg = "Number of channels in scantable != number of mask elements"
1123 raise TypeError(msg)
1124 data = self._getabcissa(row)
1125 u = self._getcoordinfo()[0]
[1859]1126 if u == "":
1127 u = "channel"
1128 msg = "The current mask window unit is %s" % u
1129 i = self._check_ifs()
1130 if not i:
1131 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1132 asaplog.push(msg)
[1819]1133 masklist=[]
1134 ist, ien = None, None
1135 ist, ien=self.get_mask_indices(mask)
1136 if ist is not None and ien is not None:
1137 for i in xrange(len(ist)):
1138 range=[data[ist[i]],data[ien[i]]]
1139 range.sort()
1140 masklist.append([range[0],range[1]])
1141 return masklist
1142
1143 def get_mask_indices(self, mask=None):
[1846]1144 """\
[1819]1145 Compute and Return lists of mask start indices and mask end indices.
[1855]1146
1147 Parameters:
1148
[1819]1149 mask: channel mask, created with create_mask.
[1846]1150
[1819]1151 Returns:
[1846]1152
[1819]1153 List of mask start indices and that of mask end indices,
1154 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1155
[1819]1156 """
1157 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1158 raise TypeError("The mask should be list or tuple.")
1159 if len(mask) < 2:
1160 raise TypeError("The mask elements should be > 1")
1161 istart=[]
1162 iend=[]
1163 if mask[0]: istart.append(0)
1164 for i in range(len(mask)-1):
1165 if not mask[i] and mask[i+1]:
1166 istart.append(i+1)
1167 elif mask[i] and not mask[i+1]:
1168 iend.append(i)
1169 if mask[len(mask)-1]: iend.append(len(mask)-1)
1170 if len(istart) != len(iend):
1171 raise RuntimeError("Numbers of mask start != mask end.")
1172 for i in range(len(istart)):
1173 if istart[i] > iend[i]:
1174 raise RuntimeError("Mask start index > mask end index")
1175 break
1176 return istart,iend
1177
1178# def get_restfreqs(self):
1179# """
1180# Get the restfrequency(s) stored in this scantable.
1181# The return value(s) are always of unit 'Hz'
1182# Parameters:
1183# none
1184# Returns:
1185# a list of doubles
1186# """
1187# return list(self._getrestfreqs())
1188
1189 def get_restfreqs(self, ids=None):
[1846]1190 """\
[256]1191 Get the restfrequency(s) stored in this scantable.
1192 The return value(s) are always of unit 'Hz'
[1846]1193
[256]1194 Parameters:
[1846]1195
[1819]1196 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1197 be retrieved
[1846]1198
[256]1199 Returns:
[1846]1200
[1819]1201 dictionary containing ids and a list of doubles for each id
[1846]1202
[256]1203 """
[1819]1204 if ids is None:
1205 rfreqs={}
1206 idlist = self.getmolnos()
1207 for i in idlist:
1208 rfreqs[i]=list(self._getrestfreqs(i))
1209 return rfreqs
1210 else:
1211 if type(ids)==list or type(ids)==tuple:
1212 rfreqs={}
1213 for i in ids:
1214 rfreqs[i]=list(self._getrestfreqs(i))
1215 return rfreqs
1216 else:
1217 return list(self._getrestfreqs(ids))
1218 #return list(self._getrestfreqs(ids))
[102]1219
[931]1220 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1221 """\
[931]1222 Set or replace the restfrequency specified and
1223 If the 'freqs' argument holds a scalar,
1224 then that rest frequency will be applied to all the selected
1225 data. If the 'freqs' argument holds
1226 a vector, then it MUST be of equal or smaller length than
1227 the number of IFs (and the available restfrequencies will be
1228 replaced by this vector). In this case, *all* data have
1229 the restfrequency set per IF according
1230 to the corresponding value you give in the 'freqs' vector.
[1118]1231 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1232 IF 1 gets restfreq 2e9.
[1846]1233
[1395]1234 You can also specify the frequencies via a linecatalog.
[1153]1235
[931]1236 Parameters:
[1846]1237
[931]1238 freqs: list of rest frequency values or string idenitfiers
[1855]1239
[931]1240 unit: unit for rest frequency (default 'Hz')
[402]1241
[1846]1242
1243 Example::
1244
[1819]1245 # set the given restfrequency for the all currently selected IFs
[931]1246 scan.set_restfreqs(freqs=1.4e9)
[1845]1247 # set restfrequencies for the n IFs (n > 1) in the order of the
1248 # list, i.e
1249 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1250 # len(list_of_restfreqs) == nIF
1251 # for nIF == 1 the following will set multiple restfrequency for
1252 # that IF
[1819]1253 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1254 # set multiple restfrequencies per IF. as a list of lists where
1255 # the outer list has nIF elements, the inner s arbitrary
1256 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1257
[1846]1258 *Note*:
[1845]1259
[931]1260 To do more sophisticate Restfrequency setting, e.g. on a
1261 source and IF basis, use scantable.set_selection() before using
[1846]1262 this function::
[931]1263
[1846]1264 # provided your scantable is called scan
1265 selection = selector()
1266 selection.set_name("ORION*")
1267 selection.set_ifs([1])
1268 scan.set_selection(selection)
1269 scan.set_restfreqs(freqs=86.6e9)
1270
[931]1271 """
1272 varlist = vars()
[1157]1273 from asap import linecatalog
1274 # simple value
[1118]1275 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1276 self._setrestfreqs([freqs], [""], unit)
[1157]1277 # list of values
[1118]1278 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1279 # list values are scalars
[1118]1280 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1281 if len(freqs) == 1:
1282 self._setrestfreqs(freqs, [""], unit)
1283 else:
1284 # allow the 'old' mode of setting mulitple IFs
1285 sel = selector()
1286 savesel = self._getselection()
1287 iflist = self.getifnos()
1288 if len(freqs)>len(iflist):
1289 raise ValueError("number of elements in list of list "
1290 "exeeds the current IF selections")
1291 iflist = self.getifnos()
1292 for i, fval in enumerate(freqs):
1293 sel.set_ifs(iflist[i])
1294 self._setselection(sel)
1295 self._setrestfreqs([fval], [""], unit)
1296 self._setselection(savesel)
1297
1298 # list values are dict, {'value'=, 'name'=)
[1157]1299 elif isinstance(freqs[-1], dict):
[1845]1300 values = []
1301 names = []
1302 for d in freqs:
1303 values.append(d["value"])
1304 names.append(d["name"])
1305 self._setrestfreqs(values, names, unit)
[1819]1306 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1307 sel = selector()
1308 savesel = self._getselection()
[1322]1309 iflist = self.getifnos()
[1819]1310 if len(freqs)>len(iflist):
[1845]1311 raise ValueError("number of elements in list of list exeeds"
1312 " the current IF selections")
1313 for i, fval in enumerate(freqs):
[1322]1314 sel.set_ifs(iflist[i])
[1259]1315 self._setselection(sel)
[1845]1316 self._setrestfreqs(fval, [""], unit)
[1157]1317 self._setselection(savesel)
1318 # freqs are to be taken from a linecatalog
[1153]1319 elif isinstance(freqs, linecatalog):
1320 sel = selector()
1321 savesel = self._getselection()
1322 for i in xrange(freqs.nrow()):
[1322]1323 sel.set_ifs(iflist[i])
[1153]1324 self._setselection(sel)
[1845]1325 self._setrestfreqs([freqs.get_frequency(i)],
1326 [freqs.get_name(i)], "MHz")
[1153]1327 # ensure that we are not iterating past nIF
1328 if i == self.nif()-1: break
1329 self._setselection(savesel)
[931]1330 else:
1331 return
1332 self._add_history("set_restfreqs", varlist)
1333
[1360]1334 def shift_refpix(self, delta):
[1846]1335 """\
[1589]1336 Shift the reference pixel of the Spectra Coordinate by an
1337 integer amount.
[1846]1338
[1589]1339 Parameters:
[1846]1340
[1589]1341 delta: the amount to shift by
[1846]1342
1343 *Note*:
1344
[1589]1345 Be careful using this with broadband data.
[1846]1346
[1360]1347 """
[1731]1348 Scantable.shift_refpix(self, delta)
[931]1349
[1862]1350 @asaplog_post_dec
[1259]1351 def history(self, filename=None):
[1846]1352 """\
[1259]1353 Print the history. Optionally to a file.
[1846]1354
[1348]1355 Parameters:
[1846]1356
[1348]1357 filename: The name of the file to save the history to.
[1846]1358
[1259]1359 """
[484]1360 hist = list(self._gethistory())
[794]1361 out = "-"*80
[484]1362 for h in hist:
[489]1363 if h.startswith("---"):
[1857]1364 out = "\n".join([out, h])
[489]1365 else:
1366 items = h.split("##")
1367 date = items[0]
1368 func = items[1]
1369 items = items[2:]
[794]1370 out += "\n"+date+"\n"
1371 out += "Function: %s\n Parameters:" % (func)
[489]1372 for i in items:
1373 s = i.split("=")
[1118]1374 out += "\n %s = %s" % (s[0], s[1])
[1857]1375 out = "\n".join([out, "-"*80])
[1259]1376 if filename is not None:
1377 if filename is "":
1378 filename = 'scantable_history.txt'
1379 import os
1380 filename = os.path.expandvars(os.path.expanduser(filename))
1381 if not os.path.isdir(filename):
1382 data = open(filename, 'w')
1383 data.write(out)
1384 data.close()
1385 else:
1386 msg = "Illegal file name '%s'." % (filename)
[1859]1387 raise IOError(msg)
1388 return page(out)
[513]1389 #
1390 # Maths business
1391 #
[1862]1392 @asaplog_post_dec
[931]1393 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1394 """\
[1070]1395 Return the (time) weighted average of a scan.
[1846]1396
1397 *Note*:
1398
[1070]1399 in channels only - align if necessary
[1846]1400
[513]1401 Parameters:
[1846]1402
[513]1403 mask: an optional mask (only used for 'var' and 'tsys'
1404 weighting)
[1855]1405
[558]1406 scanav: True averages each scan separately
1407 False (default) averages all scans together,
[1855]1408
[1099]1409 weight: Weighting scheme.
1410 'none' (mean no weight)
1411 'var' (1/var(spec) weighted)
1412 'tsys' (1/Tsys**2 weighted)
1413 'tint' (integration time weighted)
1414 'tintsys' (Tint/Tsys**2)
1415 'median' ( median averaging)
[535]1416 The default is 'tint'
[1855]1417
[931]1418 align: align the spectra in velocity before averaging. It takes
1419 the time of the first spectrum as reference time.
[1846]1420
1421 Example::
1422
[513]1423 # time average the scantable without using a mask
[710]1424 newscan = scan.average_time()
[1846]1425
[513]1426 """
1427 varlist = vars()
[1593]1428 weight = weight or 'TINT'
1429 mask = mask or ()
1430 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1431 scan = (self, )
[1859]1432
1433 if align:
1434 scan = (self.freq_align(insitu=False), )
1435 s = None
1436 if weight.upper() == 'MEDIAN':
1437 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1438 scanav))
1439 else:
1440 s = scantable(self._math._average(scan, mask, weight.upper(),
1441 scanav))
[1099]1442 s._add_history("average_time", varlist)
[513]1443 return s
[710]1444
[1862]1445 @asaplog_post_dec
[876]1446 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1447 """\
[513]1448 Return a scan where all spectra are converted to either
1449 Jansky or Kelvin depending upon the flux units of the scan table.
1450 By default the function tries to look the values up internally.
1451 If it can't find them (or if you want to over-ride), you must
1452 specify EITHER jyperk OR eta (and D which it will try to look up
1453 also if you don't set it). jyperk takes precedence if you set both.
[1846]1454
[513]1455 Parameters:
[1846]1456
[513]1457 jyperk: the Jy / K conversion factor
[1855]1458
[513]1459 eta: the aperture efficiency
[1855]1460
[513]1461 d: the geomtric diameter (metres)
[1855]1462
[513]1463 insitu: if False a new scantable is returned.
1464 Otherwise, the scaling is done in-situ
1465 The default is taken from .asaprc (False)
[1846]1466
[513]1467 """
1468 if insitu is None: insitu = rcParams['insitu']
[876]1469 self._math._setinsitu(insitu)
[513]1470 varlist = vars()
[1593]1471 jyperk = jyperk or -1.0
1472 d = d or -1.0
1473 eta = eta or -1.0
[876]1474 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1475 s._add_history("convert_flux", varlist)
1476 if insitu: self._assign(s)
1477 else: return s
[513]1478
[1862]1479 @asaplog_post_dec
[876]1480 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1481 """\
[513]1482 Return a scan after applying a gain-elevation correction.
1483 The correction can be made via either a polynomial or a
1484 table-based interpolation (and extrapolation if necessary).
1485 You specify polynomial coefficients, an ascii table or neither.
1486 If you specify neither, then a polynomial correction will be made
1487 with built in coefficients known for certain telescopes (an error
1488 will occur if the instrument is not known).
1489 The data and Tsys are *divided* by the scaling factors.
[1846]1490
[513]1491 Parameters:
[1846]1492
[513]1493 poly: Polynomial coefficients (default None) to compute a
1494 gain-elevation correction as a function of
1495 elevation (in degrees).
[1855]1496
[513]1497 filename: The name of an ascii file holding correction factors.
1498 The first row of the ascii file must give the column
1499 names and these MUST include columns
1500 "ELEVATION" (degrees) and "FACTOR" (multiply data
1501 by this) somewhere.
1502 The second row must give the data type of the
1503 column. Use 'R' for Real and 'I' for Integer.
1504 An example file would be
1505 (actual factors are arbitrary) :
1506
1507 TIME ELEVATION FACTOR
1508 R R R
1509 0.1 0 0.8
1510 0.2 20 0.85
1511 0.3 40 0.9
1512 0.4 60 0.85
1513 0.5 80 0.8
1514 0.6 90 0.75
[1855]1515
[513]1516 method: Interpolation method when correcting from a table.
1517 Values are "nearest", "linear" (default), "cubic"
1518 and "spline"
[1855]1519
[513]1520 insitu: if False a new scantable is returned.
1521 Otherwise, the scaling is done in-situ
1522 The default is taken from .asaprc (False)
[1846]1523
[513]1524 """
1525
1526 if insitu is None: insitu = rcParams['insitu']
[876]1527 self._math._setinsitu(insitu)
[513]1528 varlist = vars()
[1593]1529 poly = poly or ()
[513]1530 from os.path import expandvars
1531 filename = expandvars(filename)
[876]1532 s = scantable(self._math._gainel(self, poly, filename, method))
1533 s._add_history("gain_el", varlist)
[1593]1534 if insitu:
1535 self._assign(s)
1536 else:
1537 return s
[710]1538
[1862]1539 @asaplog_post_dec
[931]1540 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1541 """\
[513]1542 Return a scan where all rows have been aligned in frequency/velocity.
1543 The alignment frequency frame (e.g. LSRK) is that set by function
1544 set_freqframe.
[1846]1545
[513]1546 Parameters:
[1855]1547
[513]1548 reftime: reference time to align at. By default, the time of
1549 the first row of data is used.
[1855]1550
[513]1551 method: Interpolation method for regridding the spectra.
1552 Choose from "nearest", "linear", "cubic" (default)
1553 and "spline"
[1855]1554
[513]1555 insitu: if False a new scantable is returned.
1556 Otherwise, the scaling is done in-situ
1557 The default is taken from .asaprc (False)
[1846]1558
[513]1559 """
[931]1560 if insitu is None: insitu = rcParams["insitu"]
[876]1561 self._math._setinsitu(insitu)
[513]1562 varlist = vars()
[1593]1563 reftime = reftime or ""
[931]1564 s = scantable(self._math._freq_align(self, reftime, method))
[876]1565 s._add_history("freq_align", varlist)
1566 if insitu: self._assign(s)
1567 else: return s
[513]1568
[1862]1569 @asaplog_post_dec
[1725]1570 def opacity(self, tau=None, insitu=None):
[1846]1571 """\
[513]1572 Apply an opacity correction. The data
1573 and Tsys are multiplied by the correction factor.
[1846]1574
[513]1575 Parameters:
[1855]1576
[1689]1577 tau: (list of) opacity from which the correction factor is
[513]1578 exp(tau*ZD)
[1689]1579 where ZD is the zenith-distance.
1580 If a list is provided, it has to be of length nIF,
1581 nIF*nPol or 1 and in order of IF/POL, e.g.
1582 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1583 if tau is `None` the opacities are determined from a
1584 model.
[1855]1585
[513]1586 insitu: if False a new scantable is returned.
1587 Otherwise, the scaling is done in-situ
1588 The default is taken from .asaprc (False)
[1846]1589
[513]1590 """
1591 if insitu is None: insitu = rcParams['insitu']
[876]1592 self._math._setinsitu(insitu)
[513]1593 varlist = vars()
[1689]1594 if not hasattr(tau, "__len__"):
1595 tau = [tau]
[876]1596 s = scantable(self._math._opacity(self, tau))
1597 s._add_history("opacity", varlist)
1598 if insitu: self._assign(s)
1599 else: return s
[513]1600
[1862]1601 @asaplog_post_dec
[513]1602 def bin(self, width=5, insitu=None):
[1846]1603 """\
[513]1604 Return a scan where all spectra have been binned up.
[1846]1605
[1348]1606 Parameters:
[1846]1607
[513]1608 width: The bin width (default=5) in pixels
[1855]1609
[513]1610 insitu: if False a new scantable is returned.
1611 Otherwise, the scaling is done in-situ
1612 The default is taken from .asaprc (False)
[1846]1613
[513]1614 """
1615 if insitu is None: insitu = rcParams['insitu']
[876]1616 self._math._setinsitu(insitu)
[513]1617 varlist = vars()
[876]1618 s = scantable(self._math._bin(self, width))
[1118]1619 s._add_history("bin", varlist)
[1589]1620 if insitu:
1621 self._assign(s)
1622 else:
1623 return s
[513]1624
[1862]1625 @asaplog_post_dec
[513]1626 def resample(self, width=5, method='cubic', insitu=None):
[1846]1627 """\
[1348]1628 Return a scan where all spectra have been binned up.
[1573]1629
[1348]1630 Parameters:
[1846]1631
[513]1632 width: The bin width (default=5) in pixels
[1855]1633
[513]1634 method: Interpolation method when correcting from a table.
1635 Values are "nearest", "linear", "cubic" (default)
1636 and "spline"
[1855]1637
[513]1638 insitu: if False a new scantable is returned.
1639 Otherwise, the scaling is done in-situ
1640 The default is taken from .asaprc (False)
[1846]1641
[513]1642 """
1643 if insitu is None: insitu = rcParams['insitu']
[876]1644 self._math._setinsitu(insitu)
[513]1645 varlist = vars()
[876]1646 s = scantable(self._math._resample(self, method, width))
[1118]1647 s._add_history("resample", varlist)
[876]1648 if insitu: self._assign(s)
1649 else: return s
[513]1650
[1862]1651 @asaplog_post_dec
[946]1652 def average_pol(self, mask=None, weight='none'):
[1846]1653 """\
[946]1654 Average the Polarisations together.
[1846]1655
[946]1656 Parameters:
[1846]1657
[946]1658 mask: An optional mask defining the region, where the
1659 averaging will be applied. The output will have all
1660 specified points masked.
[1855]1661
[946]1662 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1663 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1664
[946]1665 """
1666 varlist = vars()
[1593]1667 mask = mask or ()
[1010]1668 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1669 s._add_history("average_pol", varlist)
[992]1670 return s
[513]1671
[1862]1672 @asaplog_post_dec
[1145]1673 def average_beam(self, mask=None, weight='none'):
[1846]1674 """\
[1145]1675 Average the Beams together.
[1846]1676
[1145]1677 Parameters:
1678 mask: An optional mask defining the region, where the
1679 averaging will be applied. The output will have all
1680 specified points masked.
[1855]1681
[1145]1682 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1683 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1684
[1145]1685 """
1686 varlist = vars()
[1593]1687 mask = mask or ()
[1145]1688 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1689 s._add_history("average_beam", varlist)
1690 return s
1691
[1586]1692 def parallactify(self, pflag):
[1846]1693 """\
[1843]1694 Set a flag to indicate whether this data should be treated as having
[1617]1695 been 'parallactified' (total phase == 0.0)
[1846]1696
[1617]1697 Parameters:
[1855]1698
[1843]1699 pflag: Bool indicating whether to turn this on (True) or
[1617]1700 off (False)
[1846]1701
[1617]1702 """
[1586]1703 varlist = vars()
1704 self._parallactify(pflag)
1705 self._add_history("parallactify", varlist)
1706
[1862]1707 @asaplog_post_dec
[992]1708 def convert_pol(self, poltype=None):
[1846]1709 """\
[992]1710 Convert the data to a different polarisation type.
[1565]1711 Note that you will need cross-polarisation terms for most conversions.
[1846]1712
[992]1713 Parameters:
[1855]1714
[992]1715 poltype: The new polarisation type. Valid types are:
[1565]1716 "linear", "circular", "stokes" and "linpol"
[1846]1717
[992]1718 """
1719 varlist = vars()
[1859]1720 s = scantable(self._math._convertpol(self, poltype))
[1118]1721 s._add_history("convert_pol", varlist)
[992]1722 return s
1723
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the input so the user can compare/reject per row
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # choose GUI or non-GUI plotter backend per user configuration
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            # show original vs smoothed per row; ask for interactive approval
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1801
[1862]1802 @asaplog_post_dec
[1617]1803 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1804 insitu=None):
[1846]1805 """\
[513]1806 Return a scan which has been baselined (all rows) by a polynomial.
[1846]1807
[513]1808 Parameters:
[1846]1809
[794]1810 mask: an optional mask
[1855]1811
[794]1812 order: the order of the polynomial (default is 0)
[1855]1813
[1061]1814 plot: plot the fit and the residual. In this each
1815 indivual fit has to be approved, by typing 'y'
1816 or 'n'
[1855]1817
[1391]1818 uselin: use linear polynomial fit
[1855]1819
[794]1820 insitu: if False a new scantable is returned.
1821 Otherwise, the scaling is done in-situ
1822 The default is taken from .asaprc (False)
[1846]1823
1824 Example::
1825
[513]1826 # return a scan baselined by a third order polynomial,
1827 # not using a mask
1828 bscan = scan.poly_baseline(order=3)
[1846]1829
[579]1830 """
[513]1831 if insitu is None: insitu = rcParams['insitu']
[1819]1832 if not insitu:
1833 workscan = self.copy()
1834 else:
1835 workscan = self
[513]1836 varlist = vars()
1837 if mask is None:
[1295]1838 mask = [True for i in xrange(self.nchan(-1))]
[1819]1839
[513]1840 from asap.asapfitter import fitter
[1217]1841 try:
1842 f = fitter()
[1391]1843 if uselin:
1844 f.set_function(lpoly=order)
1845 else:
1846 f.set_function(poly=order)
[1819]1847
1848 rows = range(workscan.nrow())
1849 if len(rows) > 0:
1850 self.blpars = []
1851
1852 for r in rows:
1853 # take into account flagtra info (CAS-1434)
1854 flagtra = workscan._getmask(r)
1855 actualmask = mask[:]
1856 if len(actualmask) == 0:
1857 actualmask = list(flagtra[:])
1858 else:
1859 if len(actualmask) != len(flagtra):
1860 raise RuntimeError, "Mask and flagtra have different length"
1861 else:
1862 for i in range(0, len(actualmask)):
1863 actualmask[i] = actualmask[i] and flagtra[i]
1864 f.set_scan(workscan, actualmask)
1865 f.x = workscan._getabcissa(r)
1866 f.y = workscan._getspectrum(r)
1867 f.data = None
1868 f.fit()
1869 if plot:
1870 f.plot(residual=True)
1871 x = raw_input("Accept fit ( [y]/n ): ")
1872 if x.upper() == 'N':
1873 self.blpars.append(None)
1874 continue
1875 workscan._setspectrum(f.fitter.getresidual(), r)
1876 self.blpars.append(f.get_parameters())
1877
1878 if plot:
1879 f._p.unmap()
1880 f._p = None
1881 workscan._add_history("poly_baseline", varlist)
[1856]1882 if insitu:
1883 self._assign(workscan)
1884 else:
1885 return workscan
[1217]1886 except RuntimeError:
1887 msg = "The fit failed, possibly because it didn't converge."
[1859]1888 raise RuntimeError(msg)
[513]1889
[1819]1890
[1118]1891 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1892 threshold=3, chan_avg_limit=1, plot=False,
1893 insitu=None):
[1846]1894 """\
[880]1895 Return a scan which has been baselined (all rows) by a polynomial.
1896 Spectral lines are detected first using linefinder and masked out
1897 to avoid them affecting the baseline solution.
1898
1899 Parameters:
[1846]1900
[880]1901 mask: an optional mask retreived from scantable
[1846]1902
1903 edge: an optional number of channel to drop at the edge of
1904 spectrum. If only one value is
[880]1905 specified, the same number will be dropped from
1906 both sides of the spectrum. Default is to keep
[907]1907 all channels. Nested tuples represent individual
[976]1908 edge selection for different IFs (a number of spectral
1909 channels can be different)
[1846]1910
[880]1911 order: the order of the polynomial (default is 0)
[1846]1912
[880]1913 threshold: the threshold used by line finder. It is better to
1914 keep it large as only strong lines affect the
1915 baseline solution.
[1846]1916
[1280]1917 chan_avg_limit:
1918 a maximum number of consequtive spectral channels to
1919 average during the search of weak and broad lines.
1920 The default is no averaging (and no search for weak
1921 lines). If such lines can affect the fitted baseline
1922 (e.g. a high order polynomial is fitted), increase this
1923 parameter (usually values up to 8 are reasonable). Most
1924 users of this method should find the default value
1925 sufficient.
[1846]1926
[1061]1927 plot: plot the fit and the residual. In this each
1928 indivual fit has to be approved, by typing 'y'
1929 or 'n'
[1846]1930
[880]1931 insitu: if False a new scantable is returned.
1932 Otherwise, the scaling is done in-situ
1933 The default is taken from .asaprc (False)
1934
[1846]1935
1936 Example::
1937
1938 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
1939
[880]1940 """
1941 if insitu is None: insitu = rcParams['insitu']
1942 varlist = vars()
1943 from asap.asapfitter import fitter
1944 from asap.asaplinefind import linefinder
1945 from asap import _is_sequence_or_number as _is_valid
1946
[976]1947 # check whether edge is set up for each IF individually
[1118]1948 individualedge = False;
1949 if len(edge) > 1:
1950 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1951 individualedge = True;
[907]1952
[1118]1953 if not _is_valid(edge, int) and not individualedge:
[909]1954 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1955 pair of integers specified as a tuple. Nested tuples are allowed \
1956 to make individual selection for different IFs."
[919]1957
[1118]1958 curedge = (0, 0)
1959 if individualedge:
1960 for edgepar in edge:
1961 if not _is_valid(edgepar, int):
1962 raise ValueError, "Each element of the 'edge' tuple has \
1963 to be a pair of integers or an integer."
[907]1964 else:
[1118]1965 curedge = edge;
[880]1966
1967 # setup fitter
1968 f = fitter()
1969 f.set_function(poly=order)
1970
1971 # setup line finder
[1118]1972 fl = linefinder()
[1268]1973 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1974
1975 if not insitu:
[1118]1976 workscan = self.copy()
[880]1977 else:
[1118]1978 workscan = self
[880]1979
[907]1980 fl.set_scan(workscan)
1981
[1118]1982 rows = range(workscan.nrow())
[1819]1983 # Save parameters of baseline fits & masklists as a class attribute.
1984 # NOTICE: It does not reflect changes in scantable!
1985 if len(rows) > 0:
1986 self.blpars=[]
1987 self.masklists=[]
[880]1988 asaplog.push("Processing:")
1989 for r in rows:
[1118]1990 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1991 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1992 workscan.getpol(r), workscan.getcycle(r))
[880]1993 asaplog.push(msg, False)
[907]1994
[976]1995 # figure out edge parameter
[1118]1996 if individualedge:
1997 if len(edge) >= workscan.getif(r):
1998 raise RuntimeError, "Number of edge elements appear to " \
1999 "be less than the number of IFs"
2000 curedge = edge[workscan.getif(r)]
[919]2001
[1819]2002 # take into account flagtra info (CAS-1434)
2003 flagtra = workscan._getmask(r)
2004 actualmask = mask[:]
2005 if len(actualmask) == 0:
2006 actualmask = list(flagtra[:])
2007 else:
2008 if len(actualmask) != len(flagtra):
2009 raise RuntimeError, "Mask and flagtra have different length"
2010 else:
2011 for i in range(0, len(actualmask)):
2012 actualmask[i] = actualmask[i] and flagtra[i]
2013
[976]2014 # setup line finder
[1819]2015 fl.find_lines(r, actualmask, curedge)
2016 outmask=fl.get_mask()
2017 f.set_scan(workscan, fl.get_mask())
2018 f.x = workscan._getabcissa(r)
2019 f.y = workscan._getspectrum(r)
2020 f.data = None
[880]2021 f.fit()
[1819]2022
2023 # Show mask list
2024 masklist=workscan.get_masklist(fl.get_mask(),row=r)
2025 msg = "mask range: "+str(masklist)
2026 asaplog.push(msg, False)
2027
[1061]2028 if plot:
2029 f.plot(residual=True)
2030 x = raw_input("Accept fit ( [y]/n ): ")
2031 if x.upper() == 'N':
[1819]2032 self.blpars.append(None)
2033 self.masklists.append(None)
[1061]2034 continue
[1819]2035
[880]2036 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2037 self.blpars.append(f.get_parameters())
2038 self.masklists.append(masklist)
[1061]2039 if plot:
2040 f._p.unmap()
2041 f._p = None
2042 workscan._add_history("auto_poly_baseline", varlist)
[880]2043 if insitu:
2044 self._assign(workscan)
2045 else:
2046 return workscan
2047
[1862]2048 @asaplog_post_dec
[914]2049 def rotate_linpolphase(self, angle):
[1846]2050 """\
[914]2051 Rotate the phase of the complex polarization O=Q+iU correlation.
2052 This is always done in situ in the raw data. So if you call this
2053 function more than once then each call rotates the phase further.
[1846]2054
[914]2055 Parameters:
[1846]2056
[914]2057 angle: The angle (degrees) to rotate (add) by.
[1846]2058
2059 Example::
2060
[914]2061 scan.rotate_linpolphase(2.3)
[1846]2062
[914]2063 """
2064 varlist = vars()
[936]2065 self._math._rotate_linpolphase(self, angle)
[914]2066 self._add_history("rotate_linpolphase", varlist)
2067 return
[710]2068
[1862]2069 @asaplog_post_dec
[914]2070 def rotate_xyphase(self, angle):
[1846]2071 """\
[914]2072 Rotate the phase of the XY correlation. This is always done in situ
2073 in the data. So if you call this function more than once
2074 then each call rotates the phase further.
[1846]2075
[914]2076 Parameters:
[1846]2077
[914]2078 angle: The angle (degrees) to rotate (add) by.
[1846]2079
2080 Example::
2081
[914]2082 scan.rotate_xyphase(2.3)
[1846]2083
[914]2084 """
2085 varlist = vars()
[936]2086 self._math._rotate_xyphase(self, angle)
[914]2087 self._add_history("rotate_xyphase", varlist)
2088 return
2089
[1862]2090 @asaplog_post_dec
[914]2091 def swap_linears(self):
[1846]2092 """\
[1573]2093 Swap the linear polarisations XX and YY, or better the first two
[1348]2094 polarisations as this also works for ciculars.
[914]2095 """
2096 varlist = vars()
[936]2097 self._math._swap_linears(self)
[914]2098 self._add_history("swap_linears", varlist)
2099 return
2100
[1862]2101 @asaplog_post_dec
[914]2102 def invert_phase(self):
[1846]2103 """\
[914]2104 Invert the phase of the complex polarisation
2105 """
2106 varlist = vars()
[936]2107 self._math._invert_phase(self)
[914]2108 self._add_history("invert_phase", varlist)
2109 return
2110
[1862]2111 @asaplog_post_dec
[876]2112 def add(self, offset, insitu=None):
[1846]2113 """\
[513]2114 Return a scan where all spectra have the offset added
[1846]2115
[513]2116 Parameters:
[1846]2117
[513]2118 offset: the offset
[1855]2119
[513]2120 insitu: if False a new scantable is returned.
2121 Otherwise, the scaling is done in-situ
2122 The default is taken from .asaprc (False)
[1846]2123
[513]2124 """
2125 if insitu is None: insitu = rcParams['insitu']
[876]2126 self._math._setinsitu(insitu)
[513]2127 varlist = vars()
[876]2128 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2129 s._add_history("add", varlist)
[876]2130 if insitu:
2131 self._assign(s)
2132 else:
[513]2133 return s
2134
[1862]2135 @asaplog_post_dec
[1308]2136 def scale(self, factor, tsys=True, insitu=None):
[1846]2137 """\
2138
[513]2139 Return a scan where all spectra are scaled by the give 'factor'
[1846]2140
[513]2141 Parameters:
[1846]2142
[1819]2143 factor: the scaling factor (float or 1D float list)
[1855]2144
[513]2145 insitu: if False a new scantable is returned.
2146 Otherwise, the scaling is done in-situ
2147 The default is taken from .asaprc (False)
[1855]2148
[513]2149 tsys: if True (default) then apply the operation to Tsys
2150 as well as the data
[1846]2151
[513]2152 """
2153 if insitu is None: insitu = rcParams['insitu']
[876]2154 self._math._setinsitu(insitu)
[513]2155 varlist = vars()
[1819]2156 s = None
2157 import numpy
2158 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2159 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2160 from asapmath import _array2dOp
2161 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2162 else:
2163 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2164 else:
2165 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2166 s._add_history("scale", varlist)
[876]2167 if insitu:
2168 self._assign(s)
2169 else:
[513]2170 return s
2171
[1504]2172 def set_sourcetype(self, match, matchtype="pattern",
2173 sourcetype="reference"):
[1846]2174 """\
[1502]2175 Set the type of the source to be an source or reference scan
[1846]2176 using the provided pattern.
2177
[1502]2178 Parameters:
[1846]2179
[1504]2180 match: a Unix style pattern, regular expression or selector
[1855]2181
[1504]2182 matchtype: 'pattern' (default) UNIX style pattern or
2183 'regex' regular expression
[1855]2184
[1502]2185 sourcetype: the type of the source to use (source/reference)
[1846]2186
[1502]2187 """
2188 varlist = vars()
2189 basesel = self.get_selection()
2190 stype = -1
2191 if sourcetype.lower().startswith("r"):
2192 stype = 1
2193 elif sourcetype.lower().startswith("s"):
2194 stype = 0
[1504]2195 else:
[1502]2196 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2197 if matchtype.lower().startswith("p"):
2198 matchtype = "pattern"
2199 elif matchtype.lower().startswith("r"):
2200 matchtype = "regex"
2201 else:
2202 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2203 sel = selector()
2204 if isinstance(match, selector):
2205 sel = match
2206 else:
[1504]2207 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2208 self.set_selection(basesel+sel)
2209 self._setsourcetype(stype)
2210 self.set_selection(basesel)
[1573]2211 self._add_history("set_sourcetype", varlist)
[1502]2212
[1862]2213 @asaplog_post_dec
[1857]2214 @preserve_selection
[1819]2215 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2216 """\
[670]2217 This function allows to build quotients automatically.
[1819]2218 It assumes the observation to have the same number of
[670]2219 "ons" and "offs"
[1846]2220
[670]2221 Parameters:
[1846]2222
[710]2223 preserve: you can preserve (default) the continuum or
2224 remove it. The equations used are
[1857]2225
[670]2226 preserve: Output = Toff * (on/off) - Toff
[1857]2227
[1070]2228 remove: Output = Toff * (on/off) - Ton
[1855]2229
[1573]2230 mode: the on/off detection mode
[1348]2231 'paired' (default)
2232 identifies 'off' scans by the
2233 trailing '_R' (Mopra/Parkes) or
2234 '_e'/'_w' (Tid) and matches
2235 on/off pairs from the observing pattern
[1502]2236 'time'
2237 finds the closest off in time
[1348]2238
[1857]2239 .. todo:: verify argument is not implemented
2240
[670]2241 """
[1857]2242 varlist = vars()
[1348]2243 modes = ["time", "paired"]
[670]2244 if not mode in modes:
[876]2245 msg = "please provide valid mode. Valid modes are %s" % (modes)
2246 raise ValueError(msg)
[1348]2247 s = None
2248 if mode.lower() == "paired":
[1857]2249 sel = self.get_selection()
[1356]2250 sel.set_query("SRCTYPE==1")
2251 self.set_selection(sel)
[1348]2252 offs = self.copy()
2253 sel.set_query("SRCTYPE==0")
[1356]2254 self.set_selection(sel)
[1348]2255 ons = self.copy()
2256 s = scantable(self._math._quotient(ons, offs, preserve))
2257 elif mode.lower() == "time":
2258 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2259 s._add_history("auto_quotient", varlist)
[876]2260 return s
[710]2261
[1862]2262 @asaplog_post_dec
[1145]2263 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2264 """\
[1143]2265 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2266
[1143]2267 Parameters:
[1846]2268
[1145]2269 mask: an optional mask to be used when weight == 'stddev'
[1855]2270
[1143]2271 weight: How to average the off beams. Default is 'median'.
[1855]2272
[1145]2273 preserve: you can preserve (default) the continuum or
[1855]2274 remove it. The equations used are:
[1846]2275
[1855]2276 preserve: Output = Toff * (on/off) - Toff
2277
2278 remove: Output = Toff * (on/off) - Ton
2279
[1217]2280 """
[1593]2281 mask = mask or ()
[1141]2282 varlist = vars()
2283 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2284 preoff = scantable(self._math._mx_extract(self, 'off'))
2285 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2286 from asapmath import quotient
[1145]2287 q = quotient(on, off, preserve)
[1143]2288 q._add_history("mx_quotient", varlist)
[1217]2289 return q
[513]2290
[1862]2291 @asaplog_post_dec
[718]2292 def freq_switch(self, insitu=None):
[1846]2293 """\
[718]2294 Apply frequency switching to the data.
[1846]2295
[718]2296 Parameters:
[1846]2297
[718]2298 insitu: if False a new scantable is returned.
2299 Otherwise, the swictching is done in-situ
2300 The default is taken from .asaprc (False)
[1846]2301
[718]2302 """
2303 if insitu is None: insitu = rcParams['insitu']
[876]2304 self._math._setinsitu(insitu)
[718]2305 varlist = vars()
[876]2306 s = scantable(self._math._freqswitch(self))
[1118]2307 s._add_history("freq_switch", varlist)
[1856]2308 if insitu:
2309 self._assign(s)
2310 else:
2311 return s
[718]2312
[1862]2313 @asaplog_post_dec
[780]2314 def recalc_azel(self):
[1846]2315 """Recalculate the azimuth and elevation for each position."""
[780]2316 varlist = vars()
[876]2317 self._recalcazel()
[780]2318 self._add_history("recalc_azel", varlist)
2319 return
2320
[1862]2321 @asaplog_post_dec
[513]2322 def __add__(self, other):
2323 varlist = vars()
2324 s = None
2325 if isinstance(other, scantable):
[1573]2326 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2327 elif isinstance(other, float):
[876]2328 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2329 else:
[718]2330 raise TypeError("Other input is not a scantable or float value")
[513]2331 s._add_history("operator +", varlist)
2332 return s
2333
[1862]2334 @asaplog_post_dec
[513]2335 def __sub__(self, other):
2336 """
2337 implicit on all axes and on Tsys
2338 """
2339 varlist = vars()
2340 s = None
2341 if isinstance(other, scantable):
[1588]2342 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2343 elif isinstance(other, float):
[876]2344 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2345 else:
[718]2346 raise TypeError("Other input is not a scantable or float value")
[513]2347 s._add_history("operator -", varlist)
2348 return s
[710]2349
[1862]2350 @asaplog_post_dec
[513]2351 def __mul__(self, other):
2352 """
2353 implicit on all axes and on Tsys
2354 """
2355 varlist = vars()
2356 s = None
2357 if isinstance(other, scantable):
[1588]2358 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2359 elif isinstance(other, float):
[876]2360 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2361 else:
[718]2362 raise TypeError("Other input is not a scantable or float value")
[513]2363 s._add_history("operator *", varlist)
2364 return s
2365
[710]2366
[1862]2367 @asaplog_post_dec
[513]2368 def __div__(self, other):
2369 """
2370 implicit on all axes and on Tsys
2371 """
2372 varlist = vars()
2373 s = None
2374 if isinstance(other, scantable):
[1589]2375 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2376 elif isinstance(other, float):
2377 if other == 0.0:
[718]2378 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2379 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2380 else:
[718]2381 raise TypeError("Other input is not a scantable or float value")
[513]2382 s._add_history("operator /", varlist)
2383 return s
2384
[1862]2385 @asaplog_post_dec
[530]2386 def get_fit(self, row=0):
[1846]2387 """\
[530]2388 Print or return the stored fits for a row in the scantable
[1846]2389
[530]2390 Parameters:
[1846]2391
[530]2392 row: the row which the fit has been applied to.
[1846]2393
[530]2394 """
2395 if row > self.nrow():
2396 return
[976]2397 from asap.asapfit import asapfit
[530]2398 fit = asapfit(self._getfit(row))
[1859]2399 asaplog.push( '%s' %(fit) )
2400 return fit.as_dict()
[530]2401
[1483]2402 def flag_nans(self):
[1846]2403 """\
[1483]2404 Utility function to flag NaN values in the scantable.
2405 """
2406 import numpy
2407 basesel = self.get_selection()
2408 for i in range(self.nrow()):
[1589]2409 sel = self.get_row_selector(i)
2410 self.set_selection(basesel+sel)
[1483]2411 nans = numpy.isnan(self._getspectrum(0))
2412 if numpy.any(nans):
2413 bnans = [ bool(v) for v in nans]
2414 self.flag(bnans)
2415 self.set_selection(basesel)
2416
[1588]2417 def get_row_selector(self, rowno):
2418 return selector(beams=self.getbeam(rowno),
2419 ifs=self.getif(rowno),
2420 pols=self.getpol(rowno),
2421 scans=self.getscan(rowno),
2422 cycles=self.getcycle(rowno))
[1573]2423
[484]2424 def _add_history(self, funcname, parameters):
[1435]2425 if not rcParams['scantable.history']:
2426 return
[484]2427 # create date
2428 sep = "##"
2429 from datetime import datetime
2430 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2431 hist = dstr+sep
2432 hist += funcname+sep#cdate+sep
2433 if parameters.has_key('self'): del parameters['self']
[1118]2434 for k, v in parameters.iteritems():
[484]2435 if type(v) is dict:
[1118]2436 for k2, v2 in v.iteritems():
[484]2437 hist += k2
2438 hist += "="
[1118]2439 if isinstance(v2, scantable):
[484]2440 hist += 'scantable'
2441 elif k2 == 'mask':
[1118]2442 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2443 hist += str(self._zip_mask(v2))
2444 else:
2445 hist += str(v2)
[484]2446 else:
[513]2447 hist += str(v2)
[484]2448 else:
2449 hist += k
2450 hist += "="
[1118]2451 if isinstance(v, scantable):
[484]2452 hist += 'scantable'
2453 elif k == 'mask':
[1118]2454 if isinstance(v, list) or isinstance(v, tuple):
[513]2455 hist += str(self._zip_mask(v))
2456 else:
2457 hist += str(v)
[484]2458 else:
2459 hist += str(v)
2460 hist += sep
2461 hist = hist[:-2] # remove trailing '##'
2462 self._addhistory(hist)
2463
[710]2464
[484]2465 def _zip_mask(self, mask):
2466 mask = list(mask)
2467 i = 0
2468 segments = []
2469 while mask[i:].count(1):
2470 i += mask[i:].index(1)
2471 if mask[i:].count(0):
2472 j = i + mask[i:].index(0)
2473 else:
[710]2474 j = len(mask)
[1118]2475 segments.append([i, j])
[710]2476 i = j
[484]2477 return segments
[714]2478
[626]2479 def _get_ordinate_label(self):
2480 fu = "("+self.get_fluxunit()+")"
2481 import re
2482 lbl = "Intensity"
[1118]2483 if re.match(".K.", fu):
[626]2484 lbl = "Brightness Temperature "+ fu
[1118]2485 elif re.match(".Jy.", fu):
[626]2486 lbl = "Flux density "+ fu
2487 return lbl
[710]2488
[876]2489 def _check_ifs(self):
2490 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2491 nchans = filter(lambda t: t > 0, nchans)
[876]2492 return (sum(nchans)/len(nchans) == nchans[0])
[976]2493
    @asaplog_post_dec
    def _fill(self, names, unit, average, getpt, antenna):
        """Fill this scantable from a list of input data files.

        Each name is expanded (environment variables and '~'), checked
        for existence, then read via the filler. Successive files are
        merged into self; after each file the Scantable base is
        re-initialised from the merged table.

        Parameters:

            names:    list of file names to import

            unit:     if not None, flux unit to set after filling

            average:  if True, time-average integrations per scan

            getpt:    currently unused here (see the commented-out
                      arguments of the open() call below)

            antenna:  currently unused here (same as getpt)

        """
        first = True
        fullnames = []
        # validate all names up front so we fail before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type code: 1 = disk-based, 0 = memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # append this file's data to what has been read so far
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2531
[1402]2532 def __getitem__(self, key):
2533 if key < 0:
2534 key += self.nrow()
2535 if key >= self.nrow():
2536 raise IndexError("Row index out of range.")
2537 return self._getspectrum(key)
2538
2539 def __setitem__(self, key, value):
2540 if key < 0:
2541 key += self.nrow()
2542 if key >= self.nrow():
2543 raise IndexError("Row index out of range.")
2544 if not hasattr(value, "__len__") or \
2545 len(value) > self.nchan(self.getif(key)):
2546 raise ValueError("Spectrum length doesn't match.")
2547 return self._setspectrum(value, key)
2548
2549 def __len__(self):
2550 return self.nrow()
2551
2552 def __iter__(self):
2553 for i in range(len(self)):
2554 yield self[i]
Note: See TracBrowser for help on using the repository browser.