source: trunk/python/scantable.py@ 2005

Last change on this file since 2005 was 2004, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: Yes .CAS-2718

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

The MSFiller is called instead of PKSFiller when input data is MS.
I have tested all task regressions as well as sdsave unit test and passed.

A few modifications were needed in STMath::dototalpower() and
STWriter::write().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.9 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[2004]12from asap._asap import filler, msfiller
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The selection present on entry is captured and re-applied in a
    ``finally`` block, so it is restored even when *func* raises.
    """
    @wraps_dec(func)
    def restoring_wrapper(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return restoring_wrapper
32
def is_scantable(filename):
    """Is the given file a scantable?

    A scantable is stored as a casacore table directory; any such table
    whose ``table.info`` header line does not identify it as a
    Measurement Set is treated as a scantable.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # close the header file even if readline fails
    f = open(filename + '/table.info')
    try:
        l = f.readline()
    finally:
        f.close()
    return l.find('Measurement Set') == -1
[1697]57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    A MeasurementSet is a casacore table directory whose ``table.info``
    header line contains the string 'Measurement Set'.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # close the header file even if readline fails
    f = open(filename + '/table.info')
    try:
        l = f.readline()
    finally:
        f.close()
    return l.find('Measurement Set') != -1
78
[876]79class scantable(Scantable):
[1846]80 """\
81 The ASAP container for scans (single-dish data).
[102]82 """
[1819]83
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations withinb a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The deafult is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallatified. Default
                          is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # getpt/antenna are now passed through **args to the MS filler;
        # the old explicit handling is kept here for reference.
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy construction from an existing low-level table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native scantable on disk; honour the storage rc setting
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #    and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: forward MS-specific options to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # rpfits/sdfits file
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a sequence of file names is filled into one scantable
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]186
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the output format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            # native format is written by the table itself
            self._save(name)
##        elif format2 == 'MS2':
##            msopt = {'ms': {'overwrite': overwrite } }
##            from asap._asap import mswriter
##            writer = mswriter( self )
##            writer.write( name, msopt )
        else:
            # all other formats go through the generic STWriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
244
[102]245 def copy(self):
[1846]246 """Return a copy of this scantable.
247
248 *Note*:
249
[1348]250 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]251
252 Example::
253
[102]254 copiedscan = scan.copy()
[1846]255
[102]256 """
[876]257 sd = scantable(Scantable._copy(self))
[113]258 return sd
259
[1093]260 def drop_scan(self, scanid=None):
[1846]261 """\
[1093]262 Return a new scantable where the specified scan number(s) has(have)
263 been dropped.
[1846]264
[1093]265 Parameters:
[1846]266
[1093]267 scanid: a (list of) scan number(s)
[1846]268
[1093]269 """
270 from asap import _is_sequence_or_number as _is_valid
271 from asap import _to_list
272 from asap import unique
273 if not _is_valid(scanid):
[1859]274 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
275 scanid = _to_list(scanid)
276 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
277 for sid in scanid: allscans.remove(sid)
278 if len(allscans) == 0:
279 raise ValueError("Can't remove all scans")
280 sel = selector(scans=allscans)
281 return self._select_copy(sel)
[1093]282
[1594]283 def _select_copy(self, selection):
284 orig = self.get_selection()
285 self.set_selection(orig+selection)
286 cp = self.copy()
287 self.set_selection(orig)
288 return cp
289
[102]290 def get_scan(self, scanid=None):
[1855]291 """\
[102]292 Return a specific scan (by scanno) or collection of scans (by
293 source name) in a new scantable.
[1846]294
295 *Note*:
296
[1348]297 See scantable.drop_scan() for the inverse operation.
[1846]298
[102]299 Parameters:
[1846]300
[513]301 scanid: a (list of) scanno or a source name, unix-style
302 patterns are accepted for source name matching, e.g.
303 '*_R' gets all 'ref scans
[1846]304
305 Example::
306
[513]307 # get all scans containing the source '323p459'
308 newscan = scan.get_scan('323p459')
309 # get all 'off' scans
310 refscans = scan.get_scan('*_R')
311 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]312 newscan = scan.get_scan([0, 2, 7, 10])
[1846]313
[102]314 """
315 if scanid is None:
[1859]316 raise RuntimeError( 'Please specify a scan no or name to '
317 'retrieve from the scantable' )
[102]318 try:
[946]319 bsel = self.get_selection()
320 sel = selector()
[102]321 if type(scanid) is str:
[946]322 sel.set_name(scanid)
[1594]323 return self._select_copy(sel)
[102]324 elif type(scanid) is int:
[946]325 sel.set_scans([scanid])
[1594]326 return self._select_copy(sel)
[381]327 elif type(scanid) is list:
[946]328 sel.set_scans(scanid)
[1594]329 return self._select_copy(sel)
[381]330 else:
[718]331 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]332 raise TypeError(msg)
[102]333 except RuntimeError:
[1859]334 raise
[102]335
336 def __str__(self):
[1118]337 return Scantable._summary(self, True)
[102]338
[976]339 def summary(self, filename=None):
[1846]340 """\
[102]341 Print a summary of the contents of this scantable.
[1846]342
[102]343 Parameters:
[1846]344
[1931]345 filename: the name of a file to write the putput to
[102]346 Default - no file output
[1846]347
[102]348 """
[976]349 info = Scantable._summary(self, True)
[102]350 if filename is not None:
[256]351 if filename is "":
352 filename = 'scantable_summary.txt'
[415]353 from os.path import expandvars, isdir
[411]354 filename = expandvars(filename)
[415]355 if not isdir(filename):
[413]356 data = open(filename, 'w')
357 data.write(info)
358 data.close()
359 else:
[718]360 msg = "Illegal file name '%s'." % (filename)
[1859]361 raise IOError(msg)
362 return page(info)
[710]363
[1512]364 def get_spectrum(self, rowno):
[1471]365 """Return the spectrum for the current row in the scantable as a list.
[1846]366
[1471]367 Parameters:
[1846]368
[1573]369 rowno: the row number to retrieve the spectrum from
[1846]370
[1471]371 """
372 return self._getspectrum(rowno)
[946]373
[1471]374 def get_mask(self, rowno):
375 """Return the mask for the current row in the scantable as a list.
[1846]376
[1471]377 Parameters:
[1846]378
[1573]379 rowno: the row number to retrieve the mask from
[1846]380
[1471]381 """
382 return self._getmask(rowno)
383
384 def set_spectrum(self, spec, rowno):
[1938]385 """Set the spectrum for the current row in the scantable.
[1846]386
[1471]387 Parameters:
[1846]388
[1855]389 spec: the new spectrum
[1846]390
[1855]391 rowno: the row number to set the spectrum for
392
[1471]393 """
394 assert(len(spec) == self.nchan())
395 return self._setspectrum(spec, rowno)
396
[1600]397 def get_coordinate(self, rowno):
398 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]399
400 *Note*:
401
[1600]402 * This coordinate is only valid until a scantable method modifies
403 the frequency axis.
404 * This coordinate does contain the original frequency set-up
405 NOT the new frame. The conversions however are done using the user
406 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
407 use scantable.freq_align first. Without it there is no closure,
[1846]408 i.e.::
[1600]409
[1846]410 c = myscan.get_coordinate(0)
411 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
412
[1600]413 Parameters:
[1846]414
[1600]415 rowno: the row number for the spectral coordinate
416
417 """
418 return coordinate(Scantable.get_coordinate(self, rowno))
419
[946]420 def get_selection(self):
[1846]421 """\
[1005]422 Get the selection object currently set on this scantable.
[1846]423
424 Example::
425
[1005]426 sel = scan.get_selection()
427 sel.set_ifs(0) # select IF 0
428 scan.set_selection(sel) # apply modified selection
[1846]429
[946]430 """
431 return selector(self._getselection())
432
[1576]433 def set_selection(self, selection=None, **kw):
[1846]434 """\
[1005]435 Select a subset of the data. All following operations on this scantable
436 are only applied to thi selection.
[1846]437
[1005]438 Parameters:
[1697]439
[1846]440 selection: a selector object (default unset the selection), or
441 any combination of "pols", "ifs", "beams", "scans",
442 "cycles", "name", "query"
[1697]443
[1846]444 Examples::
[1697]445
[1005]446 sel = selector() # create a selection object
[1118]447 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]448 scan.set_selection(sel) # set the selection
449 scan.summary() # will only print summary of scanno 0 an 3
450 scan.set_selection() # unset the selection
[1697]451 # or the equivalent
452 scan.set_selection(scans=[0,3])
453 scan.summary() # will only print summary of scanno 0 an 3
454 scan.set_selection() # unset the selection
[1846]455
[946]456 """
[1576]457 if selection is None:
458 # reset
459 if len(kw) == 0:
460 selection = selector()
461 else:
462 # try keywords
463 for k in kw:
464 if k not in selector.fields:
465 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
466 selection = selector(**kw)
[946]467 self._setselection(selection)
468
[1819]469 def get_row(self, row=0, insitu=None):
[1846]470 """\
[1819]471 Select a row in the scantable.
472 Return a scantable with single row.
[1846]473
[1819]474 Parameters:
[1846]475
476 row: row no of integration, default is 0.
477 insitu: if False a new scantable is returned. Otherwise, the
478 scaling is done in-situ. The default is taken from .asaprc
479 (False)
480
[1819]481 """
482 if insitu is None: insitu = rcParams['insitu']
483 if not insitu:
484 workscan = self.copy()
485 else:
486 workscan = self
487 # Select a row
488 sel=selector()
[1992]489 sel.set_rows([row])
490 #sel.set_scans([workscan.getscan(row)])
491 #sel.set_cycles([workscan.getcycle(row)])
492 #sel.set_beams([workscan.getbeam(row)])
493 #sel.set_ifs([workscan.getif(row)])
494 #sel.set_polarisations([workscan.getpol(row)])
495 #sel.set_name(workscan._getsourcename(row))
[1819]496 workscan.set_selection(sel)
497 if not workscan.nrow() == 1:
498 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
499 raise RuntimeError(msg)
500 del sel
501 if insitu:
502 self._assign(workscan)
503 else:
504 return workscan
505
[1862]506 @asaplog_post_dec
[1907]507 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]508 """\
[135]509 Determine the specified statistic of the current beam/if/pol
[102]510 Takes a 'mask' as an optional parameter to specify which
511 channels should be excluded.
[1846]512
[102]513 Parameters:
[1846]514
[1819]515 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
516 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]517
[135]518 mask: an optional mask specifying where the statistic
[102]519 should be determined.
[1855]520
[1819]521 form: format string to print statistic values
[1846]522
[1907]523 row: row number of spectrum to process.
524 (default is None: for all rows)
[1846]525
[1907]526 Example:
[113]527 scan.set_unit('channel')
[1118]528 msk = scan.create_mask([100, 200], [500, 600])
[135]529 scan.stats(stat='mean', mask=m)
[1846]530
[102]531 """
[1593]532 mask = mask or []
[876]533 if not self._check_ifs():
[1118]534 raise ValueError("Cannot apply mask as the IFs have different "
535 "number of channels. Please use setselection() "
536 "to select individual IFs")
[1819]537 rtnabc = False
538 if stat.lower().endswith('_abc'): rtnabc = True
539 getchan = False
540 if stat.lower().startswith('min') or stat.lower().startswith('max'):
541 chan = self._math._minmaxchan(self, mask, stat)
542 getchan = True
543 statvals = []
[1907]544 if not rtnabc:
545 if row == None:
546 statvals = self._math._stats(self, mask, stat)
547 else:
548 statvals = self._math._statsrow(self, mask, stat, int(row))
[256]549
[1819]550 #def cb(i):
551 # return statvals[i]
[256]552
[1819]553 #return self._row_callback(cb, stat)
[102]554
[1819]555 label=stat
556 #callback=cb
557 out = ""
558 #outvec = []
559 sep = '-'*50
[1907]560
561 if row == None:
562 rows = xrange(self.nrow())
563 elif isinstance(row, int):
564 rows = [ row ]
565
566 for i in rows:
[1819]567 refstr = ''
568 statunit= ''
569 if getchan:
570 qx, qy = self.chan2data(rowno=i, chan=chan[i])
571 if rtnabc:
572 statvals.append(qx['value'])
573 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
574 statunit= '['+qx['unit']+']'
575 else:
576 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
577
578 tm = self._gettime(i)
579 src = self._getsourcename(i)
580 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
581 out += 'Time[%s]:\n' % (tm)
[1907]582 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
583 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
584 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[1819]585 #outvec.append(callback(i))
[1907]586 if len(rows) > 1:
587 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
588 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
589 else:
590 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
591 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
[1819]592 out += sep+"\n"
593
[1859]594 import os
595 if os.environ.has_key( 'USER' ):
596 usr = os.environ['USER']
597 else:
598 import commands
599 usr = commands.getoutput( 'whoami' )
600 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
601 f = open(tmpfile,'w')
602 print >> f, sep
603 print >> f, ' %s %s' % (label, statunit)
604 print >> f, sep
605 print >> f, out
606 f.close()
607 f = open(tmpfile,'r')
608 x = f.readlines()
609 f.close()
610 asaplog.push(''.join(x), False)
611
[1819]612 return statvals
613
614 def chan2data(self, rowno=0, chan=0):
[1846]615 """\
[1819]616 Returns channel/frequency/velocity and spectral value
617 at an arbitrary row and channel in the scantable.
[1846]618
[1819]619 Parameters:
[1846]620
[1819]621 rowno: a row number in the scantable. Default is the
622 first row, i.e. rowno=0
[1855]623
[1819]624 chan: a channel in the scantable. Default is the first
625 channel, i.e. pos=0
[1846]626
[1819]627 """
628 if isinstance(rowno, int) and isinstance(chan, int):
629 qx = {'unit': self.get_unit(),
630 'value': self._getabcissa(rowno)[chan]}
631 qy = {'unit': self.get_fluxunit(),
632 'value': self._getspectrum(rowno)[chan]}
633 return qx, qy
634
[1118]635 def stddev(self, mask=None):
[1846]636 """\
[135]637 Determine the standard deviation of the current beam/if/pol
638 Takes a 'mask' as an optional parameter to specify which
639 channels should be excluded.
[1846]640
[135]641 Parameters:
[1846]642
[135]643 mask: an optional mask specifying where the standard
644 deviation should be determined.
645
[1846]646 Example::
647
[135]648 scan.set_unit('channel')
[1118]649 msk = scan.create_mask([100, 200], [500, 600])
[135]650 scan.stddev(mask=m)
[1846]651
[135]652 """
[1118]653 return self.stats(stat='stddev', mask=mask);
[135]654
[1003]655
[1259]656 def get_column_names(self):
[1846]657 """\
[1003]658 Return a list of column names, which can be used for selection.
659 """
[1259]660 return list(Scantable.get_column_names(self))
[1003]661
[1730]662 def get_tsys(self, row=-1):
[1846]663 """\
[113]664 Return the System temperatures.
[1846]665
666 Parameters:
667
668 row: the rowno to get the information for. (default all rows)
669
[113]670 Returns:
[1846]671
[876]672 a list of Tsys values for the current selection
[1846]673
[113]674 """
[1730]675 if row > -1:
676 return self._get_column(self._gettsys, row)
[876]677 return self._row_callback(self._gettsys, "Tsys")
[256]678
[1730]679
680 def get_weather(self, row=-1):
[1846]681 """\
682 Return the weather informations.
683
684 Parameters:
685
686 row: the rowno to get the information for. (default all rows)
687
688 Returns:
689
690 a dict or list of of dicts of values for the current selection
691
692 """
693
[1730]694 values = self._get_column(self._get_weather, row)
695 if row > -1:
696 return {'temperature': values[0],
697 'pressure': values[1], 'humidity' : values[2],
698 'windspeed' : values[3], 'windaz' : values[4]
699 }
700 else:
701 out = []
702 for r in values:
703
704 out.append({'temperature': r[0],
705 'pressure': r[1], 'humidity' : r[2],
706 'windspeed' : r[3], 'windaz' : r[4]
707 })
708 return out
709
    def _row_callback(self, callback, label):
        # Apply *callback* to every row, build a per-row log block tagged
        # with *label*, push it to the log, and return the list of results.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that have more than one element
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]733
[1947]734 def _get_column(self, callback, row=-1, *args):
[1070]735 """
736 """
737 if row == -1:
[1947]738 return [callback(i, *args) for i in range(self.nrow())]
[1070]739 else:
[1819]740 if 0 <= row < self.nrow():
[1947]741 return callback(row, *args)
[256]742
[1070]743
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            # precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # trim sub-second/second/minute fields from the parse format so
            # it matches the precision of the returned strings.
            # NOTE(review): '/' here is Python 2 integer division; under
            # Python 3 this would need '//' — confirm before porting.
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]787
[1348]788
789 def get_inttime(self, row=-1):
[1846]790 """\
[1348]791 Get a list of integration times for the observations.
792 Return a time in seconds for each integration in the scantable.
[1846]793
[1348]794 Parameters:
[1846]795
[1348]796 row: row no of integration. Default -1 return all rows.
[1846]797
[1348]798 """
[1573]799 return self._get_column(self._getinttime, row)
[1348]800
[1573]801
[714]802 def get_sourcename(self, row=-1):
[1846]803 """\
[794]804 Get a list source names for the observations.
[714]805 Return a string for each integration in the scantable.
806 Parameters:
[1846]807
[1348]808 row: row no of integration. Default -1 return all rows.
[1846]809
[714]810 """
[1070]811 return self._get_column(self._getsourcename, row)
[714]812
[794]813 def get_elevation(self, row=-1):
[1846]814 """\
[794]815 Get a list of elevations for the observations.
816 Return a float for each integration in the scantable.
[1846]817
[794]818 Parameters:
[1846]819
[1348]820 row: row no of integration. Default -1 return all rows.
[1846]821
[794]822 """
[1070]823 return self._get_column(self._getelevation, row)
[794]824
825 def get_azimuth(self, row=-1):
[1846]826 """\
[794]827 Get a list of azimuths for the observations.
828 Return a float for each integration in the scantable.
[1846]829
[794]830 Parameters:
[1348]831 row: row no of integration. Default -1 return all rows.
[1846]832
[794]833 """
[1070]834 return self._get_column(self._getazimuth, row)
[794]835
836 def get_parangle(self, row=-1):
[1846]837 """\
[794]838 Get a list of parallactic angles for the observations.
839 Return a float for each integration in the scantable.
[1846]840
[794]841 Parameters:
[1846]842
[1348]843 row: row no of integration. Default -1 return all rows.
[1846]844
[794]845 """
[1070]846 return self._get_column(self._getparangle, row)
[794]847
[1070]848 def get_direction(self, row=-1):
849 """
850 Get a list of Positions on the sky (direction) for the observations.
[1594]851 Return a string for each integration in the scantable.
[1855]852
[1070]853 Parameters:
[1855]854
[1070]855 row: row no of integration. Default -1 return all rows
[1855]856
[1070]857 """
858 return self._get_column(self._getdirection, row)
859
[1391]860 def get_directionval(self, row=-1):
[1846]861 """\
[1391]862 Get a list of Positions on the sky (direction) for the observations.
863 Return a float for each integration in the scantable.
[1846]864
[1391]865 Parameters:
[1846]866
[1391]867 row: row no of integration. Default -1 return all rows
[1846]868
[1391]869 """
870 return self._get_column(self._getdirectionvec, row)
871
[1862]872 @asaplog_post_dec
[102]873 def set_unit(self, unit='channel'):
[1846]874 """\
[102]875 Set the unit for all following operations on this scantable
[1846]876
[102]877 Parameters:
[1846]878
879 unit: optional unit, default is 'channel'. Use one of '*Hz',
880 'km/s', 'channel' or equivalent ''
881
[102]882 """
[484]883 varlist = vars()
[1118]884 if unit in ['', 'pixel', 'channel']:
[113]885 unit = ''
886 inf = list(self._getcoordinfo())
887 inf[0] = unit
888 self._setcoordinfo(inf)
[1118]889 self._add_history("set_unit", varlist)
[113]890
[1862]891 @asaplog_post_dec
[484]892 def set_instrument(self, instr):
[1846]893 """\
[1348]894 Set the instrument for subsequent processing.
[1846]895
[358]896 Parameters:
[1846]897
[710]898 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]899 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]900
[358]901 """
902 self._setInstrument(instr)
[1118]903 self._add_history("set_instument", vars())
[358]904
[1862]905 @asaplog_post_dec
[1190]906 def set_feedtype(self, feedtype):
[1846]907 """\
[1190]908 Overwrite the feed type, which might not be set correctly.
[1846]909
[1190]910 Parameters:
[1846]911
[1190]912 feedtype: 'linear' or 'circular'
[1846]913
[1190]914 """
915 self._setfeedtype(feedtype)
916 self._add_history("set_feedtype", vars())
917
[1862]918 @asaplog_post_dec
[276]919 def set_doppler(self, doppler='RADIO'):
[1846]920 """\
[276]921 Set the doppler for all following operations on this scantable.
[1846]922
[276]923 Parameters:
[1846]924
[276]925 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]926
[276]927 """
[484]928 varlist = vars()
[276]929 inf = list(self._getcoordinfo())
930 inf[2] = doppler
931 self._setcoordinfo(inf)
[1118]932 self._add_history("set_doppler", vars())
[710]933
[1862]934 @asaplog_post_dec
[226]935 def set_freqframe(self, frame=None):
[1846]936 """\
[113]937 Set the frame type of the Spectral Axis.
[1846]938
[113]939 Parameters:
[1846]940
[591]941 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]942 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]943 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]944
945 Example::
946
[113]947 scan.set_freqframe('BARY')
[1846]948
[113]949 """
[1593]950 frame = frame or rcParams['scantable.freqframe']
[484]951 varlist = vars()
[1819]952 # "REST" is not implemented in casacore
953 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
954 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
955 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]956 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]957
[989]958 if frame in valid:
[113]959 inf = list(self._getcoordinfo())
960 inf[1] = frame
961 self._setcoordinfo(inf)
[1118]962 self._add_history("set_freqframe", varlist)
[102]963 else:
[1118]964 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]965 raise TypeError(msg)
[710]966
[1862]967 @asaplog_post_dec
[989]968 def set_dirframe(self, frame=""):
[1846]969 """\
[989]970 Set the frame type of the Direction on the sky.
[1846]971
[989]972 Parameters:
[1846]973
[989]974 frame: an optional frame type, default ''. Valid frames are:
975 'J2000', 'B1950', 'GALACTIC'
[1846]976
977 Example:
978
[989]979 scan.set_dirframe('GALACTIC')
[1846]980
[989]981 """
982 varlist = vars()
[1859]983 Scantable.set_dirframe(self, frame)
[1118]984 self._add_history("set_dirframe", varlist)
[989]985
[113]986 def get_unit(self):
[1846]987 """\
[113]988 Get the default unit set in this scantable
[1846]989
[113]990 Returns:
[1846]991
[113]992 A unit string
[1846]993
[113]994 """
995 inf = self._getcoordinfo()
996 unit = inf[0]
997 if unit == '': unit = 'channel'
998 return unit
[102]999
[1862]1000 @asaplog_post_dec
[158]1001 def get_abcissa(self, rowno=0):
[1846]1002 """\
[158]1003 Get the abcissa in the current coordinate setup for the currently
[113]1004 selected Beam/IF/Pol
[1846]1005
[113]1006 Parameters:
[1846]1007
[226]1008 rowno: an optional row number in the scantable. Default is the
1009 first row, i.e. rowno=0
[1846]1010
[113]1011 Returns:
[1846]1012
[1348]1013 The abcissa values and the format string (as a dictionary)
[1846]1014
[113]1015 """
[256]1016 abc = self._getabcissa(rowno)
[710]1017 lbl = self._getabcissalabel(rowno)
[158]1018 return abc, lbl
[113]1019
    @asaplog_post_dec
    def flag(self, row=-1, mask=None, unflag=False):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            row: an optional row number in the scantable.
                 Default -1 flags all rows

            mask: an optional channel mask, created with create_mask. Default
                  (no mask) is all channels.

            unflag: if True, unflag the data

        """
        # snapshot the arguments before 'mask' is rebound below, so the
        # history records what the caller passed
        varlist = vars()
        # an empty list stands for "all channels" (see docstring above)
        mask = mask or []
        self._flag(row, mask, unflag)
        self._add_history("flag", varlist)
1040
    @asaplog_post_dec
    def flag_row(self, rows=[], unflag=False):
        """\
        Flag the selected data in row-based manner.

        Parameters:

            rows: list of row numbers to be flagged. Default is no row
                  (must be explicitly specified to execute row-based flagging).

            unflag: if True, unflag the data.

        """
        # NOTE(review): 'rows=[]' is a shared mutable default; it is never
        # mutated in this method, but confirm _flag_row does not modify the
        # list in place before relying on that.
        varlist = vars()
        self._flag_row(rows, unflag)
        self._add_history("flag_row", varlist)
1057
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres: upper threshold.

            dthres: lower threshold

            clipoutside: True for flagging data outside the range [dthres:uthres].
                         False for flagging data inside the range.

            unflag: if True, unflag the data.

        """
        # record the caller's arguments for the history entry
        varlist = vars()
        # the actual clipping/flagging is done by the C++ layer
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1078
[1862]1079 @asaplog_post_dec
[1584]1080 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1081 """\
[1192]1082 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1083 Flagged data in the scantable gets interpolated over the region.
[1192]1084 No taper is applied.
[1846]1085
[1192]1086 Parameters:
[1846]1087
[1579]1088 start: the start frequency (really a period within the
1089 bandwidth) or period to remove
[1855]1090
[1579]1091 end: the end frequency or period to remove
[1855]1092
[1584]1093 unit: the frequency unit (default "MHz") or "" for
[1579]1094 explicit lag channels
[1846]1095
1096 *Notes*:
1097
[1579]1098 It is recommended to flag edges of the band or strong
[1348]1099 signals beforehand.
[1846]1100
[1192]1101 """
1102 if insitu is None: insitu = rcParams['insitu']
1103 self._math._setinsitu(insitu)
1104 varlist = vars()
[1579]1105 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1106 if not (unit == "" or base.has_key(unit)):
[1192]1107 raise ValueError("%s is not a valid unit." % unit)
[1859]1108 if unit == "":
1109 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1110 else:
1111 s = scantable(self._math._lag_flag(self, start*base[unit],
1112 end*base[unit], "frequency"))
[1192]1113 s._add_history("lag_flag", varlist)
1114 if insitu:
1115 self._assign(s)
1116 else:
1117 return s
[1001]1118
[1862]1119 @asaplog_post_dec
[113]1120 def create_mask(self, *args, **kwargs):
[1846]1121 """\
[1118]1122 Compute and return a mask based on [min, max] windows.
[189]1123 The specified windows are to be INCLUDED, when the mask is
[113]1124 applied.
[1846]1125
[102]1126 Parameters:
[1846]1127
[1118]1128 [min, max], [min2, max2], ...
[1024]1129 Pairs of start/end points (inclusive)specifying the regions
[102]1130 to be masked
[1855]1131
[189]1132 invert: optional argument. If specified as True,
1133 return an inverted mask, i.e. the regions
1134 specified are EXCLUDED
[1855]1135
[513]1136 row: create the mask using the specified row for
1137 unit conversions, default is row=0
1138 only necessary if frequency varies over rows.
[1846]1139
1140 Examples::
1141
[113]1142 scan.set_unit('channel')
[1846]1143 # a)
[1118]1144 msk = scan.create_mask([400, 500], [800, 900])
[189]1145 # masks everything outside 400 and 500
[113]1146 # and 800 and 900 in the unit 'channel'
1147
[1846]1148 # b)
[1118]1149 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1150 # masks the regions between 400 and 500
[113]1151 # and 800 and 900 in the unit 'channel'
[1846]1152
1153 # c)
1154 #mask only channel 400
[1554]1155 msk = scan.create_mask([400])
[1846]1156
[102]1157 """
[1554]1158 row = kwargs.get("row", 0)
[513]1159 data = self._getabcissa(row)
[113]1160 u = self._getcoordinfo()[0]
[1859]1161 if u == "":
1162 u = "channel"
1163 msg = "The current mask window unit is %s" % u
1164 i = self._check_ifs()
1165 if not i:
1166 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1167 asaplog.push(msg)
[102]1168 n = self.nchan()
[1295]1169 msk = _n_bools(n, False)
[710]1170 # test if args is a 'list' or a 'normal *args - UGLY!!!
1171
[1118]1172 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1173 and args or args[0]
[710]1174 for window in ws:
[1554]1175 if len(window) == 1:
1176 window = [window[0], window[0]]
1177 if len(window) == 0 or len(window) > 2:
1178 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1179 if window[0] > window[1]:
1180 tmp = window[0]
1181 window[0] = window[1]
1182 window[1] = tmp
[102]1183 for i in range(n):
[1024]1184 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1185 msk[i] = True
[113]1186 if kwargs.has_key('invert'):
1187 if kwargs.get('invert'):
[1295]1188 msk = mask_not(msk)
[102]1189 return msk
[710]1190
[1931]1191 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1192 """\
[1819]1193 Compute and return a list of mask windows, [min, max].
[1846]1194
[1819]1195 Parameters:
[1846]1196
[1819]1197 mask: channel mask, created with create_mask.
[1855]1198
[1819]1199 row: calcutate the masklist using the specified row
1200 for unit conversions, default is row=0
1201 only necessary if frequency varies over rows.
[1846]1202
[1819]1203 Returns:
[1846]1204
[1819]1205 [min, max], [min2, max2], ...
1206 Pairs of start/end points (inclusive)specifying
1207 the masked regions
[1846]1208
[1819]1209 """
1210 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1211 raise TypeError("The mask should be list or tuple.")
1212 if len(mask) < 2:
1213 raise TypeError("The mask elements should be > 1")
1214 if self.nchan() != len(mask):
1215 msg = "Number of channels in scantable != number of mask elements"
1216 raise TypeError(msg)
1217 data = self._getabcissa(row)
1218 u = self._getcoordinfo()[0]
[1859]1219 if u == "":
1220 u = "channel"
1221 msg = "The current mask window unit is %s" % u
1222 i = self._check_ifs()
1223 if not i:
1224 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1225 if not silent:
1226 asaplog.push(msg)
[1819]1227 masklist=[]
1228 ist, ien = None, None
1229 ist, ien=self.get_mask_indices(mask)
1230 if ist is not None and ien is not None:
1231 for i in xrange(len(ist)):
1232 range=[data[ist[i]],data[ien[i]]]
1233 range.sort()
1234 masklist.append([range[0],range[1]])
1235 return masklist
1236
1237 def get_mask_indices(self, mask=None):
[1846]1238 """\
[1819]1239 Compute and Return lists of mask start indices and mask end indices.
[1855]1240
1241 Parameters:
1242
[1819]1243 mask: channel mask, created with create_mask.
[1846]1244
[1819]1245 Returns:
[1846]1246
[1819]1247 List of mask start indices and that of mask end indices,
1248 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1249
[1819]1250 """
1251 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1252 raise TypeError("The mask should be list or tuple.")
1253 if len(mask) < 2:
1254 raise TypeError("The mask elements should be > 1")
1255 istart=[]
1256 iend=[]
1257 if mask[0]: istart.append(0)
1258 for i in range(len(mask)-1):
1259 if not mask[i] and mask[i+1]:
1260 istart.append(i+1)
1261 elif mask[i] and not mask[i+1]:
1262 iend.append(i)
1263 if mask[len(mask)-1]: iend.append(len(mask)-1)
1264 if len(istart) != len(iend):
1265 raise RuntimeError("Numbers of mask start != mask end.")
1266 for i in range(len(istart)):
1267 if istart[i] > iend[i]:
1268 raise RuntimeError("Mask start index > mask end index")
1269 break
1270 return istart,iend
1271
1272# def get_restfreqs(self):
1273# """
1274# Get the restfrequency(s) stored in this scantable.
1275# The return value(s) are always of unit 'Hz'
1276# Parameters:
1277# none
1278# Returns:
1279# a list of doubles
1280# """
1281# return list(self._getrestfreqs())
1282
1283 def get_restfreqs(self, ids=None):
[1846]1284 """\
[256]1285 Get the restfrequency(s) stored in this scantable.
1286 The return value(s) are always of unit 'Hz'
[1846]1287
[256]1288 Parameters:
[1846]1289
[1819]1290 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1291 be retrieved
[1846]1292
[256]1293 Returns:
[1846]1294
[1819]1295 dictionary containing ids and a list of doubles for each id
[1846]1296
[256]1297 """
[1819]1298 if ids is None:
1299 rfreqs={}
1300 idlist = self.getmolnos()
1301 for i in idlist:
1302 rfreqs[i]=list(self._getrestfreqs(i))
1303 return rfreqs
1304 else:
1305 if type(ids)==list or type(ids)==tuple:
1306 rfreqs={}
1307 for i in ids:
1308 rfreqs[i]=list(self._getrestfreqs(i))
1309 return rfreqs
1310 else:
1311 return list(self._getrestfreqs(ids))
1312 #return list(self._getrestfreqs(ids))
[102]1313
[931]1314 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1315 """\
[931]1316 Set or replace the restfrequency specified and
[1938]1317 if the 'freqs' argument holds a scalar,
[931]1318 then that rest frequency will be applied to all the selected
1319 data. If the 'freqs' argument holds
1320 a vector, then it MUST be of equal or smaller length than
1321 the number of IFs (and the available restfrequencies will be
1322 replaced by this vector). In this case, *all* data have
1323 the restfrequency set per IF according
1324 to the corresponding value you give in the 'freqs' vector.
[1118]1325 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1326 IF 1 gets restfreq 2e9.
[1846]1327
[1395]1328 You can also specify the frequencies via a linecatalog.
[1153]1329
[931]1330 Parameters:
[1846]1331
[931]1332 freqs: list of rest frequency values or string idenitfiers
[1855]1333
[931]1334 unit: unit for rest frequency (default 'Hz')
[402]1335
[1846]1336
1337 Example::
1338
[1819]1339 # set the given restfrequency for the all currently selected IFs
[931]1340 scan.set_restfreqs(freqs=1.4e9)
[1845]1341 # set restfrequencies for the n IFs (n > 1) in the order of the
1342 # list, i.e
1343 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1344 # len(list_of_restfreqs) == nIF
1345 # for nIF == 1 the following will set multiple restfrequency for
1346 # that IF
[1819]1347 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1348 # set multiple restfrequencies per IF. as a list of lists where
1349 # the outer list has nIF elements, the inner s arbitrary
1350 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1351
[1846]1352 *Note*:
[1845]1353
[931]1354 To do more sophisticate Restfrequency setting, e.g. on a
1355 source and IF basis, use scantable.set_selection() before using
[1846]1356 this function::
[931]1357
[1846]1358 # provided your scantable is called scan
1359 selection = selector()
1360 selection.set_name("ORION*")
1361 selection.set_ifs([1])
1362 scan.set_selection(selection)
1363 scan.set_restfreqs(freqs=86.6e9)
1364
[931]1365 """
1366 varlist = vars()
[1157]1367 from asap import linecatalog
1368 # simple value
[1118]1369 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1370 self._setrestfreqs([freqs], [""], unit)
[1157]1371 # list of values
[1118]1372 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1373 # list values are scalars
[1118]1374 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1375 if len(freqs) == 1:
1376 self._setrestfreqs(freqs, [""], unit)
1377 else:
1378 # allow the 'old' mode of setting mulitple IFs
1379 sel = selector()
1380 savesel = self._getselection()
1381 iflist = self.getifnos()
1382 if len(freqs)>len(iflist):
1383 raise ValueError("number of elements in list of list "
1384 "exeeds the current IF selections")
1385 iflist = self.getifnos()
1386 for i, fval in enumerate(freqs):
1387 sel.set_ifs(iflist[i])
1388 self._setselection(sel)
1389 self._setrestfreqs([fval], [""], unit)
1390 self._setselection(savesel)
1391
1392 # list values are dict, {'value'=, 'name'=)
[1157]1393 elif isinstance(freqs[-1], dict):
[1845]1394 values = []
1395 names = []
1396 for d in freqs:
1397 values.append(d["value"])
1398 names.append(d["name"])
1399 self._setrestfreqs(values, names, unit)
[1819]1400 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1401 sel = selector()
1402 savesel = self._getselection()
[1322]1403 iflist = self.getifnos()
[1819]1404 if len(freqs)>len(iflist):
[1845]1405 raise ValueError("number of elements in list of list exeeds"
1406 " the current IF selections")
1407 for i, fval in enumerate(freqs):
[1322]1408 sel.set_ifs(iflist[i])
[1259]1409 self._setselection(sel)
[1845]1410 self._setrestfreqs(fval, [""], unit)
[1157]1411 self._setselection(savesel)
1412 # freqs are to be taken from a linecatalog
[1153]1413 elif isinstance(freqs, linecatalog):
1414 sel = selector()
1415 savesel = self._getselection()
1416 for i in xrange(freqs.nrow()):
[1322]1417 sel.set_ifs(iflist[i])
[1153]1418 self._setselection(sel)
[1845]1419 self._setrestfreqs([freqs.get_frequency(i)],
1420 [freqs.get_name(i)], "MHz")
[1153]1421 # ensure that we are not iterating past nIF
1422 if i == self.nif()-1: break
1423 self._setselection(savesel)
[931]1424 else:
1425 return
1426 self._add_history("set_restfreqs", varlist)
1427
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta: the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # pure delegation to the C++ base class implementation
        Scantable.shift_refpix(self, delta)
[931]1443
[1862]1444 @asaplog_post_dec
[1259]1445 def history(self, filename=None):
[1846]1446 """\
[1259]1447 Print the history. Optionally to a file.
[1846]1448
[1348]1449 Parameters:
[1846]1450
[1928]1451 filename: The name of the file to save the history to.
[1846]1452
[1259]1453 """
[484]1454 hist = list(self._gethistory())
[794]1455 out = "-"*80
[484]1456 for h in hist:
[489]1457 if h.startswith("---"):
[1857]1458 out = "\n".join([out, h])
[489]1459 else:
1460 items = h.split("##")
1461 date = items[0]
1462 func = items[1]
1463 items = items[2:]
[794]1464 out += "\n"+date+"\n"
1465 out += "Function: %s\n Parameters:" % (func)
[489]1466 for i in items:
[1938]1467 if i == '':
1468 continue
[489]1469 s = i.split("=")
[1118]1470 out += "\n %s = %s" % (s[0], s[1])
[1857]1471 out = "\n".join([out, "-"*80])
[1259]1472 if filename is not None:
1473 if filename is "":
1474 filename = 'scantable_history.txt'
1475 import os
1476 filename = os.path.expandvars(os.path.expanduser(filename))
1477 if not os.path.isdir(filename):
1478 data = open(filename, 'w')
1479 data.write(out)
1480 data.close()
1481 else:
1482 msg = "Illegal file name '%s'." % (filename)
[1859]1483 raise IOError(msg)
1484 return page(out)
[513]1485 #
1486 # Maths business
1487 #
    @asaplog_post_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """\
        Return the (time) weighted average of a scan.

        *Note*:

            in channels only - align if necessary

        Parameters:

            mask: an optional mask (only used for 'var' and 'tsys'
                  weighting)

            scanav: True averages each scan separately
                    False (default) averages all scans together,

            weight: Weighting scheme.
                    'none' (mean no weight)
                    'var' (1/var(spec) weighted)
                    'tsys' (1/Tsys**2 weighted)
                    'tint' (integration time weighted)
                    'tintsys' (Tint/Tsys**2)
                    'median' ( median averaging)
                    The default is 'tint'

            align: align the spectra in velocity before averaging. It takes
                   the time of the first spectrum as reference time.

        Example::

            # time average the scantable without using a mask
            newscan = scan.average_time()

        """
        # capture arguments for the history entry before they are rebound
        varlist = vars()
        weight = weight or 'TINT'
        mask = mask or ()
        # translate the boolean into the keyword the C++ layer expects
        scanav = (scanav and 'SCAN') or 'NONE'
        scan = (self, )

        if align:
            # align on a copy so 'self' is left untouched
            scan = (self.freq_align(insitu=False), )
        s = None
        if weight.upper() == 'MEDIAN':
            # median averaging uses a different C++ entry point
            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                     scanav))
        else:
            s = scantable(self._math._average(scan, mask, weight.upper(),
                                              scanav))
        s._add_history("average_time", varlist)
        return s
[710]1540
    @asaplog_post_dec
    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
        """\
        Return a scan where all spectra are converted to either
        Jansky or Kelvin depending upon the flux units of the scan table.
        By default the function tries to look the values up internally.
        If it can't find them (or if you want to over-ride), you must
        specify EITHER jyperk OR eta (and D which it will try to look up
        also if you don't set it). jyperk takes precedence if you set both.

        Parameters:

            jyperk: the Jy / K conversion factor

            eta: the aperture efficiency

            d: the geometric diameter (metres)

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # -1.0 appears to be the "look up internally" sentinel passed to the
        # C++ layer. NOTE(review): 'x or -1.0' also maps an explicit 0.0 to
        # the sentinel; presumably 0 is non-physical here — confirm.
        jyperk = jyperk or -1.0
        d = d or -1.0
        eta = eta or -1.0
        s = scantable(self._math._convertflux(self, d, eta, jyperk))
        s._add_history("convert_flux", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1574
    @asaplog_post_dec
    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
        """\
        Return a scan after applying a gain-elevation correction.
        The correction can be made via either a polynomial or a
        table-based interpolation (and extrapolation if necessary).
        You specify polynomial coefficients, an ascii table or neither.
        If you specify neither, then a polynomial correction will be made
        with built in coefficients known for certain telescopes (an error
        will occur if the instrument is not known).
        The data and Tsys are *divided* by the scaling factors.

        Parameters:

            poly: Polynomial coefficients (default None) to compute a
                  gain-elevation correction as a function of
                  elevation (in degrees).

            filename: The name of an ascii file holding correction factors.
                      The first row of the ascii file must give the column
                      names and these MUST include columns
                      "ELEVATION" (degrees) and "FACTOR" (multiply data
                      by this) somewhere.
                      The second row must give the data type of the
                      column. Use 'R' for Real and 'I' for Integer.
                      An example file would be
                      (actual factors are arbitrary) :

                      TIME ELEVATION FACTOR
                      R R R
                      0.1 0 0.8
                      0.2 20 0.85
                      0.3 40 0.9
                      0.4 60 0.85
                      0.5 80 0.8
                      0.6 90 0.75

            method: Interpolation method when correcting from a table.
                    Values are "nearest", "linear" (default), "cubic"
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """

        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # an empty tuple tells the C++ layer to use built-in coefficients
        poly = poly or ()
        from os.path import expandvars
        # allow $VAR references in the correction-table path
        filename = expandvars(filename)
        s = scantable(self._math._gainel(self, poly, filename, method))
        s._add_history("gain_el", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[710]1634
    @asaplog_post_dec
    def freq_align(self, reftime=None, method='cubic', insitu=None):
        """\
        Return a scan where all rows have been aligned in frequency/velocity.
        The alignment frequency frame (e.g. LSRK) is that set by function
        set_freqframe.

        Parameters:

            reftime: reference time to align at. By default, the time of
                     the first row of data is used.

            method: Interpolation method for regridding the spectra.
                    Choose from "nearest", "linear", "cubic" (default)
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams["insitu"]
        self._math._setinsitu(insitu)
        varlist = vars()
        # an empty string means "use the time of the first row"
        reftime = reftime or ""
        s = scantable(self._math._freq_align(self, reftime, method))
        s._add_history("freq_align", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1664
    @asaplog_post_dec
    def opacity(self, tau=None, insitu=None):
        """\
        Apply an opacity correction. The data
        and Tsys are multiplied by the correction factor.

        Parameters:

            tau: (list of) opacity from which the correction factor is
                 exp(tau*ZD)
                 where ZD is the zenith-distance.
                 If a list is provided, it has to be of length nIF,
                 nIF*nPol or 1 and in order of IF/POL, e.g.
                 [opif0pol0, opif0pol1, opif1pol0 ...]
                 if tau is `None` the opacities are determined from a
                 model.

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # promote a scalar (or None) to a one-element list for the C++ call
        if not hasattr(tau, "__len__"):
            tau = [tau]
        s = scantable(self._math._opacity(self, tau))
        s._add_history("opacity", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1696
    @asaplog_post_dec
    def bin(self, width=5, insitu=None):
        """\
        Return a scan where all spectra have been binned up.

        Parameters:

            width: The bin width (default=5) in pixels

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        # snapshot arguments for the history record
        varlist = vars()
        s = scantable(self._math._bin(self, width))
        s._add_history("bin", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[513]1720
    @asaplog_post_dec
    def resample(self, width=5, method='cubic', insitu=None):
        """\
        Return a scan where all spectra have been resampled with the given
        width, using the specified interpolation method.
        (The previous summary, "binned up", was a copy-paste from bin().)

        Parameters:

            width: The bin width (default=5) in pixels

            method: Interpolation method when correcting from a table.
                    Values are "nearest", "linear", "cubic" (default)
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._resample(self, method, width))
        s._add_history("resample", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1746
    @asaplog_post_dec
    def average_pol(self, mask=None, weight='none'):
        """\
        Average the Polarisations together.

        Parameters:

            mask: An optional mask defining the region, where the
                  averaging will be applied. The output will have all
                  specified points masked.

            weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
                    weighted), or 'tsys' (1/Tsys**2 weighted)

        """
        varlist = vars()
        # an empty tuple stands for "no mask" in the C++ call
        mask = mask or ()
        s = scantable(self._math._averagepol(self, mask, weight.upper()))
        s._add_history("average_pol", varlist)
        return s
[513]1767
    @asaplog_post_dec
    def average_beam(self, mask=None, weight='none'):
        """\
        Average the Beams together.

        Parameters:

            mask: An optional mask defining the region, where the
                  averaging will be applied. The output will have all
                  specified points masked.

            weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
                    weighted), or 'tsys' (1/Tsys**2 weighted)

        """
        varlist = vars()
        # an empty tuple stands for "no mask" in the C++ call
        mask = mask or ()
        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
        s._add_history("average_beam", varlist)
        return s
1787
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag: Bool indicating whether to turn this on (True) or
                   off (False)

        """
        varlist = vars()
        # the flag itself lives in the underlying C++ object
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
1802
    @asaplog_post_dec
    def convert_pol(self, poltype=None):
        """\
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.

        Parameters:

            poltype: The new polarisation type. Valid types are:
                     "linear", "circular", "stokes" and "linpol"

        """
        varlist = vars()
        # conversion always produces a new scantable; never in-situ
        s = scantable(self._math._convertpol(self, poltype))
        s._add_history("convert_pol", varlist)
        return s
1819
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defauls to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polnomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each indivual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy for interactive comparison/rejection
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # choose the GUI or non-GUI plotter backend at runtime
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                # interactive veto: restore the original spectrum on 'n'
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1897
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'

            uselin: use linear polynomial fit

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows: row numbers of spectra to be processed.
                  (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # default: fit against every channel
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # per-row fit results, kept on self for later inspection
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # combine the user mask with the row's own channel flags
                f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected fits leave None placeholders so list
                        # indices still line up with processed rows
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum by the fit residual (baseline removed)
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1984
[1931]1985 @asaplog_post_dec
[1907]1986 def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1987 """\
1988 Return a scan which has been baselined (all rows) by a polynomial.
1989 Parameters:
1990 mask: an optional mask
1991 order: the order of the polynomial (default is 0)
1992 plot: plot the fit and the residual. In this each
1993 indivual fit has to be approved, by typing 'y'
1994 or 'n'. Ignored if batch = True.
1995 batch: if True a faster algorithm is used and logs
1996 including the fit results are not output
1997 (default is False)
1998 insitu: if False a new scantable is returned.
1999 Otherwise, the scaling is done in-situ
2000 The default is taken from .asaprc (False)
[1931]2001 rows: row numbers of spectra to be baselined.
[1907]2002 (default is None: for all rows)
2003 Example:
2004 # return a scan baselined by a third order polynomial,
2005 # not using a mask
2006 bscan = scan.poly_baseline(order=3)
2007 """
[1931]2008
2009 varlist = vars()
2010
[1907]2011 if insitu is None: insitu = rcParams["insitu"]
2012 if insitu:
2013 workscan = self
2014 else:
2015 workscan = self.copy()
2016
2017 nchan = workscan.nchan()
2018
2019 if mask is None:
2020 mask = [True for i in xrange(nchan)]
2021
2022 try:
2023 if rows == None:
2024 rows = xrange(workscan.nrow())
2025 elif isinstance(rows, int):
2026 rows = [ rows ]
2027
2028 if len(rows) > 0:
[1931]2029 workscan.blpars = []
2030 workscan.masklists = []
2031 workscan.actualmask = []
[1907]2032
2033 if batch:
[1931]2034 workscan._poly_baseline_batch(mask, order)
[1907]2035 elif plot:
2036 f = fitter()
2037 f.set_function(lpoly=order)
2038 for r in rows:
2039 f.x = workscan._getabcissa(r)
2040 f.y = workscan._getspectrum(r)
2041 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
2042 f.data = None
2043 f.fit()
2044
2045 f.plot(residual=True)
2046 accept_fit = raw_input("Accept fit ( [y]/n ): ")
2047 if accept_fit.upper() == "N":
2048 self.blpars.append(None)
2049 self.masklists.append(None)
2050 self.actualmask.append(None)
2051 continue
2052 workscan._setspectrum(f.fitter.getresidual(), r)
[1931]2053 workscan.blpars.append(f.get_parameters())
2054 workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2055 workscan.actualmask.append(f.mask)
[1907]2056
2057 f._p.unmap()
2058 f._p = None
2059 else:
2060 for r in rows:
[1931]2061 fitparams = workscan._poly_baseline(mask, order, r)
2062 params = fitparams.getparameters()
2063 fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2064 errors = fitparams.geterrors()
2065 fmask = mask_and(mask, workscan._getmask(r))
2066
2067 workscan.blpars.append({"params":params,
2068 "fixed": fitparams.getfixedparameters(),
2069 "formatted":fmtd, "errors":errors})
2070 workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2071 workscan.actualmask.append(fmask)
[1907]2072
[1931]2073 asaplog.push(fmtd)
[1907]2074
2075 workscan._add_history("poly_baseline", varlist)
2076
2077 if insitu:
2078 self._assign(workscan)
2079 else:
2080 return workscan
2081
[1919]2082 except RuntimeError, e:
[1907]2083 msg = "The fit failed, possibly because it didn't converge."
2084 if rcParams["verbose"]:
[1919]2085 asaplog.push(str(e))
[1907]2086 asaplog.push(str(msg))
2087 return
2088 else:
[1919]2089 raise RuntimeError(str(e)+'\n'+msg)
[1907]2090
2091
2092 def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2093 threshold=3, chan_avg_limit=1, plot=False,
[1907]2094 insitu=None, rows=None):
[1846]2095 """\
[1931]2096 Return a scan which has been baselined (all rows) by a polynomial.
[880]2097 Spectral lines are detected first using linefinder and masked out
2098 to avoid them affecting the baseline solution.
2099
2100 Parameters:
[1846]2101
[880]2102 mask: an optional mask retreived from scantable
[1846]2103
2104 edge: an optional number of channel to drop at the edge of
2105 spectrum. If only one value is
[880]2106 specified, the same number will be dropped from
2107 both sides of the spectrum. Default is to keep
[907]2108 all channels. Nested tuples represent individual
[976]2109 edge selection for different IFs (a number of spectral
2110 channels can be different)
[1846]2111
[880]2112 order: the order of the polynomial (default is 0)
[1846]2113
[880]2114 threshold: the threshold used by line finder. It is better to
2115 keep it large as only strong lines affect the
2116 baseline solution.
[1846]2117
[1280]2118 chan_avg_limit:
2119 a maximum number of consequtive spectral channels to
2120 average during the search of weak and broad lines.
2121 The default is no averaging (and no search for weak
2122 lines). If such lines can affect the fitted baseline
2123 (e.g. a high order polynomial is fitted), increase this
2124 parameter (usually values up to 8 are reasonable). Most
2125 users of this method should find the default value
2126 sufficient.
[1846]2127
[1061]2128 plot: plot the fit and the residual. In this each
2129 indivual fit has to be approved, by typing 'y'
2130 or 'n'
[1846]2131
[880]2132 insitu: if False a new scantable is returned.
2133 Otherwise, the scaling is done in-situ
2134 The default is taken from .asaprc (False)
[1907]2135 rows: row numbers of spectra to be processed.
2136 (default is None: for all rows)
[880]2137
[1846]2138
2139 Example::
2140
2141 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2142
[880]2143 """
2144 if insitu is None: insitu = rcParams['insitu']
2145 varlist = vars()
2146 from asap.asaplinefind import linefinder
2147 from asap import _is_sequence_or_number as _is_valid
2148
[976]2149 # check whether edge is set up for each IF individually
[1118]2150 individualedge = False;
2151 if len(edge) > 1:
2152 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2153 individualedge = True;
[907]2154
[1118]2155 if not _is_valid(edge, int) and not individualedge:
[909]2156 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2157 pair of integers specified as a tuple. Nested tuples are allowed \
2158 to make individual selection for different IFs."
[919]2159
[1118]2160 curedge = (0, 0)
2161 if individualedge:
2162 for edgepar in edge:
2163 if not _is_valid(edgepar, int):
2164 raise ValueError, "Each element of the 'edge' tuple has \
2165 to be a pair of integers or an integer."
[907]2166 else:
[1118]2167 curedge = edge;
[880]2168
[1907]2169 if not insitu:
2170 workscan = self.copy()
2171 else:
2172 workscan = self
2173
[880]2174 # setup fitter
2175 f = fitter()
[1907]2176 f.set_function(lpoly=order)
[880]2177
2178 # setup line finder
[1118]2179 fl = linefinder()
[1268]2180 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2181
[907]2182 fl.set_scan(workscan)
2183
[1907]2184 if mask is None:
2185 mask = _n_bools(workscan.nchan(), True)
2186
2187 if rows is None:
2188 rows = xrange(workscan.nrow())
2189 elif isinstance(rows, int):
2190 rows = [ rows ]
2191
[1819]2192 # Save parameters of baseline fits & masklists as a class attribute.
2193 # NOTICE: It does not reflect changes in scantable!
2194 if len(rows) > 0:
2195 self.blpars=[]
2196 self.masklists=[]
[1907]2197 self.actualmask=[]
[880]2198 asaplog.push("Processing:")
2199 for r in rows:
[1118]2200 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2201 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2202 workscan.getpol(r), workscan.getcycle(r))
[880]2203 asaplog.push(msg, False)
[907]2204
[976]2205 # figure out edge parameter
[1118]2206 if individualedge:
2207 if len(edge) >= workscan.getif(r):
2208 raise RuntimeError, "Number of edge elements appear to " \
2209 "be less than the number of IFs"
2210 curedge = edge[workscan.getif(r)]
[919]2211
[1907]2212 actualmask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
[1819]2213
[976]2214 # setup line finder
[1819]2215 fl.find_lines(r, actualmask, curedge)
[1907]2216
[1819]2217 f.x = workscan._getabcissa(r)
2218 f.y = workscan._getspectrum(r)
[1907]2219 f.mask = fl.get_mask()
[1819]2220 f.data = None
[880]2221 f.fit()
[1819]2222
2223 # Show mask list
[1931]2224 masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2225 msg = "mask range: "+str(masklist)
2226 asaplog.push(msg, False)
2227
[1061]2228 if plot:
2229 f.plot(residual=True)
2230 x = raw_input("Accept fit ( [y]/n ): ")
2231 if x.upper() == 'N':
[1819]2232 self.blpars.append(None)
2233 self.masklists.append(None)
[1907]2234 self.actualmask.append(None)
[1061]2235 continue
[1819]2236
[880]2237 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2238 self.blpars.append(f.get_parameters())
2239 self.masklists.append(masklist)
[1907]2240 self.actualmask.append(f.mask)
[1061]2241 if plot:
2242 f._p.unmap()
2243 f._p = None
2244 workscan._add_history("auto_poly_baseline", varlist)
[880]2245 if insitu:
2246 self._assign(workscan)
2247 else:
2248 return workscan
2249
[1862]2250 @asaplog_post_dec
[914]2251 def rotate_linpolphase(self, angle):
[1846]2252 """\
[914]2253 Rotate the phase of the complex polarization O=Q+iU correlation.
2254 This is always done in situ in the raw data. So if you call this
2255 function more than once then each call rotates the phase further.
[1846]2256
[914]2257 Parameters:
[1846]2258
[914]2259 angle: The angle (degrees) to rotate (add) by.
[1846]2260
2261 Example::
2262
[914]2263 scan.rotate_linpolphase(2.3)
[1846]2264
[914]2265 """
2266 varlist = vars()
[936]2267 self._math._rotate_linpolphase(self, angle)
[914]2268 self._add_history("rotate_linpolphase", varlist)
2269 return
[710]2270
[1862]2271 @asaplog_post_dec
[914]2272 def rotate_xyphase(self, angle):
[1846]2273 """\
[914]2274 Rotate the phase of the XY correlation. This is always done in situ
2275 in the data. So if you call this function more than once
2276 then each call rotates the phase further.
[1846]2277
[914]2278 Parameters:
[1846]2279
[914]2280 angle: The angle (degrees) to rotate (add) by.
[1846]2281
2282 Example::
2283
[914]2284 scan.rotate_xyphase(2.3)
[1846]2285
[914]2286 """
2287 varlist = vars()
[936]2288 self._math._rotate_xyphase(self, angle)
[914]2289 self._add_history("rotate_xyphase", varlist)
2290 return
2291
[1862]2292 @asaplog_post_dec
[914]2293 def swap_linears(self):
[1846]2294 """\
[1573]2295 Swap the linear polarisations XX and YY, or better the first two
[1348]2296 polarisations as this also works for ciculars.
[914]2297 """
2298 varlist = vars()
[936]2299 self._math._swap_linears(self)
[914]2300 self._add_history("swap_linears", varlist)
2301 return
2302
[1862]2303 @asaplog_post_dec
[914]2304 def invert_phase(self):
[1846]2305 """\
[914]2306 Invert the phase of the complex polarisation
2307 """
2308 varlist = vars()
[936]2309 self._math._invert_phase(self)
[914]2310 self._add_history("invert_phase", varlist)
2311 return
2312
[1862]2313 @asaplog_post_dec
[876]2314 def add(self, offset, insitu=None):
[1846]2315 """\
[513]2316 Return a scan where all spectra have the offset added
[1846]2317
[513]2318 Parameters:
[1846]2319
[513]2320 offset: the offset
[1855]2321
[513]2322 insitu: if False a new scantable is returned.
2323 Otherwise, the scaling is done in-situ
2324 The default is taken from .asaprc (False)
[1846]2325
[513]2326 """
2327 if insitu is None: insitu = rcParams['insitu']
[876]2328 self._math._setinsitu(insitu)
[513]2329 varlist = vars()
[876]2330 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2331 s._add_history("add", varlist)
[876]2332 if insitu:
2333 self._assign(s)
2334 else:
[513]2335 return s
2336
[1862]2337 @asaplog_post_dec
[1308]2338 def scale(self, factor, tsys=True, insitu=None):
[1846]2339 """\
2340
[1938]2341 Return a scan where all spectra are scaled by the given 'factor'
[1846]2342
[513]2343 Parameters:
[1846]2344
[1819]2345 factor: the scaling factor (float or 1D float list)
[1855]2346
[513]2347 insitu: if False a new scantable is returned.
2348 Otherwise, the scaling is done in-situ
2349 The default is taken from .asaprc (False)
[1855]2350
[513]2351 tsys: if True (default) then apply the operation to Tsys
2352 as well as the data
[1846]2353
[513]2354 """
2355 if insitu is None: insitu = rcParams['insitu']
[876]2356 self._math._setinsitu(insitu)
[513]2357 varlist = vars()
[1819]2358 s = None
2359 import numpy
2360 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2361 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2362 from asapmath import _array2dOp
2363 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2364 else:
2365 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2366 else:
2367 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2368 s._add_history("scale", varlist)
[876]2369 if insitu:
2370 self._assign(s)
2371 else:
[513]2372 return s
2373
[1504]2374 def set_sourcetype(self, match, matchtype="pattern",
2375 sourcetype="reference"):
[1846]2376 """\
[1502]2377 Set the type of the source to be an source or reference scan
[1846]2378 using the provided pattern.
2379
[1502]2380 Parameters:
[1846]2381
[1504]2382 match: a Unix style pattern, regular expression or selector
[1855]2383
[1504]2384 matchtype: 'pattern' (default) UNIX style pattern or
2385 'regex' regular expression
[1855]2386
[1502]2387 sourcetype: the type of the source to use (source/reference)
[1846]2388
[1502]2389 """
2390 varlist = vars()
2391 basesel = self.get_selection()
2392 stype = -1
2393 if sourcetype.lower().startswith("r"):
2394 stype = 1
2395 elif sourcetype.lower().startswith("s"):
2396 stype = 0
[1504]2397 else:
[1502]2398 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2399 if matchtype.lower().startswith("p"):
2400 matchtype = "pattern"
2401 elif matchtype.lower().startswith("r"):
2402 matchtype = "regex"
2403 else:
2404 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2405 sel = selector()
2406 if isinstance(match, selector):
2407 sel = match
2408 else:
[1504]2409 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2410 self.set_selection(basesel+sel)
2411 self._setsourcetype(stype)
2412 self.set_selection(basesel)
[1573]2413 self._add_history("set_sourcetype", varlist)
[1502]2414
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # split the rows by source type: 'psoff' rows become the
            # reference set, 'pson' rows the signal set; the selection is
            # restored afterwards by the preserve_selection decorator
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # delegate on/off matching (closest off in time) to the C++ layer
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]2463
[1862]2464 @asaplog_post_dec
[1145]2465 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2466 """\
[1143]2467 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2468
[1143]2469 Parameters:
[1846]2470
[1145]2471 mask: an optional mask to be used when weight == 'stddev'
[1855]2472
[1143]2473 weight: How to average the off beams. Default is 'median'.
[1855]2474
[1145]2475 preserve: you can preserve (default) the continuum or
[1855]2476 remove it. The equations used are:
[1846]2477
[1855]2478 preserve: Output = Toff * (on/off) - Toff
2479
2480 remove: Output = Toff * (on/off) - Ton
2481
[1217]2482 """
[1593]2483 mask = mask or ()
[1141]2484 varlist = vars()
2485 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2486 preoff = scantable(self._math._mx_extract(self, 'off'))
2487 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2488 from asapmath import quotient
[1145]2489 q = quotient(on, off, preserve)
[1143]2490 q._add_history("mx_quotient", varlist)
[1217]2491 return q
[513]2492
[1862]2493 @asaplog_post_dec
[718]2494 def freq_switch(self, insitu=None):
[1846]2495 """\
[718]2496 Apply frequency switching to the data.
[1846]2497
[718]2498 Parameters:
[1846]2499
[718]2500 insitu: if False a new scantable is returned.
2501 Otherwise, the swictching is done in-situ
2502 The default is taken from .asaprc (False)
[1846]2503
[718]2504 """
2505 if insitu is None: insitu = rcParams['insitu']
[876]2506 self._math._setinsitu(insitu)
[718]2507 varlist = vars()
[876]2508 s = scantable(self._math._freqswitch(self))
[1118]2509 s._add_history("freq_switch", varlist)
[1856]2510 if insitu:
2511 self._assign(s)
2512 else:
2513 return s
[718]2514
[1862]2515 @asaplog_post_dec
[780]2516 def recalc_azel(self):
[1846]2517 """Recalculate the azimuth and elevation for each position."""
[780]2518 varlist = vars()
[876]2519 self._recalcazel()
[780]2520 self._add_history("recalc_azel", varlist)
2521 return
2522
[1862]2523 @asaplog_post_dec
[513]2524 def __add__(self, other):
2525 varlist = vars()
2526 s = None
2527 if isinstance(other, scantable):
[1573]2528 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2529 elif isinstance(other, float):
[876]2530 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2531 else:
[718]2532 raise TypeError("Other input is not a scantable or float value")
[513]2533 s._add_history("operator +", varlist)
2534 return s
2535
[1862]2536 @asaplog_post_dec
[513]2537 def __sub__(self, other):
2538 """
2539 implicit on all axes and on Tsys
2540 """
2541 varlist = vars()
2542 s = None
2543 if isinstance(other, scantable):
[1588]2544 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2545 elif isinstance(other, float):
[876]2546 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2547 else:
[718]2548 raise TypeError("Other input is not a scantable or float value")
[513]2549 s._add_history("operator -", varlist)
2550 return s
[710]2551
[1862]2552 @asaplog_post_dec
[513]2553 def __mul__(self, other):
2554 """
2555 implicit on all axes and on Tsys
2556 """
2557 varlist = vars()
2558 s = None
2559 if isinstance(other, scantable):
[1588]2560 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2561 elif isinstance(other, float):
[876]2562 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2563 else:
[718]2564 raise TypeError("Other input is not a scantable or float value")
[513]2565 s._add_history("operator *", varlist)
2566 return s
2567
[710]2568
[1862]2569 @asaplog_post_dec
[513]2570 def __div__(self, other):
2571 """
2572 implicit on all axes and on Tsys
2573 """
2574 varlist = vars()
2575 s = None
2576 if isinstance(other, scantable):
[1589]2577 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2578 elif isinstance(other, float):
2579 if other == 0.0:
[718]2580 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2581 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2582 else:
[718]2583 raise TypeError("Other input is not a scantable or float value")
[513]2584 s._add_history("operator /", varlist)
2585 return s
2586
[1862]2587 @asaplog_post_dec
[530]2588 def get_fit(self, row=0):
[1846]2589 """\
[530]2590 Print or return the stored fits for a row in the scantable
[1846]2591
[530]2592 Parameters:
[1846]2593
[530]2594 row: the row which the fit has been applied to.
[1846]2595
[530]2596 """
2597 if row > self.nrow():
2598 return
[976]2599 from asap.asapfit import asapfit
[530]2600 fit = asapfit(self._getfit(row))
[1859]2601 asaplog.push( '%s' %(fit) )
2602 return fit.as_dict()
[530]2603
[1483]2604 def flag_nans(self):
[1846]2605 """\
[1483]2606 Utility function to flag NaN values in the scantable.
2607 """
2608 import numpy
2609 basesel = self.get_selection()
2610 for i in range(self.nrow()):
[1589]2611 sel = self.get_row_selector(i)
2612 self.set_selection(basesel+sel)
[1483]2613 nans = numpy.isnan(self._getspectrum(0))
2614 if numpy.any(nans):
2615 bnans = [ bool(v) for v in nans]
2616 self.flag(bnans)
2617 self.set_selection(basesel)
2618
[1588]2619 def get_row_selector(self, rowno):
[1992]2620 #return selector(beams=self.getbeam(rowno),
2621 # ifs=self.getif(rowno),
2622 # pols=self.getpol(rowno),
2623 # scans=self.getscan(rowno),
2624 # cycles=self.getcycle(rowno))
2625 return selector(rows=[rowno])
[1573]2626
[484]2627 def _add_history(self, funcname, parameters):
[1435]2628 if not rcParams['scantable.history']:
2629 return
[484]2630 # create date
2631 sep = "##"
2632 from datetime import datetime
2633 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2634 hist = dstr+sep
2635 hist += funcname+sep#cdate+sep
2636 if parameters.has_key('self'): del parameters['self']
[1118]2637 for k, v in parameters.iteritems():
[484]2638 if type(v) is dict:
[1118]2639 for k2, v2 in v.iteritems():
[484]2640 hist += k2
2641 hist += "="
[1118]2642 if isinstance(v2, scantable):
[484]2643 hist += 'scantable'
2644 elif k2 == 'mask':
[1118]2645 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2646 hist += str(self._zip_mask(v2))
2647 else:
2648 hist += str(v2)
[484]2649 else:
[513]2650 hist += str(v2)
[484]2651 else:
2652 hist += k
2653 hist += "="
[1118]2654 if isinstance(v, scantable):
[484]2655 hist += 'scantable'
2656 elif k == 'mask':
[1118]2657 if isinstance(v, list) or isinstance(v, tuple):
[513]2658 hist += str(self._zip_mask(v))
2659 else:
2660 hist += str(v)
[484]2661 else:
2662 hist += str(v)
2663 hist += sep
2664 hist = hist[:-2] # remove trailing '##'
2665 self._addhistory(hist)
2666
[710]2667
[484]2668 def _zip_mask(self, mask):
2669 mask = list(mask)
2670 i = 0
2671 segments = []
2672 while mask[i:].count(1):
2673 i += mask[i:].index(1)
2674 if mask[i:].count(0):
2675 j = i + mask[i:].index(0)
2676 else:
[710]2677 j = len(mask)
[1118]2678 segments.append([i, j])
[710]2679 i = j
[484]2680 return segments
[714]2681
[626]2682 def _get_ordinate_label(self):
2683 fu = "("+self.get_fluxunit()+")"
2684 import re
2685 lbl = "Intensity"
[1118]2686 if re.match(".K.", fu):
[626]2687 lbl = "Brightness Temperature "+ fu
[1118]2688 elif re.match(".Jy.", fu):
[626]2689 lbl = "Flux density "+ fu
2690 return lbl
[710]2691
[876]2692 def _check_ifs(self):
[1986]2693 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
2694 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]2695 nchans = filter(lambda t: t > 0, nchans)
[876]2696 return (sum(nchans)/len(nchans) == nchans[0])
[976]2697
    @asaplog_post_dec
    #def _fill(self, names, unit, average, getpt, antenna):
    def _fill(self, names, unit, average, opts={}):
        """\
        Read one or more data files into this scantable, merging them.

        Parameters:

            names:      list of file/directory names to import

            unit:       flux unit to set on the result, or None to leave
                        it unchanged

            average:    if True, time-average the integrations of each
                        file after filling

            opts:       extra options passed through to the filler's
                        open() call
                        NOTE(review): {} is a shared mutable default; safe
                        only while callees never modify it - verify

        """
        first = True  # True until the first file has been merged in
        fullnames = []
        # expand and validate every path before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 0 = memory table, 1 = disk table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # MeasurementSets go through the MS filler, everything else
            # through the generic filler (is_ms is defined elsewhere in
            # this module)
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            #r = filler(tbl)
            #rx = rcParams['scantable.reference']
            #r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
            r.open(name, opts)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge the freshly read table with what has accumulated
                # in self so far
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2743
[1402]2744 def __getitem__(self, key):
2745 if key < 0:
2746 key += self.nrow()
2747 if key >= self.nrow():
2748 raise IndexError("Row index out of range.")
2749 return self._getspectrum(key)
2750
2751 def __setitem__(self, key, value):
2752 if key < 0:
2753 key += self.nrow()
2754 if key >= self.nrow():
2755 raise IndexError("Row index out of range.")
2756 if not hasattr(value, "__len__") or \
2757 len(value) > self.nchan(self.getif(key)):
2758 raise ValueError("Spectrum length doesn't match.")
2759 return self._setspectrum(value, key)
2760
2761 def __len__(self):
2762 return self.nrow()
2763
2764 def __iter__(self):
2765 for i in range(len(self)):
2766 yield self[i]
Note: See TracBrowser for help on using the repository browser.