source: trunk/python/scantable.py@ 1947

Last change on this file since 1947 was 1947, checked in by Kana Sugimoto, 15 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed:
+ An optional parameter 'prec (unsigned int)' is added to

scantable.get_time, python_Scantable::_gettime, ScantableWrapper::getTime and Scantable::getTime.

+ Also Scantable::formatTime accepts 'prec' as a parameter.
+ scantable._get_column accepts args which will be passed to callback function.

Test Programs:

Put in Release Notes: No

Module(s): scantable

Description:

Add a parameter prec to scantable.get_time which specifies the precision of time returned.
The default value is prec=0.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.0 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]18from asap.asapfitter import fitter
[102]19
[1689]20
def preserve_selection(func):
    """Decorator that snapshots the selection of the scantable argument,
    invokes the wrapped callable and always restores the snapshot
    afterwards - even when the call raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
31
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for any casacore table directory that is *not* a
    MeasurementSet.  The table.info type is deliberately not checked
    for 'Scantable' (the stricter check proved too restrictive), so
    other non-MS tables also match.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # previously the file handle leaked if readline() raised
    f = open(filename + '/table.info')
    try:
        line = f.readline()
    finally:
        f.close()
    return line.find('Measurement Set') == -1
[1697]56
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    dat = filename + '/table.dat'
    # an MS is a casacore table directory whose table.info first line
    # mentions 'Measurement Set'
    if os.path.isdir(filename) and os.path.exists(info) \
            and os.path.exists(dat):
        fobj = open(info)
        first = fobj.readline()
        fobj.close()
        return first.find('Measurement Set') != -1
    return False
77
[876]78class scantable(Scantable):
[1846]79 """\
80 The ASAP container for scans (single-dish data).
[102]81 """
[1819]82
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # old explicit getpt/antenna handling; both are now accepted via
        # **args and forwarded to the MS filler (see the is_ms branch below)
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the call arguments for the history record (before any
        # further locals are created)
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #    and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: forward the recognised MS-only
                    # keyword arguments ('getpt', 'antenna') to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: rpfits/sdfits input
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    # NOTE(review): message is missing a space after '%s'
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names is filled in one go
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]185
[1862]186 @asaplog_post_dec
[876]187 def save(self, name=None, format=None, overwrite=False):
[1846]188 """\
[1280]189 Store the scantable on disk. This can be an asap (aips++) Table,
190 SDFITS or MS2 format.
[1846]191
[116]192 Parameters:
[1846]193
[1093]194 name: the name of the outputfile. For format "ASCII"
195 this is the root file name (data in 'name'.txt
[497]196 and header in 'name'_header.txt)
[1855]197
[116]198 format: an optional file format. Default is ASAP.
[1855]199 Allowed are:
200
201 * 'ASAP' (save as ASAP [aips++] Table),
202 * 'SDFITS' (save as SDFITS file)
203 * 'ASCII' (saves as ascii text file)
204 * 'MS2' (saves as an casacore MeasurementSet V2)
205 * 'FITS' (save as image FITS - not readable by class)
206 * 'CLASS' (save as FITS readable by CLASS)
207
[411]208 overwrite: If the file should be overwritten if it exists.
[256]209 The default False is to return with warning
[411]210 without writing the output. USE WITH CARE.
[1855]211
[1846]212 Example::
213
[116]214 scan.save('myscan.asap')
[1118]215 scan.save('myscan.sdfits', 'SDFITS')
[1846]216
[116]217 """
[411]218 from os import path
[1593]219 format = format or rcParams['scantable.save']
[256]220 suffix = '.'+format.lower()
[1118]221 if name is None or name == "":
[256]222 name = 'scantable'+suffix
[718]223 msg = "No filename given. Using default name %s..." % name
224 asaplog.push(msg)
[411]225 name = path.expandvars(name)
[256]226 if path.isfile(name) or path.isdir(name):
227 if not overwrite:
[718]228 msg = "File %s exists." % name
[1859]229 raise IOError(msg)
[451]230 format2 = format.upper()
231 if format2 == 'ASAP':
[116]232 self._save(name)
233 else:
[989]234 from asap._asap import stwriter as stw
[1118]235 writer = stw(format2)
236 writer.write(self, name)
[116]237 return
238
[102]239 def copy(self):
[1846]240 """Return a copy of this scantable.
241
242 *Note*:
243
[1348]244 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]245
246 Example::
247
[102]248 copiedscan = scan.copy()
[1846]249
[102]250 """
[876]251 sd = scantable(Scantable._copy(self))
[113]252 return sd
253
[1093]254 def drop_scan(self, scanid=None):
[1846]255 """\
[1093]256 Return a new scantable where the specified scan number(s) has(have)
257 been dropped.
[1846]258
[1093]259 Parameters:
[1846]260
[1093]261 scanid: a (list of) scan number(s)
[1846]262
[1093]263 """
264 from asap import _is_sequence_or_number as _is_valid
265 from asap import _to_list
266 from asap import unique
267 if not _is_valid(scanid):
[1859]268 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269 scanid = _to_list(scanid)
270 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271 for sid in scanid: allscans.remove(sid)
272 if len(allscans) == 0:
273 raise ValueError("Can't remove all scans")
274 sel = selector(scans=allscans)
275 return self._select_copy(sel)
[1093]276
[1594]277 def _select_copy(self, selection):
278 orig = self.get_selection()
279 self.set_selection(orig+selection)
280 cp = self.copy()
281 self.set_selection(orig)
282 return cp
283
[102]284 def get_scan(self, scanid=None):
[1855]285 """\
[102]286 Return a specific scan (by scanno) or collection of scans (by
287 source name) in a new scantable.
[1846]288
289 *Note*:
290
[1348]291 See scantable.drop_scan() for the inverse operation.
[1846]292
[102]293 Parameters:
[1846]294
[513]295 scanid: a (list of) scanno or a source name, unix-style
296 patterns are accepted for source name matching, e.g.
297 '*_R' gets all 'ref scans
[1846]298
299 Example::
300
[513]301 # get all scans containing the source '323p459'
302 newscan = scan.get_scan('323p459')
303 # get all 'off' scans
304 refscans = scan.get_scan('*_R')
305 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]306 newscan = scan.get_scan([0, 2, 7, 10])
[1846]307
[102]308 """
309 if scanid is None:
[1859]310 raise RuntimeError( 'Please specify a scan no or name to '
311 'retrieve from the scantable' )
[102]312 try:
[946]313 bsel = self.get_selection()
314 sel = selector()
[102]315 if type(scanid) is str:
[946]316 sel.set_name(scanid)
[1594]317 return self._select_copy(sel)
[102]318 elif type(scanid) is int:
[946]319 sel.set_scans([scanid])
[1594]320 return self._select_copy(sel)
[381]321 elif type(scanid) is list:
[946]322 sel.set_scans(scanid)
[1594]323 return self._select_copy(sel)
[381]324 else:
[718]325 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]326 raise TypeError(msg)
[102]327 except RuntimeError:
[1859]328 raise
[102]329
    def __str__(self):
        # Delegate to the C++ summary with the verbose flag enabled.
        return Scantable._summary(self, True)
[102]332
[976]333 def summary(self, filename=None):
[1846]334 """\
[102]335 Print a summary of the contents of this scantable.
[1846]336
[102]337 Parameters:
[1846]338
[1931]339 filename: the name of a file to write the putput to
[102]340 Default - no file output
[1846]341
[102]342 """
[976]343 info = Scantable._summary(self, True)
[102]344 if filename is not None:
[256]345 if filename is "":
346 filename = 'scantable_summary.txt'
[415]347 from os.path import expandvars, isdir
[411]348 filename = expandvars(filename)
[415]349 if not isdir(filename):
[413]350 data = open(filename, 'w')
351 data.write(info)
352 data.close()
353 else:
[718]354 msg = "Illegal file name '%s'." % (filename)
[1859]355 raise IOError(msg)
356 return page(info)
[710]357
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the spectrum from

        Returns:

            the spectral (flux) values of that row, as a list

        """
        return self._getspectrum(rowno)
[946]367
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the mask from

        Returns:

            the channel mask of that row, as a list

        """
        return self._getmask(rowno)
377
378 def set_spectrum(self, spec, rowno):
[1938]379 """Set the spectrum for the current row in the scantable.
[1846]380
[1471]381 Parameters:
[1846]382
[1855]383 spec: the new spectrum
[1846]384
[1855]385 rowno: the row number to set the spectrum for
386
[1471]387 """
388 assert(len(spec) == self.nchan())
389 return self._setspectrum(spec, rowno)
390
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

        * This coordinate is only valid until a scantable method modifies
          the frequency axis.
        * This coordinate does contain the original frequency set-up
          NOT the new frame. The conversions however are done using the user
          specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
          use scantable.freq_align first. Without it there is no closure,
          i.e.::

              c = myscan.get_coordinate(0)
              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno:    the row number for the spectral coordinate

        """
        # wrap the C++ coordinate in the python convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
413
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0) # select IF 0
            scan.set_selection(sel) # apply modified selection

        Returns:

            a selector object wrapping the current selection

        """
        return selector(self._getselection())
426
[1576]427 def set_selection(self, selection=None, **kw):
[1846]428 """\
[1005]429 Select a subset of the data. All following operations on this scantable
430 are only applied to thi selection.
[1846]431
[1005]432 Parameters:
[1697]433
[1846]434 selection: a selector object (default unset the selection), or
435 any combination of "pols", "ifs", "beams", "scans",
436 "cycles", "name", "query"
[1697]437
[1846]438 Examples::
[1697]439
[1005]440 sel = selector() # create a selection object
[1118]441 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]442 scan.set_selection(sel) # set the selection
443 scan.summary() # will only print summary of scanno 0 an 3
444 scan.set_selection() # unset the selection
[1697]445 # or the equivalent
446 scan.set_selection(scans=[0,3])
447 scan.summary() # will only print summary of scanno 0 an 3
448 scan.set_selection() # unset the selection
[1846]449
[946]450 """
[1576]451 if selection is None:
452 # reset
453 if len(kw) == 0:
454 selection = selector()
455 else:
456 # try keywords
457 for k in kw:
458 if k not in selector.fields:
459 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460 selection = selector(**kw)
[946]461 self._setselection(selection)
462
[1819]463 def get_row(self, row=0, insitu=None):
[1846]464 """\
[1819]465 Select a row in the scantable.
466 Return a scantable with single row.
[1846]467
[1819]468 Parameters:
[1846]469
470 row: row no of integration, default is 0.
471 insitu: if False a new scantable is returned. Otherwise, the
472 scaling is done in-situ. The default is taken from .asaprc
473 (False)
474
[1819]475 """
476 if insitu is None: insitu = rcParams['insitu']
477 if not insitu:
478 workscan = self.copy()
479 else:
480 workscan = self
481 # Select a row
482 sel=selector()
483 sel.set_scans([workscan.getscan(row)])
484 sel.set_cycles([workscan.getcycle(row)])
485 sel.set_beams([workscan.getbeam(row)])
486 sel.set_ifs([workscan.getif(row)])
487 sel.set_polarisations([workscan.getpol(row)])
488 sel.set_name(workscan._getsourcename(row))
489 workscan.set_selection(sel)
490 if not workscan.nrow() == 1:
491 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
492 raise RuntimeError(msg)
493 del sel
494 if insitu:
495 self._assign(workscan)
496 else:
497 return workscan
498
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/velocity)
        # of the extremum instead of the data value
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel of the extremum for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report line per processed row;
        # for '*_abc' the statvals list is filled here from chan2data
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # NOTE(review): the report is round-tripped through a fixed,
        # user-named file in /tmp before being pushed to the logger;
        # 'statunit' is undefined if the loop above ran zero times
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
606
607 def chan2data(self, rowno=0, chan=0):
[1846]608 """\
[1819]609 Returns channel/frequency/velocity and spectral value
610 at an arbitrary row and channel in the scantable.
[1846]611
[1819]612 Parameters:
[1846]613
[1819]614 rowno: a row number in the scantable. Default is the
615 first row, i.e. rowno=0
[1855]616
[1819]617 chan: a channel in the scantable. Default is the first
618 channel, i.e. pos=0
[1846]619
[1819]620 """
621 if isinstance(rowno, int) and isinstance(chan, int):
622 qx = {'unit': self.get_unit(),
623 'value': self._getabcissa(rowno)[chan]}
624 qy = {'unit': self.get_fluxunit(),
625 'value': self._getspectrum(rowno)[chan]}
626 return qx, qy
627
[1118]628 def stddev(self, mask=None):
[1846]629 """\
[135]630 Determine the standard deviation of the current beam/if/pol
631 Takes a 'mask' as an optional parameter to specify which
632 channels should be excluded.
[1846]633
[135]634 Parameters:
[1846]635
[135]636 mask: an optional mask specifying where the standard
637 deviation should be determined.
638
[1846]639 Example::
640
[135]641 scan.set_unit('channel')
[1118]642 msk = scan.create_mask([100, 200], [500, 600])
[135]643 scan.stddev(mask=m)
[1846]644
[135]645 """
[1118]646 return self.stats(stat='stddev', mask=mask);
[135]647
[1003]648
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.

        Returns:

            the table column names, as a python list

        """
        return list(Scantable.get_column_names(self))
[1003]654
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # a single row is fetched directly; the all-rows path goes through
        # _row_callback, which also pushes a per-row report to the logger
        if row > -1:
            return self._get_column(self._gettsys, row)
        return self._row_callback(self._gettsys, "Tsys")
[256]671
[1730]672
673 def get_weather(self, row=-1):
[1846]674 """\
675 Return the weather informations.
676
677 Parameters:
678
679 row: the rowno to get the information for. (default all rows)
680
681 Returns:
682
683 a dict or list of of dicts of values for the current selection
684
685 """
686
[1730]687 values = self._get_column(self._get_weather, row)
688 if row > -1:
689 return {'temperature': values[0],
690 'pressure': values[1], 'humidity' : values[2],
691 'windspeed' : values[3], 'windaz' : values[4]
692 }
693 else:
694 out = []
695 for r in values:
696
697 out.append({'temperature': r[0],
698 'pressure': r[1], 'humidity' : r[2],
699 'windspeed' : r[3], 'windaz' : r[4]
700 })
701 return out
702
    def _row_callback(self, callback, label):
        # Apply 'callback' to every row, building a human-readable report
        # (scan/time header plus the value formatted as %3.3f) which is
        # pushed to the asap logger under 'label'.  Returns the list of
        # per-row callback results.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]726
    def _get_column(self, callback, row=-1, *args):
        """Fetch per-row values via a row accessor.

        Parameters:

            callback:    a per-row accessor, invoked as callback(rowno, *args)

            row:         the row number; -1 (the default) applies the
                         callback to every row and returns the list of
                         results

            *args:       extra positional arguments passed through to the
                         callback (e.g. the 'prec' argument of _gettime)

        Note: silently returns None for an out-of-range row number.
        """
        if row == -1:
            return [callback(i, *args) for i in range(self.nrow())]
        else:
            if 0 <= row < self.nrow():
                return callback(row, *args)
[256]735
[1070]736
[1947]737 def get_time(self, row=-1, asdatetime=False, prec=0):
[1846]738 """\
[113]739 Get a list of time stamps for the observations.
[1938]740 Return a datetime object or a string (default) for each
741 integration time stamp in the scantable.
[1846]742
[113]743 Parameters:
[1846]744
[1348]745 row: row no of integration. Default -1 return all rows
[1855]746
[1348]747 asdatetime: return values as datetime objects rather than strings
[1846]748
[1947]749 prec: number of digits shown. Note this number is equals to
750 the digits of MVTime, i.e., 0<prec<3: dates with hh::
751 only, <5: with hh:mm:, <7 or 0: with hh:mm:ss,
752 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
753
[113]754 """
[1947]755 #from time import strptime
[1175]756 from datetime import datetime
[1947]757 times = self._get_column(self._gettime, row, prec)
[1348]758 if not asdatetime:
[1392]759 return times
[1947]760 #format = "%Y/%m/%d/%H:%M:%S"
761 format = "%Y/%m/%d/%H:%M:%S.%f"
762 if prec < 7:
763 nsub = 1 + (((6-prec)/2) % 3)
764 substr = [".%f","%S","%M"]
765 for i in range(nsub):
766 format = format.replace(substr[i],"")
[1175]767 if isinstance(times, list):
[1947]768 #return [datetime(*strptime(i, format)[:6]) for i in times]
769 return [datetime.strptime(i, format) for i in times]
[1175]770 else:
[1947]771 #return datetime(*strptime(times, format)[:6])
772 return datetime.strptime(times, format)
[102]773
[1348]774
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        Returns:

            a single value, or a list of values when row == -1

        """
        return self._get_column(self._getinttime, row)
[1348]786
[1573]787
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]798
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]810
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]821
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]833
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
845
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
857
[1862]858 @asaplog_post_dec
[102]859 def set_unit(self, unit='channel'):
[1846]860 """\
[102]861 Set the unit for all following operations on this scantable
[1846]862
[102]863 Parameters:
[1846]864
865 unit: optional unit, default is 'channel'. Use one of '*Hz',
866 'km/s', 'channel' or equivalent ''
867
[102]868 """
[484]869 varlist = vars()
[1118]870 if unit in ['', 'pixel', 'channel']:
[113]871 unit = ''
872 inf = list(self._getcoordinfo())
873 inf[0] = unit
874 self._setcoordinfo(inf)
[1118]875 self._add_history("set_unit", varlist)
[113]876
[1862]877 @asaplog_post_dec
[484]878 def set_instrument(self, instr):
[1846]879 """\
[1348]880 Set the instrument for subsequent processing.
[1846]881
[358]882 Parameters:
[1846]883
[710]884 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]885 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]886
[358]887 """
888 self._setInstrument(instr)
[1118]889 self._add_history("set_instument", vars())
[358]890
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:    'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        # record the call in the scantable history
        self._add_history("set_feedtype", vars())
903
[1862]904 @asaplog_post_dec
[276]905 def set_doppler(self, doppler='RADIO'):
[1846]906 """\
[276]907 Set the doppler for all following operations on this scantable.
[1846]908
[276]909 Parameters:
[1846]910
[276]911 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]912
[276]913 """
[484]914 varlist = vars()
[276]915 inf = list(self._getcoordinfo())
916 inf[2] = doppler
917 self._setcoordinfo(inf)
[1118]918 self._add_history("set_doppler", vars())
[710]919
[1862]920 @asaplog_post_dec
[226]921 def set_freqframe(self, frame=None):
[1846]922 """\
[113]923 Set the frame type of the Spectral Axis.
[1846]924
[113]925 Parameters:
[1846]926
[591]927 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]928 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]929 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]930
931 Example::
932
[113]933 scan.set_freqframe('BARY')
[1846]934
[113]935 """
[1593]936 frame = frame or rcParams['scantable.freqframe']
[484]937 varlist = vars()
[1819]938 # "REST" is not implemented in casacore
939 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
940 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
941 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]942 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]943
[989]944 if frame in valid:
[113]945 inf = list(self._getcoordinfo())
946 inf[1] = frame
947 self._setcoordinfo(inf)
[1118]948 self._add_history("set_freqframe", varlist)
[102]949 else:
[1118]950 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]951 raise TypeError(msg)
[710]952
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:    an optional frame type, default ''. Valid frames are:
                      'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # validation of 'frame' happens in the C++ layer
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]971
[113]972 def get_unit(self):
[1846]973 """\
[113]974 Get the default unit set in this scantable
[1846]975
[113]976 Returns:
[1846]977
[113]978 A unit string
[1846]979
[113]980 """
981 inf = self._getcoordinfo()
982 unit = inf[0]
983 if unit == '': unit = 'channel'
984 return unit
[102]985
[1862]986 @asaplog_post_dec
[158]987 def get_abcissa(self, rowno=0):
[1846]988 """\
[158]989 Get the abcissa in the current coordinate setup for the currently
[113]990 selected Beam/IF/Pol
[1846]991
[113]992 Parameters:
[1846]993
[226]994 rowno: an optional row number in the scantable. Default is the
995 first row, i.e. rowno=0
[1846]996
[113]997 Returns:
[1846]998
[1348]999 The abcissa values and the format string (as a dictionary)
[1846]1000
[113]1001 """
[256]1002 abc = self._getabcissa(rowno)
[710]1003 lbl = self._getabcissalabel(rowno)
[158]1004 return abc, lbl
[113]1005
[1862]1006 @asaplog_post_dec
[1819]1007 def flag(self, mask=None, unflag=False):
[1846]1008 """\
[1001]1009 Flag the selected data using an optional channel mask.
[1846]1010
[1001]1011 Parameters:
[1846]1012
[1001]1013 mask: an optional channel mask, created with create_mask. Default
1014 (no mask) is all channels.
[1855]1015
[1819]1016 unflag: if True, unflag the data
[1846]1017
[1001]1018 """
1019 varlist = vars()
[1593]1020 mask = mask or []
[1859]1021 self._flag(mask, unflag)
[1001]1022 self._add_history("flag", varlist)
1023
[1862]1024 @asaplog_post_dec
[1819]1025 def flag_row(self, rows=[], unflag=False):
[1846]1026 """\
[1819]1027 Flag the selected data in row-based manner.
[1846]1028
[1819]1029 Parameters:
[1846]1030
[1843]1031 rows: list of row numbers to be flagged. Default is no row
1032 (must be explicitly specified to execute row-based flagging).
[1855]1033
[1819]1034 unflag: if True, unflag the data.
[1846]1035
[1819]1036 """
1037 varlist = vars()
[1859]1038 self._flag_row(rows, unflag)
[1819]1039 self._add_history("flag_row", varlist)
1040
[1862]1041 @asaplog_post_dec
[1819]1042 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1043 """\
[1819]1044 Flag the selected data outside a specified range (in channel-base)
[1846]1045
[1819]1046 Parameters:
[1846]1047
[1819]1048 uthres: upper threshold.
[1855]1049
[1819]1050 dthres: lower threshold
[1846]1051
[1819]1052 clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1053 False for flagging data inside the range.
[1855]1054
[1846]1055 unflag: if True, unflag the data.
1056
[1819]1057 """
1058 varlist = vars()
[1859]1059 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1060 self._add_history("clip", varlist)
1061
[1862]1062 @asaplog_post_dec
[1584]1063 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1064 """\
[1192]1065 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1066 Flagged data in the scantable gets interpolated over the region.
[1192]1067 No taper is applied.
[1846]1068
[1192]1069 Parameters:
[1846]1070
[1579]1071 start: the start frequency (really a period within the
1072 bandwidth) or period to remove
[1855]1073
[1579]1074 end: the end frequency or period to remove
[1855]1075
[1584]1076 unit: the frequency unit (default "MHz") or "" for
[1579]1077 explicit lag channels
[1846]1078
1079 *Notes*:
1080
[1579]1081 It is recommended to flag edges of the band or strong
[1348]1082 signals beforehand.
[1846]1083
[1192]1084 """
1085 if insitu is None: insitu = rcParams['insitu']
1086 self._math._setinsitu(insitu)
1087 varlist = vars()
[1579]1088 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1089 if not (unit == "" or base.has_key(unit)):
[1192]1090 raise ValueError("%s is not a valid unit." % unit)
[1859]1091 if unit == "":
1092 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1093 else:
1094 s = scantable(self._math._lag_flag(self, start*base[unit],
1095 end*base[unit], "frequency"))
[1192]1096 s._add_history("lag_flag", varlist)
1097 if insitu:
1098 self._assign(s)
1099 else:
1100 return s
[1001]1101
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert: optional argument. If specified as True,
                    return an inverted mask, i.e. the regions
                    specified are EXCLUDED

            row: create the mask using the specified row for
                 unit conversions, default is row=0
                 only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abcissa values (in the current unit) of the given row are used to
        # translate the window limits into channel positions
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = self.nchan()
        # start from an all-False mask; each window switches channels to True
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # accept both create_mask([a,b], [c,d]) and create_mask([[a,b],[c,d]]):
        # if the last element of the last arg is a number, args is already a
        # sequence of windows, otherwise the windows are wrapped in args[0]
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            # a single-element window [x] selects the single channel [x, x]
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # swap in place so that window[0] <= window[1]
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        # 'invert' flips the mask so the given windows become EXCLUDED
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
[710]1173
[1931]1174 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1175 """\
[1819]1176 Compute and return a list of mask windows, [min, max].
[1846]1177
[1819]1178 Parameters:
[1846]1179
[1819]1180 mask: channel mask, created with create_mask.
[1855]1181
[1819]1182 row: calcutate the masklist using the specified row
1183 for unit conversions, default is row=0
1184 only necessary if frequency varies over rows.
[1846]1185
[1819]1186 Returns:
[1846]1187
[1819]1188 [min, max], [min2, max2], ...
1189 Pairs of start/end points (inclusive)specifying
1190 the masked regions
[1846]1191
[1819]1192 """
1193 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1194 raise TypeError("The mask should be list or tuple.")
1195 if len(mask) < 2:
1196 raise TypeError("The mask elements should be > 1")
1197 if self.nchan() != len(mask):
1198 msg = "Number of channels in scantable != number of mask elements"
1199 raise TypeError(msg)
1200 data = self._getabcissa(row)
1201 u = self._getcoordinfo()[0]
[1859]1202 if u == "":
1203 u = "channel"
1204 msg = "The current mask window unit is %s" % u
1205 i = self._check_ifs()
1206 if not i:
1207 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1208 if not silent:
1209 asaplog.push(msg)
[1819]1210 masklist=[]
1211 ist, ien = None, None
1212 ist, ien=self.get_mask_indices(mask)
1213 if ist is not None and ien is not None:
1214 for i in xrange(len(ist)):
1215 range=[data[ist[i]],data[ien[i]]]
1216 range.sort()
1217 masklist.append([range[0],range[1]])
1218 return masklist
1219
1220 def get_mask_indices(self, mask=None):
[1846]1221 """\
[1819]1222 Compute and Return lists of mask start indices and mask end indices.
[1855]1223
1224 Parameters:
1225
[1819]1226 mask: channel mask, created with create_mask.
[1846]1227
[1819]1228 Returns:
[1846]1229
[1819]1230 List of mask start indices and that of mask end indices,
1231 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1232
[1819]1233 """
1234 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1235 raise TypeError("The mask should be list or tuple.")
1236 if len(mask) < 2:
1237 raise TypeError("The mask elements should be > 1")
1238 istart=[]
1239 iend=[]
1240 if mask[0]: istart.append(0)
1241 for i in range(len(mask)-1):
1242 if not mask[i] and mask[i+1]:
1243 istart.append(i+1)
1244 elif mask[i] and not mask[i+1]:
1245 iend.append(i)
1246 if mask[len(mask)-1]: iend.append(len(mask)-1)
1247 if len(istart) != len(iend):
1248 raise RuntimeError("Numbers of mask start != mask end.")
1249 for i in range(len(istart)):
1250 if istart[i] > iend[i]:
1251 raise RuntimeError("Mask start index > mask end index")
1252 break
1253 return istart,iend
1254
1255# def get_restfreqs(self):
1256# """
1257# Get the restfrequency(s) stored in this scantable.
1258# The return value(s) are always of unit 'Hz'
1259# Parameters:
1260# none
1261# Returns:
1262# a list of doubles
1263# """
1264# return list(self._getrestfreqs())
1265
1266 def get_restfreqs(self, ids=None):
[1846]1267 """\
[256]1268 Get the restfrequency(s) stored in this scantable.
1269 The return value(s) are always of unit 'Hz'
[1846]1270
[256]1271 Parameters:
[1846]1272
[1819]1273 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1274 be retrieved
[1846]1275
[256]1276 Returns:
[1846]1277
[1819]1278 dictionary containing ids and a list of doubles for each id
[1846]1279
[256]1280 """
[1819]1281 if ids is None:
1282 rfreqs={}
1283 idlist = self.getmolnos()
1284 for i in idlist:
1285 rfreqs[i]=list(self._getrestfreqs(i))
1286 return rfreqs
1287 else:
1288 if type(ids)==list or type(ids)==tuple:
1289 rfreqs={}
1290 for i in ids:
1291 rfreqs[i]=list(self._getrestfreqs(i))
1292 return rfreqs
1293 else:
1294 return list(self._getrestfreqs(ids))
1295 #return list(self._getrestfreqs(ids))
[102]1296
[931]1297 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1298 """\
[931]1299 Set or replace the restfrequency specified and
[1938]1300 if the 'freqs' argument holds a scalar,
[931]1301 then that rest frequency will be applied to all the selected
1302 data. If the 'freqs' argument holds
1303 a vector, then it MUST be of equal or smaller length than
1304 the number of IFs (and the available restfrequencies will be
1305 replaced by this vector). In this case, *all* data have
1306 the restfrequency set per IF according
1307 to the corresponding value you give in the 'freqs' vector.
[1118]1308 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1309 IF 1 gets restfreq 2e9.
[1846]1310
[1395]1311 You can also specify the frequencies via a linecatalog.
[1153]1312
[931]1313 Parameters:
[1846]1314
[931]1315 freqs: list of rest frequency values or string idenitfiers
[1855]1316
[931]1317 unit: unit for rest frequency (default 'Hz')
[402]1318
[1846]1319
1320 Example::
1321
[1819]1322 # set the given restfrequency for the all currently selected IFs
[931]1323 scan.set_restfreqs(freqs=1.4e9)
[1845]1324 # set restfrequencies for the n IFs (n > 1) in the order of the
1325 # list, i.e
1326 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1327 # len(list_of_restfreqs) == nIF
1328 # for nIF == 1 the following will set multiple restfrequency for
1329 # that IF
[1819]1330 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1331 # set multiple restfrequencies per IF. as a list of lists where
1332 # the outer list has nIF elements, the inner s arbitrary
1333 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1334
[1846]1335 *Note*:
[1845]1336
[931]1337 To do more sophisticate Restfrequency setting, e.g. on a
1338 source and IF basis, use scantable.set_selection() before using
[1846]1339 this function::
[931]1340
[1846]1341 # provided your scantable is called scan
1342 selection = selector()
1343 selection.set_name("ORION*")
1344 selection.set_ifs([1])
1345 scan.set_selection(selection)
1346 scan.set_restfreqs(freqs=86.6e9)
1347
[931]1348 """
1349 varlist = vars()
[1157]1350 from asap import linecatalog
1351 # simple value
[1118]1352 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1353 self._setrestfreqs([freqs], [""], unit)
[1157]1354 # list of values
[1118]1355 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1356 # list values are scalars
[1118]1357 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1358 if len(freqs) == 1:
1359 self._setrestfreqs(freqs, [""], unit)
1360 else:
1361 # allow the 'old' mode of setting mulitple IFs
1362 sel = selector()
1363 savesel = self._getselection()
1364 iflist = self.getifnos()
1365 if len(freqs)>len(iflist):
1366 raise ValueError("number of elements in list of list "
1367 "exeeds the current IF selections")
1368 iflist = self.getifnos()
1369 for i, fval in enumerate(freqs):
1370 sel.set_ifs(iflist[i])
1371 self._setselection(sel)
1372 self._setrestfreqs([fval], [""], unit)
1373 self._setselection(savesel)
1374
1375 # list values are dict, {'value'=, 'name'=)
[1157]1376 elif isinstance(freqs[-1], dict):
[1845]1377 values = []
1378 names = []
1379 for d in freqs:
1380 values.append(d["value"])
1381 names.append(d["name"])
1382 self._setrestfreqs(values, names, unit)
[1819]1383 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1384 sel = selector()
1385 savesel = self._getselection()
[1322]1386 iflist = self.getifnos()
[1819]1387 if len(freqs)>len(iflist):
[1845]1388 raise ValueError("number of elements in list of list exeeds"
1389 " the current IF selections")
1390 for i, fval in enumerate(freqs):
[1322]1391 sel.set_ifs(iflist[i])
[1259]1392 self._setselection(sel)
[1845]1393 self._setrestfreqs(fval, [""], unit)
[1157]1394 self._setselection(savesel)
1395 # freqs are to be taken from a linecatalog
[1153]1396 elif isinstance(freqs, linecatalog):
1397 sel = selector()
1398 savesel = self._getselection()
1399 for i in xrange(freqs.nrow()):
[1322]1400 sel.set_ifs(iflist[i])
[1153]1401 self._setselection(sel)
[1845]1402 self._setrestfreqs([freqs.get_frequency(i)],
1403 [freqs.get_name(i)], "MHz")
[1153]1404 # ensure that we are not iterating past nIF
1405 if i == self.nif()-1: break
1406 self._setselection(savesel)
[931]1407 else:
1408 return
1409 self._add_history("set_restfreqs", varlist)
1410
[1360]1411 def shift_refpix(self, delta):
[1846]1412 """\
[1589]1413 Shift the reference pixel of the Spectra Coordinate by an
1414 integer amount.
[1846]1415
[1589]1416 Parameters:
[1846]1417
[1589]1418 delta: the amount to shift by
[1846]1419
1420 *Note*:
1421
[1589]1422 Be careful using this with broadband data.
[1846]1423
[1360]1424 """
[1731]1425 Scantable.shift_refpix(self, delta)
[931]1426
[1862]1427 @asaplog_post_dec
[1259]1428 def history(self, filename=None):
[1846]1429 """\
[1259]1430 Print the history. Optionally to a file.
[1846]1431
[1348]1432 Parameters:
[1846]1433
[1928]1434 filename: The name of the file to save the history to.
[1846]1435
[1259]1436 """
[484]1437 hist = list(self._gethistory())
[794]1438 out = "-"*80
[484]1439 for h in hist:
[489]1440 if h.startswith("---"):
[1857]1441 out = "\n".join([out, h])
[489]1442 else:
1443 items = h.split("##")
1444 date = items[0]
1445 func = items[1]
1446 items = items[2:]
[794]1447 out += "\n"+date+"\n"
1448 out += "Function: %s\n Parameters:" % (func)
[489]1449 for i in items:
[1938]1450 if i == '':
1451 continue
[489]1452 s = i.split("=")
[1118]1453 out += "\n %s = %s" % (s[0], s[1])
[1857]1454 out = "\n".join([out, "-"*80])
[1259]1455 if filename is not None:
1456 if filename is "":
1457 filename = 'scantable_history.txt'
1458 import os
1459 filename = os.path.expandvars(os.path.expanduser(filename))
1460 if not os.path.isdir(filename):
1461 data = open(filename, 'w')
1462 data.write(out)
1463 data.close()
1464 else:
1465 msg = "Illegal file name '%s'." % (filename)
[1859]1466 raise IOError(msg)
1467 return page(out)
[513]1468 #
1469 # Maths business
1470 #
[1862]1471 @asaplog_post_dec
[931]1472 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1473 """\
[1070]1474 Return the (time) weighted average of a scan.
[1846]1475
1476 *Note*:
1477
[1070]1478 in channels only - align if necessary
[1846]1479
[513]1480 Parameters:
[1846]1481
[513]1482 mask: an optional mask (only used for 'var' and 'tsys'
1483 weighting)
[1855]1484
[558]1485 scanav: True averages each scan separately
1486 False (default) averages all scans together,
[1855]1487
[1099]1488 weight: Weighting scheme.
1489 'none' (mean no weight)
1490 'var' (1/var(spec) weighted)
1491 'tsys' (1/Tsys**2 weighted)
1492 'tint' (integration time weighted)
1493 'tintsys' (Tint/Tsys**2)
1494 'median' ( median averaging)
[535]1495 The default is 'tint'
[1855]1496
[931]1497 align: align the spectra in velocity before averaging. It takes
1498 the time of the first spectrum as reference time.
[1846]1499
1500 Example::
1501
[513]1502 # time average the scantable without using a mask
[710]1503 newscan = scan.average_time()
[1846]1504
[513]1505 """
1506 varlist = vars()
[1593]1507 weight = weight or 'TINT'
1508 mask = mask or ()
1509 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1510 scan = (self, )
[1859]1511
1512 if align:
1513 scan = (self.freq_align(insitu=False), )
1514 s = None
1515 if weight.upper() == 'MEDIAN':
1516 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1517 scanav))
1518 else:
1519 s = scantable(self._math._average(scan, mask, weight.upper(),
1520 scanav))
[1099]1521 s._add_history("average_time", varlist)
[513]1522 return s
[710]1523
[1862]1524 @asaplog_post_dec
[876]1525 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1526 """\
[513]1527 Return a scan where all spectra are converted to either
1528 Jansky or Kelvin depending upon the flux units of the scan table.
1529 By default the function tries to look the values up internally.
1530 If it can't find them (or if you want to over-ride), you must
1531 specify EITHER jyperk OR eta (and D which it will try to look up
1532 also if you don't set it). jyperk takes precedence if you set both.
[1846]1533
[513]1534 Parameters:
[1846]1535
[513]1536 jyperk: the Jy / K conversion factor
[1855]1537
[513]1538 eta: the aperture efficiency
[1855]1539
[1928]1540 d: the geometric diameter (metres)
[1855]1541
[513]1542 insitu: if False a new scantable is returned.
1543 Otherwise, the scaling is done in-situ
1544 The default is taken from .asaprc (False)
[1846]1545
[513]1546 """
1547 if insitu is None: insitu = rcParams['insitu']
[876]1548 self._math._setinsitu(insitu)
[513]1549 varlist = vars()
[1593]1550 jyperk = jyperk or -1.0
1551 d = d or -1.0
1552 eta = eta or -1.0
[876]1553 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1554 s._add_history("convert_flux", varlist)
1555 if insitu: self._assign(s)
1556 else: return s
[513]1557
[1862]1558 @asaplog_post_dec
[876]1559 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1560 """\
[513]1561 Return a scan after applying a gain-elevation correction.
1562 The correction can be made via either a polynomial or a
1563 table-based interpolation (and extrapolation if necessary).
1564 You specify polynomial coefficients, an ascii table or neither.
1565 If you specify neither, then a polynomial correction will be made
1566 with built in coefficients known for certain telescopes (an error
1567 will occur if the instrument is not known).
1568 The data and Tsys are *divided* by the scaling factors.
[1846]1569
[513]1570 Parameters:
[1846]1571
[513]1572 poly: Polynomial coefficients (default None) to compute a
1573 gain-elevation correction as a function of
1574 elevation (in degrees).
[1855]1575
[513]1576 filename: The name of an ascii file holding correction factors.
1577 The first row of the ascii file must give the column
1578 names and these MUST include columns
1579 "ELEVATION" (degrees) and "FACTOR" (multiply data
1580 by this) somewhere.
1581 The second row must give the data type of the
1582 column. Use 'R' for Real and 'I' for Integer.
1583 An example file would be
1584 (actual factors are arbitrary) :
1585
1586 TIME ELEVATION FACTOR
1587 R R R
1588 0.1 0 0.8
1589 0.2 20 0.85
1590 0.3 40 0.9
1591 0.4 60 0.85
1592 0.5 80 0.8
1593 0.6 90 0.75
[1855]1594
[513]1595 method: Interpolation method when correcting from a table.
1596 Values are "nearest", "linear" (default), "cubic"
1597 and "spline"
[1855]1598
[513]1599 insitu: if False a new scantable is returned.
1600 Otherwise, the scaling is done in-situ
1601 The default is taken from .asaprc (False)
[1846]1602
[513]1603 """
1604
1605 if insitu is None: insitu = rcParams['insitu']
[876]1606 self._math._setinsitu(insitu)
[513]1607 varlist = vars()
[1593]1608 poly = poly or ()
[513]1609 from os.path import expandvars
1610 filename = expandvars(filename)
[876]1611 s = scantable(self._math._gainel(self, poly, filename, method))
1612 s._add_history("gain_el", varlist)
[1593]1613 if insitu:
1614 self._assign(s)
1615 else:
1616 return s
[710]1617
[1862]1618 @asaplog_post_dec
[931]1619 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1620 """\
[513]1621 Return a scan where all rows have been aligned in frequency/velocity.
1622 The alignment frequency frame (e.g. LSRK) is that set by function
1623 set_freqframe.
[1846]1624
[513]1625 Parameters:
[1855]1626
[513]1627 reftime: reference time to align at. By default, the time of
1628 the first row of data is used.
[1855]1629
[513]1630 method: Interpolation method for regridding the spectra.
1631 Choose from "nearest", "linear", "cubic" (default)
1632 and "spline"
[1855]1633
[513]1634 insitu: if False a new scantable is returned.
1635 Otherwise, the scaling is done in-situ
1636 The default is taken from .asaprc (False)
[1846]1637
[513]1638 """
[931]1639 if insitu is None: insitu = rcParams["insitu"]
[876]1640 self._math._setinsitu(insitu)
[513]1641 varlist = vars()
[1593]1642 reftime = reftime or ""
[931]1643 s = scantable(self._math._freq_align(self, reftime, method))
[876]1644 s._add_history("freq_align", varlist)
1645 if insitu: self._assign(s)
1646 else: return s
[513]1647
[1862]1648 @asaplog_post_dec
[1725]1649 def opacity(self, tau=None, insitu=None):
[1846]1650 """\
[513]1651 Apply an opacity correction. The data
1652 and Tsys are multiplied by the correction factor.
[1846]1653
[513]1654 Parameters:
[1855]1655
[1689]1656 tau: (list of) opacity from which the correction factor is
[513]1657 exp(tau*ZD)
[1689]1658 where ZD is the zenith-distance.
1659 If a list is provided, it has to be of length nIF,
1660 nIF*nPol or 1 and in order of IF/POL, e.g.
1661 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1662 if tau is `None` the opacities are determined from a
1663 model.
[1855]1664
[513]1665 insitu: if False a new scantable is returned.
1666 Otherwise, the scaling is done in-situ
1667 The default is taken from .asaprc (False)
[1846]1668
[513]1669 """
1670 if insitu is None: insitu = rcParams['insitu']
[876]1671 self._math._setinsitu(insitu)
[513]1672 varlist = vars()
[1689]1673 if not hasattr(tau, "__len__"):
1674 tau = [tau]
[876]1675 s = scantable(self._math._opacity(self, tau))
1676 s._add_history("opacity", varlist)
1677 if insitu: self._assign(s)
1678 else: return s
[513]1679
[1862]1680 @asaplog_post_dec
[513]1681 def bin(self, width=5, insitu=None):
[1846]1682 """\
[513]1683 Return a scan where all spectra have been binned up.
[1846]1684
[1348]1685 Parameters:
[1846]1686
[513]1687 width: The bin width (default=5) in pixels
[1855]1688
[513]1689 insitu: if False a new scantable is returned.
1690 Otherwise, the scaling is done in-situ
1691 The default is taken from .asaprc (False)
[1846]1692
[513]1693 """
1694 if insitu is None: insitu = rcParams['insitu']
[876]1695 self._math._setinsitu(insitu)
[513]1696 varlist = vars()
[876]1697 s = scantable(self._math._bin(self, width))
[1118]1698 s._add_history("bin", varlist)
[1589]1699 if insitu:
1700 self._assign(s)
1701 else:
1702 return s
[513]1703
[1862]1704 @asaplog_post_dec
[513]1705 def resample(self, width=5, method='cubic', insitu=None):
[1846]1706 """\
[1348]1707 Return a scan where all spectra have been binned up.
[1573]1708
[1348]1709 Parameters:
[1846]1710
[513]1711 width: The bin width (default=5) in pixels
[1855]1712
[513]1713 method: Interpolation method when correcting from a table.
1714 Values are "nearest", "linear", "cubic" (default)
1715 and "spline"
[1855]1716
[513]1717 insitu: if False a new scantable is returned.
1718 Otherwise, the scaling is done in-situ
1719 The default is taken from .asaprc (False)
[1846]1720
[513]1721 """
1722 if insitu is None: insitu = rcParams['insitu']
[876]1723 self._math._setinsitu(insitu)
[513]1724 varlist = vars()
[876]1725 s = scantable(self._math._resample(self, method, width))
[1118]1726 s._add_history("resample", varlist)
[876]1727 if insitu: self._assign(s)
1728 else: return s
[513]1729
[1862]1730 @asaplog_post_dec
[946]1731 def average_pol(self, mask=None, weight='none'):
[1846]1732 """\
[946]1733 Average the Polarisations together.
[1846]1734
[946]1735 Parameters:
[1846]1736
[946]1737 mask: An optional mask defining the region, where the
1738 averaging will be applied. The output will have all
1739 specified points masked.
[1855]1740
[946]1741 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1742 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1743
[946]1744 """
1745 varlist = vars()
[1593]1746 mask = mask or ()
[1010]1747 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1748 s._add_history("average_pol", varlist)
[992]1749 return s
[513]1750
[1862]1751 @asaplog_post_dec
[1145]1752 def average_beam(self, mask=None, weight='none'):
[1846]1753 """\
[1145]1754 Average the Beams together.
[1846]1755
[1145]1756 Parameters:
1757 mask: An optional mask defining the region, where the
1758 averaging will be applied. The output will have all
1759 specified points masked.
[1855]1760
[1145]1761 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1762 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1763
[1145]1764 """
1765 varlist = vars()
[1593]1766 mask = mask or ()
[1145]1767 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1768 s._add_history("average_beam", varlist)
1769 return s
1770
[1586]1771 def parallactify(self, pflag):
[1846]1772 """\
[1843]1773 Set a flag to indicate whether this data should be treated as having
[1617]1774 been 'parallactified' (total phase == 0.0)
[1846]1775
[1617]1776 Parameters:
[1855]1777
[1843]1778 pflag: Bool indicating whether to turn this on (True) or
[1617]1779 off (False)
[1846]1780
[1617]1781 """
[1586]1782 varlist = vars()
1783 self._parallactify(pflag)
1784 self._add_history("parallactify", varlist)
1785
[1862]1786 @asaplog_post_dec
[992]1787 def convert_pol(self, poltype=None):
[1846]1788 """\
[992]1789 Convert the data to a different polarisation type.
[1565]1790 Note that you will need cross-polarisation terms for most conversions.
[1846]1791
[992]1792 Parameters:
[1855]1793
[992]1794 poltype: The new polarisation type. Valid types are:
[1565]1795 "linear", "circular", "stokes" and "linpol"
[1846]1796
[992]1797 """
1798 varlist = vars()
[1859]1799 s = scantable(self._math._convertpol(self, poltype))
[1118]1800 s._add_history("convert_pol", varlist)
[992]1801 return s
1802
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defauls to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polnomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each indivual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data for interactive comparison
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # choose GUI or non-GUI plotter backend per the rc settings
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            # show original vs smoothed for each row; the user accepts or
            # rejects each row interactively
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1880
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'

            uselin: use linear polynomial fit

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows: row numbers of spectra to be processed.
                  (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # no mask given: fit over all channels
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # per-row fit results are recorded on the scantable instance
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # combine the user mask with the row's channel flags (CAS-1434)
                f.mask = mask_and(mask, workscan._getmask(r))
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected: record placeholders and leave the row as-is
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum by the fit residual
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1967
[1931]1968 @asaplog_post_dec
[1907]1969 def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1970 """\
1971 Return a scan which has been baselined (all rows) by a polynomial.
1972 Parameters:
1973 mask: an optional mask
1974 order: the order of the polynomial (default is 0)
1975 plot: plot the fit and the residual. In this each
1976 indivual fit has to be approved, by typing 'y'
1977 or 'n'. Ignored if batch = True.
1978 batch: if True a faster algorithm is used and logs
1979 including the fit results are not output
1980 (default is False)
1981 insitu: if False a new scantable is returned.
1982 Otherwise, the scaling is done in-situ
1983 The default is taken from .asaprc (False)
[1931]1984 rows: row numbers of spectra to be baselined.
[1907]1985 (default is None: for all rows)
1986 Example:
1987 # return a scan baselined by a third order polynomial,
1988 # not using a mask
1989 bscan = scan.poly_baseline(order=3)
1990 """
[1931]1991
1992 varlist = vars()
1993
[1907]1994 if insitu is None: insitu = rcParams["insitu"]
1995 if insitu:
1996 workscan = self
1997 else:
1998 workscan = self.copy()
1999
2000 nchan = workscan.nchan()
2001
2002 if mask is None:
2003 mask = [True for i in xrange(nchan)]
2004
2005 try:
2006 if rows == None:
2007 rows = xrange(workscan.nrow())
2008 elif isinstance(rows, int):
2009 rows = [ rows ]
2010
2011 if len(rows) > 0:
[1931]2012 workscan.blpars = []
2013 workscan.masklists = []
2014 workscan.actualmask = []
[1907]2015
2016 if batch:
[1931]2017 workscan._poly_baseline_batch(mask, order)
[1907]2018 elif plot:
2019 f = fitter()
2020 f.set_function(lpoly=order)
2021 for r in rows:
2022 f.x = workscan._getabcissa(r)
2023 f.y = workscan._getspectrum(r)
2024 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
2025 f.data = None
2026 f.fit()
2027
2028 f.plot(residual=True)
2029 accept_fit = raw_input("Accept fit ( [y]/n ): ")
2030 if accept_fit.upper() == "N":
2031 self.blpars.append(None)
2032 self.masklists.append(None)
2033 self.actualmask.append(None)
2034 continue
2035 workscan._setspectrum(f.fitter.getresidual(), r)
[1931]2036 workscan.blpars.append(f.get_parameters())
2037 workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2038 workscan.actualmask.append(f.mask)
[1907]2039
2040 f._p.unmap()
2041 f._p = None
2042 else:
2043 for r in rows:
[1931]2044 fitparams = workscan._poly_baseline(mask, order, r)
2045 params = fitparams.getparameters()
2046 fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2047 errors = fitparams.geterrors()
2048 fmask = mask_and(mask, workscan._getmask(r))
2049
2050 workscan.blpars.append({"params":params,
2051 "fixed": fitparams.getfixedparameters(),
2052 "formatted":fmtd, "errors":errors})
2053 workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2054 workscan.actualmask.append(fmask)
[1907]2055
[1931]2056 asaplog.push(fmtd)
[1907]2057
2058 workscan._add_history("poly_baseline", varlist)
2059
2060 if insitu:
2061 self._assign(workscan)
2062 else:
2063 return workscan
2064
[1919]2065 except RuntimeError, e:
[1907]2066 msg = "The fit failed, possibly because it didn't converge."
2067 if rcParams["verbose"]:
[1919]2068 asaplog.push(str(e))
[1907]2069 asaplog.push(str(msg))
2070 return
2071 else:
[1919]2072 raise RuntimeError(str(e)+'\n'+msg)
[1907]2073
2074
2075 def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2076 threshold=3, chan_avg_limit=1, plot=False,
[1907]2077 insitu=None, rows=None):
[1846]2078 """\
[1931]2079 Return a scan which has been baselined (all rows) by a polynomial.
[880]2080 Spectral lines are detected first using linefinder and masked out
2081 to avoid them affecting the baseline solution.
2082
2083 Parameters:
[1846]2084
[880]2085 mask: an optional mask retreived from scantable
[1846]2086
2087 edge: an optional number of channel to drop at the edge of
2088 spectrum. If only one value is
[880]2089 specified, the same number will be dropped from
2090 both sides of the spectrum. Default is to keep
[907]2091 all channels. Nested tuples represent individual
[976]2092 edge selection for different IFs (a number of spectral
2093 channels can be different)
[1846]2094
[880]2095 order: the order of the polynomial (default is 0)
[1846]2096
[880]2097 threshold: the threshold used by line finder. It is better to
2098 keep it large as only strong lines affect the
2099 baseline solution.
[1846]2100
[1280]2101 chan_avg_limit:
2102 a maximum number of consequtive spectral channels to
2103 average during the search of weak and broad lines.
2104 The default is no averaging (and no search for weak
2105 lines). If such lines can affect the fitted baseline
2106 (e.g. a high order polynomial is fitted), increase this
2107 parameter (usually values up to 8 are reasonable). Most
2108 users of this method should find the default value
2109 sufficient.
[1846]2110
[1061]2111 plot: plot the fit and the residual. In this each
2112 indivual fit has to be approved, by typing 'y'
2113 or 'n'
[1846]2114
[880]2115 insitu: if False a new scantable is returned.
2116 Otherwise, the scaling is done in-situ
2117 The default is taken from .asaprc (False)
[1907]2118 rows: row numbers of spectra to be processed.
2119 (default is None: for all rows)
[880]2120
[1846]2121
2122 Example::
2123
2124 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2125
[880]2126 """
2127 if insitu is None: insitu = rcParams['insitu']
2128 varlist = vars()
2129 from asap.asaplinefind import linefinder
2130 from asap import _is_sequence_or_number as _is_valid
2131
[976]2132 # check whether edge is set up for each IF individually
[1118]2133 individualedge = False;
2134 if len(edge) > 1:
2135 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2136 individualedge = True;
[907]2137
[1118]2138 if not _is_valid(edge, int) and not individualedge:
[909]2139 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2140 pair of integers specified as a tuple. Nested tuples are allowed \
2141 to make individual selection for different IFs."
[919]2142
[1118]2143 curedge = (0, 0)
2144 if individualedge:
2145 for edgepar in edge:
2146 if not _is_valid(edgepar, int):
2147 raise ValueError, "Each element of the 'edge' tuple has \
2148 to be a pair of integers or an integer."
[907]2149 else:
[1118]2150 curedge = edge;
[880]2151
[1907]2152 if not insitu:
2153 workscan = self.copy()
2154 else:
2155 workscan = self
2156
[880]2157 # setup fitter
2158 f = fitter()
[1907]2159 f.set_function(lpoly=order)
[880]2160
2161 # setup line finder
[1118]2162 fl = linefinder()
[1268]2163 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2164
[907]2165 fl.set_scan(workscan)
2166
[1907]2167 if mask is None:
2168 mask = _n_bools(workscan.nchan(), True)
2169
2170 if rows is None:
2171 rows = xrange(workscan.nrow())
2172 elif isinstance(rows, int):
2173 rows = [ rows ]
2174
[1819]2175 # Save parameters of baseline fits & masklists as a class attribute.
2176 # NOTICE: It does not reflect changes in scantable!
2177 if len(rows) > 0:
2178 self.blpars=[]
2179 self.masklists=[]
[1907]2180 self.actualmask=[]
[880]2181 asaplog.push("Processing:")
2182 for r in rows:
[1118]2183 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2184 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2185 workscan.getpol(r), workscan.getcycle(r))
[880]2186 asaplog.push(msg, False)
[907]2187
[976]2188 # figure out edge parameter
[1118]2189 if individualedge:
2190 if len(edge) >= workscan.getif(r):
2191 raise RuntimeError, "Number of edge elements appear to " \
2192 "be less than the number of IFs"
2193 curedge = edge[workscan.getif(r)]
[919]2194
[1907]2195 actualmask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
[1819]2196
[976]2197 # setup line finder
[1819]2198 fl.find_lines(r, actualmask, curedge)
[1907]2199
[1819]2200 f.x = workscan._getabcissa(r)
2201 f.y = workscan._getspectrum(r)
[1907]2202 f.mask = fl.get_mask()
[1819]2203 f.data = None
[880]2204 f.fit()
[1819]2205
2206 # Show mask list
[1931]2207 masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2208 msg = "mask range: "+str(masklist)
2209 asaplog.push(msg, False)
2210
[1061]2211 if plot:
2212 f.plot(residual=True)
2213 x = raw_input("Accept fit ( [y]/n ): ")
2214 if x.upper() == 'N':
[1819]2215 self.blpars.append(None)
2216 self.masklists.append(None)
[1907]2217 self.actualmask.append(None)
[1061]2218 continue
[1819]2219
[880]2220 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2221 self.blpars.append(f.get_parameters())
2222 self.masklists.append(masklist)
[1907]2223 self.actualmask.append(f.mask)
[1061]2224 if plot:
2225 f._p.unmap()
2226 f._p = None
2227 workscan._add_history("auto_poly_baseline", varlist)
[880]2228 if insitu:
2229 self._assign(workscan)
2230 else:
2231 return workscan
2232
[1862]2233 @asaplog_post_dec
[914]2234 def rotate_linpolphase(self, angle):
[1846]2235 """\
[914]2236 Rotate the phase of the complex polarization O=Q+iU correlation.
2237 This is always done in situ in the raw data. So if you call this
2238 function more than once then each call rotates the phase further.
[1846]2239
[914]2240 Parameters:
[1846]2241
[914]2242 angle: The angle (degrees) to rotate (add) by.
[1846]2243
2244 Example::
2245
[914]2246 scan.rotate_linpolphase(2.3)
[1846]2247
[914]2248 """
2249 varlist = vars()
[936]2250 self._math._rotate_linpolphase(self, angle)
[914]2251 self._add_history("rotate_linpolphase", varlist)
2252 return
[710]2253
[1862]2254 @asaplog_post_dec
[914]2255 def rotate_xyphase(self, angle):
[1846]2256 """\
[914]2257 Rotate the phase of the XY correlation. This is always done in situ
2258 in the data. So if you call this function more than once
2259 then each call rotates the phase further.
[1846]2260
[914]2261 Parameters:
[1846]2262
[914]2263 angle: The angle (degrees) to rotate (add) by.
[1846]2264
2265 Example::
2266
[914]2267 scan.rotate_xyphase(2.3)
[1846]2268
[914]2269 """
2270 varlist = vars()
[936]2271 self._math._rotate_xyphase(self, angle)
[914]2272 self._add_history("rotate_xyphase", varlist)
2273 return
2274
[1862]2275 @asaplog_post_dec
[914]2276 def swap_linears(self):
[1846]2277 """\
[1573]2278 Swap the linear polarisations XX and YY, or better the first two
[1348]2279 polarisations as this also works for ciculars.
[914]2280 """
2281 varlist = vars()
[936]2282 self._math._swap_linears(self)
[914]2283 self._add_history("swap_linears", varlist)
2284 return
2285
[1862]2286 @asaplog_post_dec
[914]2287 def invert_phase(self):
[1846]2288 """\
[914]2289 Invert the phase of the complex polarisation
2290 """
2291 varlist = vars()
[936]2292 self._math._invert_phase(self)
[914]2293 self._add_history("invert_phase", varlist)
2294 return
2295
[1862]2296 @asaplog_post_dec
[876]2297 def add(self, offset, insitu=None):
[1846]2298 """\
[513]2299 Return a scan where all spectra have the offset added
[1846]2300
[513]2301 Parameters:
[1846]2302
[513]2303 offset: the offset
[1855]2304
[513]2305 insitu: if False a new scantable is returned.
2306 Otherwise, the scaling is done in-situ
2307 The default is taken from .asaprc (False)
[1846]2308
[513]2309 """
2310 if insitu is None: insitu = rcParams['insitu']
[876]2311 self._math._setinsitu(insitu)
[513]2312 varlist = vars()
[876]2313 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2314 s._add_history("add", varlist)
[876]2315 if insitu:
2316 self._assign(s)
2317 else:
[513]2318 return s
2319
[1862]2320 @asaplog_post_dec
[1308]2321 def scale(self, factor, tsys=True, insitu=None):
[1846]2322 """\
2323
[1938]2324 Return a scan where all spectra are scaled by the given 'factor'
[1846]2325
[513]2326 Parameters:
[1846]2327
[1819]2328 factor: the scaling factor (float or 1D float list)
[1855]2329
[513]2330 insitu: if False a new scantable is returned.
2331 Otherwise, the scaling is done in-situ
2332 The default is taken from .asaprc (False)
[1855]2333
[513]2334 tsys: if True (default) then apply the operation to Tsys
2335 as well as the data
[1846]2336
[513]2337 """
2338 if insitu is None: insitu = rcParams['insitu']
[876]2339 self._math._setinsitu(insitu)
[513]2340 varlist = vars()
[1819]2341 s = None
2342 import numpy
2343 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2344 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2345 from asapmath import _array2dOp
2346 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2347 else:
2348 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2349 else:
2350 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2351 s._add_history("scale", varlist)
[876]2352 if insitu:
2353 self._assign(s)
2354 else:
[513]2355 return s
2356
[1504]2357 def set_sourcetype(self, match, matchtype="pattern",
2358 sourcetype="reference"):
[1846]2359 """\
[1502]2360 Set the type of the source to be an source or reference scan
[1846]2361 using the provided pattern.
2362
[1502]2363 Parameters:
[1846]2364
[1504]2365 match: a Unix style pattern, regular expression or selector
[1855]2366
[1504]2367 matchtype: 'pattern' (default) UNIX style pattern or
2368 'regex' regular expression
[1855]2369
[1502]2370 sourcetype: the type of the source to use (source/reference)
[1846]2371
[1502]2372 """
2373 varlist = vars()
2374 basesel = self.get_selection()
2375 stype = -1
2376 if sourcetype.lower().startswith("r"):
2377 stype = 1
2378 elif sourcetype.lower().startswith("s"):
2379 stype = 0
[1504]2380 else:
[1502]2381 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2382 if matchtype.lower().startswith("p"):
2383 matchtype = "pattern"
2384 elif matchtype.lower().startswith("r"):
2385 matchtype = "regex"
2386 else:
2387 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2388 sel = selector()
2389 if isinstance(match, selector):
2390 sel = match
2391 else:
[1504]2392 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2393 self.set_selection(basesel+sel)
2394 self._setsourcetype(stype)
2395 self.set_selection(basesel)
[1573]2396 self._add_history("set_sourcetype", varlist)
[1502]2397
[1862]2398 @asaplog_post_dec
[1857]2399 @preserve_selection
[1819]2400 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2401 """\
[670]2402 This function allows to build quotients automatically.
[1819]2403 It assumes the observation to have the same number of
[670]2404 "ons" and "offs"
[1846]2405
[670]2406 Parameters:
[1846]2407
[710]2408 preserve: you can preserve (default) the continuum or
2409 remove it. The equations used are
[1857]2410
[670]2411 preserve: Output = Toff * (on/off) - Toff
[1857]2412
[1070]2413 remove: Output = Toff * (on/off) - Ton
[1855]2414
[1573]2415 mode: the on/off detection mode
[1348]2416 'paired' (default)
2417 identifies 'off' scans by the
2418 trailing '_R' (Mopra/Parkes) or
2419 '_e'/'_w' (Tid) and matches
2420 on/off pairs from the observing pattern
[1502]2421 'time'
2422 finds the closest off in time
[1348]2423
[1857]2424 .. todo:: verify argument is not implemented
2425
[670]2426 """
[1857]2427 varlist = vars()
[1348]2428 modes = ["time", "paired"]
[670]2429 if not mode in modes:
[876]2430 msg = "please provide valid mode. Valid modes are %s" % (modes)
2431 raise ValueError(msg)
[1348]2432 s = None
2433 if mode.lower() == "paired":
[1857]2434 sel = self.get_selection()
[1875]2435 sel.set_query("SRCTYPE==psoff")
[1356]2436 self.set_selection(sel)
[1348]2437 offs = self.copy()
[1875]2438 sel.set_query("SRCTYPE==pson")
[1356]2439 self.set_selection(sel)
[1348]2440 ons = self.copy()
2441 s = scantable(self._math._quotient(ons, offs, preserve))
2442 elif mode.lower() == "time":
2443 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2444 s._add_history("auto_quotient", varlist)
[876]2445 return s
[710]2446
[1862]2447 @asaplog_post_dec
[1145]2448 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2449 """\
[1143]2450 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2451
[1143]2452 Parameters:
[1846]2453
[1145]2454 mask: an optional mask to be used when weight == 'stddev'
[1855]2455
[1143]2456 weight: How to average the off beams. Default is 'median'.
[1855]2457
[1145]2458 preserve: you can preserve (default) the continuum or
[1855]2459 remove it. The equations used are:
[1846]2460
[1855]2461 preserve: Output = Toff * (on/off) - Toff
2462
2463 remove: Output = Toff * (on/off) - Ton
2464
[1217]2465 """
[1593]2466 mask = mask or ()
[1141]2467 varlist = vars()
2468 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2469 preoff = scantable(self._math._mx_extract(self, 'off'))
2470 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2471 from asapmath import quotient
[1145]2472 q = quotient(on, off, preserve)
[1143]2473 q._add_history("mx_quotient", varlist)
[1217]2474 return q
[513]2475
[1862]2476 @asaplog_post_dec
[718]2477 def freq_switch(self, insitu=None):
[1846]2478 """\
[718]2479 Apply frequency switching to the data.
[1846]2480
[718]2481 Parameters:
[1846]2482
[718]2483 insitu: if False a new scantable is returned.
2484 Otherwise, the swictching is done in-situ
2485 The default is taken from .asaprc (False)
[1846]2486
[718]2487 """
2488 if insitu is None: insitu = rcParams['insitu']
[876]2489 self._math._setinsitu(insitu)
[718]2490 varlist = vars()
[876]2491 s = scantable(self._math._freqswitch(self))
[1118]2492 s._add_history("freq_switch", varlist)
[1856]2493 if insitu:
2494 self._assign(s)
2495 else:
2496 return s
[718]2497
[1862]2498 @asaplog_post_dec
[780]2499 def recalc_azel(self):
[1846]2500 """Recalculate the azimuth and elevation for each position."""
[780]2501 varlist = vars()
[876]2502 self._recalcazel()
[780]2503 self._add_history("recalc_azel", varlist)
2504 return
2505
[1862]2506 @asaplog_post_dec
[513]2507 def __add__(self, other):
2508 varlist = vars()
2509 s = None
2510 if isinstance(other, scantable):
[1573]2511 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2512 elif isinstance(other, float):
[876]2513 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2514 else:
[718]2515 raise TypeError("Other input is not a scantable or float value")
[513]2516 s._add_history("operator +", varlist)
2517 return s
2518
[1862]2519 @asaplog_post_dec
[513]2520 def __sub__(self, other):
2521 """
2522 implicit on all axes and on Tsys
2523 """
2524 varlist = vars()
2525 s = None
2526 if isinstance(other, scantable):
[1588]2527 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2528 elif isinstance(other, float):
[876]2529 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2530 else:
[718]2531 raise TypeError("Other input is not a scantable or float value")
[513]2532 s._add_history("operator -", varlist)
2533 return s
[710]2534
[1862]2535 @asaplog_post_dec
[513]2536 def __mul__(self, other):
2537 """
2538 implicit on all axes and on Tsys
2539 """
2540 varlist = vars()
2541 s = None
2542 if isinstance(other, scantable):
[1588]2543 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2544 elif isinstance(other, float):
[876]2545 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2546 else:
[718]2547 raise TypeError("Other input is not a scantable or float value")
[513]2548 s._add_history("operator *", varlist)
2549 return s
2550
[710]2551
[1862]2552 @asaplog_post_dec
[513]2553 def __div__(self, other):
2554 """
2555 implicit on all axes and on Tsys
2556 """
2557 varlist = vars()
2558 s = None
2559 if isinstance(other, scantable):
[1589]2560 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2561 elif isinstance(other, float):
2562 if other == 0.0:
[718]2563 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2564 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2565 else:
[718]2566 raise TypeError("Other input is not a scantable or float value")
[513]2567 s._add_history("operator /", varlist)
2568 return s
2569
[1862]2570 @asaplog_post_dec
[530]2571 def get_fit(self, row=0):
[1846]2572 """\
[530]2573 Print or return the stored fits for a row in the scantable
[1846]2574
[530]2575 Parameters:
[1846]2576
[530]2577 row: the row which the fit has been applied to.
[1846]2578
[530]2579 """
2580 if row > self.nrow():
2581 return
[976]2582 from asap.asapfit import asapfit
[530]2583 fit = asapfit(self._getfit(row))
[1859]2584 asaplog.push( '%s' %(fit) )
2585 return fit.as_dict()
[530]2586
[1483]2587 def flag_nans(self):
[1846]2588 """\
[1483]2589 Utility function to flag NaN values in the scantable.
2590 """
2591 import numpy
2592 basesel = self.get_selection()
2593 for i in range(self.nrow()):
[1589]2594 sel = self.get_row_selector(i)
2595 self.set_selection(basesel+sel)
[1483]2596 nans = numpy.isnan(self._getspectrum(0))
2597 if numpy.any(nans):
2598 bnans = [ bool(v) for v in nans]
2599 self.flag(bnans)
2600 self.set_selection(basesel)
2601
[1588]2602 def get_row_selector(self, rowno):
2603 return selector(beams=self.getbeam(rowno),
2604 ifs=self.getif(rowno),
2605 pols=self.getpol(rowno),
2606 scans=self.getscan(rowno),
2607 cycles=self.getcycle(rowno))
[1573]2608
[484]2609 def _add_history(self, funcname, parameters):
[1435]2610 if not rcParams['scantable.history']:
2611 return
[484]2612 # create date
2613 sep = "##"
2614 from datetime import datetime
2615 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2616 hist = dstr+sep
2617 hist += funcname+sep#cdate+sep
2618 if parameters.has_key('self'): del parameters['self']
[1118]2619 for k, v in parameters.iteritems():
[484]2620 if type(v) is dict:
[1118]2621 for k2, v2 in v.iteritems():
[484]2622 hist += k2
2623 hist += "="
[1118]2624 if isinstance(v2, scantable):
[484]2625 hist += 'scantable'
2626 elif k2 == 'mask':
[1118]2627 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2628 hist += str(self._zip_mask(v2))
2629 else:
2630 hist += str(v2)
[484]2631 else:
[513]2632 hist += str(v2)
[484]2633 else:
2634 hist += k
2635 hist += "="
[1118]2636 if isinstance(v, scantable):
[484]2637 hist += 'scantable'
2638 elif k == 'mask':
[1118]2639 if isinstance(v, list) or isinstance(v, tuple):
[513]2640 hist += str(self._zip_mask(v))
2641 else:
2642 hist += str(v)
[484]2643 else:
2644 hist += str(v)
2645 hist += sep
2646 hist = hist[:-2] # remove trailing '##'
2647 self._addhistory(hist)
2648
[710]2649
[484]2650 def _zip_mask(self, mask):
2651 mask = list(mask)
2652 i = 0
2653 segments = []
2654 while mask[i:].count(1):
2655 i += mask[i:].index(1)
2656 if mask[i:].count(0):
2657 j = i + mask[i:].index(0)
2658 else:
[710]2659 j = len(mask)
[1118]2660 segments.append([i, j])
[710]2661 i = j
[484]2662 return segments
[714]2663
[626]2664 def _get_ordinate_label(self):
2665 fu = "("+self.get_fluxunit()+")"
2666 import re
2667 lbl = "Intensity"
[1118]2668 if re.match(".K.", fu):
[626]2669 lbl = "Brightness Temperature "+ fu
[1118]2670 elif re.match(".Jy.", fu):
[626]2671 lbl = "Flux density "+ fu
2672 return lbl
[710]2673
[876]2674 def _check_ifs(self):
2675 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2676 nchans = filter(lambda t: t > 0, nchans)
[876]2677 return (sum(nchans)/len(nchans) == nchans[0])
[976]2678
[1862]2679 @asaplog_post_dec
[1916]2680 #def _fill(self, names, unit, average, getpt, antenna):
2681 def _fill(self, names, unit, average, opts={}):
[976]2682 first = True
2683 fullnames = []
2684 for name in names:
2685 name = os.path.expandvars(name)
2686 name = os.path.expanduser(name)
2687 if not os.path.exists(name):
2688 msg = "File '%s' does not exists" % (name)
2689 raise IOError(msg)
2690 fullnames.append(name)
2691 if average:
2692 asaplog.push('Auto averaging integrations')
[1079]2693 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2694 for name in fullnames:
[1073]2695 tbl = Scantable(stype)
[1843]2696 r = filler(tbl)
[1504]2697 rx = rcParams['scantable.reference']
[1843]2698 r.setreferenceexpr(rx)
[976]2699 msg = "Importing %s..." % (name)
[1118]2700 asaplog.push(msg, False)
[1916]2701 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2702 r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2703 r.fill()
[976]2704 if average:
[1118]2705 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2706 if not first:
2707 tbl = self._math._merge([self, tbl])
2708 Scantable.__init__(self, tbl)
[1843]2709 r.close()
[1118]2710 del r, tbl
[976]2711 first = False
[1861]2712 #flush log
2713 asaplog.post()
[976]2714 if unit is not None:
2715 self.set_fluxunit(unit)
[1824]2716 if not is_casapy():
2717 self.set_freqframe(rcParams['scantable.freqframe'])
[976]2718
[1402]2719 def __getitem__(self, key):
2720 if key < 0:
2721 key += self.nrow()
2722 if key >= self.nrow():
2723 raise IndexError("Row index out of range.")
2724 return self._getspectrum(key)
2725
2726 def __setitem__(self, key, value):
2727 if key < 0:
2728 key += self.nrow()
2729 if key >= self.nrow():
2730 raise IndexError("Row index out of range.")
2731 if not hasattr(value, "__len__") or \
2732 len(value) > self.nchan(self.getif(key)):
2733 raise ValueError("Spectrum length doesn't match.")
2734 return self._setspectrum(value, key)
2735
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
2738
2739 def __iter__(self):
2740 for i in range(len(self)):
2741 yield self[i]
Note: See TracBrowser for help on using the repository browser.