source: trunk/python/scantable.py@ 1927

Last change on this file since 1927 was 1920, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Modified help texts.
Added 'ms' to the possible suffix values for list_files().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.9 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]18from asap.asapfitter import fitter
[102]19
[1689]20
def preserve_selection(func):
    """Decorator that snapshots a scantable's selection before calling
    ``func`` and restores it afterwards, even when ``func`` raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always put the original selection back
            obj.set_selection(saved)
        return result
    return wrap
31
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # Scantables and MeasurementSets share the casacore table layout
    # (table.info + table.dat); the first line of table.info tells
    # them apart.
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        # close the handle even if readline fails
        f.close()
    # anything that is a casacore table but not an MS is treated as a
    # scantable here
    return l.find('Measurement Set') == -1
## return (os.path.isdir(filename)
##     and not os.path.exists(filename+'/table.f1')
##     and os.path.exists(filename+'/table.info'))
[1697]56
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # an MS is a casacore table whose table.info starts with
    # 'Measurement Set'
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        # close the handle even if readline fails
        f.close()
    return l.find('Measurement Set') != -1
77
[876]78class scantable(Scantable):
[1846]79 """\
80 The ASAP container for scans (single-dish data).
[102]81 """
[1819]82
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # former explicit getpt/antenna handling, kept for reference;
        # these options now travel through **args to the filler
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the arguments for the history record before locals pile up
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # open the table directly, honouring the storage mode
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: forward MS-only options to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file (rpfits/sdfits)
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names is filled as one scantable
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]185
[1862]186 @asaplog_post_dec
[876]187 def save(self, name=None, format=None, overwrite=False):
[1846]188 """\
[1280]189 Store the scantable on disk. This can be an asap (aips++) Table,
190 SDFITS or MS2 format.
[1846]191
[116]192 Parameters:
[1846]193
[1093]194 name: the name of the outputfile. For format "ASCII"
195 this is the root file name (data in 'name'.txt
[497]196 and header in 'name'_header.txt)
[1855]197
[116]198 format: an optional file format. Default is ASAP.
[1855]199 Allowed are:
200
201 * 'ASAP' (save as ASAP [aips++] Table),
202 * 'SDFITS' (save as SDFITS file)
203 * 'ASCII' (saves as ascii text file)
204 * 'MS2' (saves as an casacore MeasurementSet V2)
205 * 'FITS' (save as image FITS - not readable by class)
206 * 'CLASS' (save as FITS readable by CLASS)
207
[411]208 overwrite: If the file should be overwritten if it exists.
[256]209 The default False is to return with warning
[411]210 without writing the output. USE WITH CARE.
[1855]211
[1846]212 Example::
213
[116]214 scan.save('myscan.asap')
[1118]215 scan.save('myscan.sdfits', 'SDFITS')
[1846]216
[116]217 """
[411]218 from os import path
[1593]219 format = format or rcParams['scantable.save']
[256]220 suffix = '.'+format.lower()
[1118]221 if name is None or name == "":
[256]222 name = 'scantable'+suffix
[718]223 msg = "No filename given. Using default name %s..." % name
224 asaplog.push(msg)
[411]225 name = path.expandvars(name)
[256]226 if path.isfile(name) or path.isdir(name):
227 if not overwrite:
[718]228 msg = "File %s exists." % name
[1859]229 raise IOError(msg)
[451]230 format2 = format.upper()
231 if format2 == 'ASAP':
[116]232 self._save(name)
233 else:
[989]234 from asap._asap import stwriter as stw
[1118]235 writer = stw(format2)
236 writer.write(self, name)
[116]237 return
238
[102]239 def copy(self):
[1846]240 """Return a copy of this scantable.
241
242 *Note*:
243
[1348]244 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]245
246 Example::
247
[102]248 copiedscan = scan.copy()
[1846]249
[102]250 """
[876]251 sd = scantable(Scantable._copy(self))
[113]252 return sd
253
[1093]254 def drop_scan(self, scanid=None):
[1846]255 """\
[1093]256 Return a new scantable where the specified scan number(s) has(have)
257 been dropped.
[1846]258
[1093]259 Parameters:
[1846]260
[1093]261 scanid: a (list of) scan number(s)
[1846]262
[1093]263 """
264 from asap import _is_sequence_or_number as _is_valid
265 from asap import _to_list
266 from asap import unique
267 if not _is_valid(scanid):
[1859]268 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269 scanid = _to_list(scanid)
270 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271 for sid in scanid: allscans.remove(sid)
272 if len(allscans) == 0:
273 raise ValueError("Can't remove all scans")
274 sel = selector(scans=allscans)
275 return self._select_copy(sel)
[1093]276
[1594]277 def _select_copy(self, selection):
278 orig = self.get_selection()
279 self.set_selection(orig+selection)
280 cp = self.copy()
281 self.set_selection(orig)
282 return cp
283
[102]284 def get_scan(self, scanid=None):
[1855]285 """\
[102]286 Return a specific scan (by scanno) or collection of scans (by
287 source name) in a new scantable.
[1846]288
289 *Note*:
290
[1348]291 See scantable.drop_scan() for the inverse operation.
[1846]292
[102]293 Parameters:
[1846]294
[513]295 scanid: a (list of) scanno or a source name, unix-style
296 patterns are accepted for source name matching, e.g.
297 '*_R' gets all 'ref scans
[1846]298
299 Example::
300
[513]301 # get all scans containing the source '323p459'
302 newscan = scan.get_scan('323p459')
303 # get all 'off' scans
304 refscans = scan.get_scan('*_R')
305 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]306 newscan = scan.get_scan([0, 2, 7, 10])
[1846]307
[102]308 """
309 if scanid is None:
[1859]310 raise RuntimeError( 'Please specify a scan no or name to '
311 'retrieve from the scantable' )
[102]312 try:
[946]313 bsel = self.get_selection()
314 sel = selector()
[102]315 if type(scanid) is str:
[946]316 sel.set_name(scanid)
[1594]317 return self._select_copy(sel)
[102]318 elif type(scanid) is int:
[946]319 sel.set_scans([scanid])
[1594]320 return self._select_copy(sel)
[381]321 elif type(scanid) is list:
[946]322 sel.set_scans(scanid)
[1594]323 return self._select_copy(sel)
[381]324 else:
[718]325 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]326 raise TypeError(msg)
[102]327 except RuntimeError:
[1859]328 raise
[102]329
    def __str__(self):
        # delegate to the C++ summary with the verbose flag set
        return Scantable._summary(self, True)
[102]332
[976]333 def summary(self, filename=None):
[1846]334 """\
[102]335 Print a summary of the contents of this scantable.
[1846]336
[102]337 Parameters:
[1846]338
[102]339 filename: the name of a file to write the putput to
340 Default - no file output
[1846]341
[102]342 """
[976]343 info = Scantable._summary(self, True)
[102]344 if filename is not None:
[256]345 if filename is "":
346 filename = 'scantable_summary.txt'
[415]347 from os.path import expandvars, isdir
[411]348 filename = expandvars(filename)
[415]349 if not isdir(filename):
[413]350 data = open(filename, 'w')
351 data.write(info)
352 data.close()
353 else:
[718]354 msg = "Illegal file name '%s'." % (filename)
[1859]355 raise IOError(msg)
356 return page(info)
[710]357
    def get_spectrum(self, rowno):
        """Return the spectrum for the given row in the scantable as a list.

        Parameters:

            rowno:   the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]367
    def get_mask(self, rowno):
        """Return the mask for the given row in the scantable as a list.

        Parameters:

            rowno:   the row number to retrieve the mask from

        """
        return self._getmask(rowno)
377
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the given row in the scantable.

        Parameters:

            spec:   the new spectrum

            rowno:  the row number to set the spectrum for

        """
        # NOTE(review): assert is stripped under ``python -O``, so the
        # length check would silently disappear there
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
390
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno:    the row number for the spectral coordinate

        """
        # wrap the C++ coordinate object in the python convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
413
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)               # select IF 0
            scan.set_selection(sel)      # apply modified selection

        """
        return selector(self._getselection())
426
[1576]427 def set_selection(self, selection=None, **kw):
[1846]428 """\
[1005]429 Select a subset of the data. All following operations on this scantable
430 are only applied to thi selection.
[1846]431
[1005]432 Parameters:
[1697]433
[1846]434 selection: a selector object (default unset the selection), or
435 any combination of "pols", "ifs", "beams", "scans",
436 "cycles", "name", "query"
[1697]437
[1846]438 Examples::
[1697]439
[1005]440 sel = selector() # create a selection object
[1118]441 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]442 scan.set_selection(sel) # set the selection
443 scan.summary() # will only print summary of scanno 0 an 3
444 scan.set_selection() # unset the selection
[1697]445 # or the equivalent
446 scan.set_selection(scans=[0,3])
447 scan.summary() # will only print summary of scanno 0 an 3
448 scan.set_selection() # unset the selection
[1846]449
[946]450 """
[1576]451 if selection is None:
452 # reset
453 if len(kw) == 0:
454 selection = selector()
455 else:
456 # try keywords
457 for k in kw:
458 if k not in selector.fields:
459 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460 selection = selector(**kw)
[946]461 self._setselection(selection)
462
[1819]463 def get_row(self, row=0, insitu=None):
[1846]464 """\
[1819]465 Select a row in the scantable.
466 Return a scantable with single row.
[1846]467
[1819]468 Parameters:
[1846]469
470 row: row no of integration, default is 0.
471 insitu: if False a new scantable is returned. Otherwise, the
472 scaling is done in-situ. The default is taken from .asaprc
473 (False)
474
[1819]475 """
476 if insitu is None: insitu = rcParams['insitu']
477 if not insitu:
478 workscan = self.copy()
479 else:
480 workscan = self
481 # Select a row
482 sel=selector()
483 sel.set_scans([workscan.getscan(row)])
484 sel.set_cycles([workscan.getcycle(row)])
485 sel.set_beams([workscan.getbeam(row)])
486 sel.set_ifs([workscan.getif(row)])
487 sel.set_polarisations([workscan.getpol(row)])
488 sel.set_name(workscan._getsourcename(row))
489 workscan.set_selection(sel)
490 if not workscan.nrow() == 1:
491 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
492 raise RuntimeError(msg)
493 del sel
494 if insitu:
495 self._assign(workscan)
496 else:
497 return workscan
498
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' statistics return the abscissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel at which the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # NOTE(review): statunit is only bound inside this loop; an empty
        # scantable would leave it undefined for the print below
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # collect the abscissa values as the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the formatted report through a temp file so the logger
        # receives it as a single block
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
606
607 def chan2data(self, rowno=0, chan=0):
[1846]608 """\
[1819]609 Returns channel/frequency/velocity and spectral value
610 at an arbitrary row and channel in the scantable.
[1846]611
[1819]612 Parameters:
[1846]613
[1819]614 rowno: a row number in the scantable. Default is the
615 first row, i.e. rowno=0
[1855]616
[1819]617 chan: a channel in the scantable. Default is the first
618 channel, i.e. pos=0
[1846]619
[1819]620 """
621 if isinstance(rowno, int) and isinstance(chan, int):
622 qx = {'unit': self.get_unit(),
623 'value': self._getabcissa(rowno)[chan]}
624 qy = {'unit': self.get_fluxunit(),
625 'value': self._getspectrum(rowno)[chan]}
626 return qx, qy
627
[1118]628 def stddev(self, mask=None):
[1846]629 """\
[135]630 Determine the standard deviation of the current beam/if/pol
631 Takes a 'mask' as an optional parameter to specify which
632 channels should be excluded.
[1846]633
[135]634 Parameters:
[1846]635
[135]636 mask: an optional mask specifying where the standard
637 deviation should be determined.
638
[1846]639 Example::
640
[135]641 scan.set_unit('channel')
[1118]642 msk = scan.create_mask([100, 200], [500, 600])
[135]643 scan.stddev(mask=m)
[1846]644
[135]645 """
[1118]646 return self.stats(stat='stddev', mask=mask);
[135]647
[1003]648
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        return list(Scantable.get_column_names(self))
[1003]654
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # all rows: log a formatted report as a side effect as well
        return self._row_callback(self._gettsys, "Tsys")
[256]671
[1730]672
673 def get_weather(self, row=-1):
[1846]674 """\
675 Return the weather informations.
676
677 Parameters:
678
679 row: the rowno to get the information for. (default all rows)
680
681 Returns:
682
683 a dict or list of of dicts of values for the current selection
684
685 """
686
[1730]687 values = self._get_column(self._get_weather, row)
688 if row > -1:
689 return {'temperature': values[0],
690 'pressure': values[1], 'humidity' : values[2],
691 'windspeed' : values[3], 'windaz' : values[4]
692 }
693 else:
694 out = []
695 for r in values:
696
697 out.append({'temperature': r[0],
698 'pressure': r[1], 'humidity' : r[2],
699 'windspeed' : r[3], 'windaz' : r[4]
700 })
701 return out
702
[876]703 def _row_callback(self, callback, label):
704 out = ""
[1118]705 outvec = []
[1590]706 sep = '-'*50
[876]707 for i in range(self.nrow()):
708 tm = self._gettime(i)
709 src = self._getsourcename(i)
[1590]710 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]711 out += 'Time[%s]:\n' % (tm)
[1590]712 if self.nbeam(-1) > 1:
713 out += ' Beam[%d] ' % (self.getbeam(i))
714 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
715 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]716 outvec.append(callback(i))
717 out += '= %3.3f\n' % (outvec[i])
[1590]718 out += sep+'\n'
[1859]719
720 asaplog.push(sep)
721 asaplog.push(" %s" % (label))
722 asaplog.push(sep)
723 asaplog.push(out)
[1861]724 asaplog.post()
[1175]725 return outvec
[256]726
[1070]727 def _get_column(self, callback, row=-1):
728 """
729 """
730 if row == -1:
731 return [callback(i) for i in range(self.nrow())]
732 else:
[1819]733 if 0 <= row < self.nrow():
[1070]734 return callback(row)
[256]735
[1070]736
[1348]737 def get_time(self, row=-1, asdatetime=False):
[1846]738 """\
[113]739 Get a list of time stamps for the observations.
[1348]740 Return a datetime object for each integration time stamp in the scantable.
[1846]741
[113]742 Parameters:
[1846]743
[1348]744 row: row no of integration. Default -1 return all rows
[1855]745
[1348]746 asdatetime: return values as datetime objects rather than strings
[1846]747
[113]748 """
[1175]749 from time import strptime
750 from datetime import datetime
[1392]751 times = self._get_column(self._gettime, row)
[1348]752 if not asdatetime:
[1392]753 return times
[1175]754 format = "%Y/%m/%d/%H:%M:%S"
755 if isinstance(times, list):
756 return [datetime(*strptime(i, format)[:6]) for i in times]
757 else:
758 return datetime(*strptime(times, format)[:6])
[102]759
[1348]760
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]772
[1573]773
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]784
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]796
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]807
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]819
    def get_direction(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
831
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
843
[1862]844 @asaplog_post_dec
[102]845 def set_unit(self, unit='channel'):
[1846]846 """\
[102]847 Set the unit for all following operations on this scantable
[1846]848
[102]849 Parameters:
[1846]850
851 unit: optional unit, default is 'channel'. Use one of '*Hz',
852 'km/s', 'channel' or equivalent ''
853
[102]854 """
[484]855 varlist = vars()
[1118]856 if unit in ['', 'pixel', 'channel']:
[113]857 unit = ''
858 inf = list(self._getcoordinfo())
859 inf[0] = unit
860 self._setcoordinfo(inf)
[1118]861 self._add_history("set_unit", varlist)
[113]862
[1862]863 @asaplog_post_dec
[484]864 def set_instrument(self, instr):
[1846]865 """\
[1348]866 Set the instrument for subsequent processing.
[1846]867
[358]868 Parameters:
[1846]869
[710]870 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]871 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]872
[358]873 """
874 self._setInstrument(instr)
[1118]875 self._add_history("set_instument", vars())
[358]876
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:     'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
889
[1862]890 @asaplog_post_dec
[276]891 def set_doppler(self, doppler='RADIO'):
[1846]892 """\
[276]893 Set the doppler for all following operations on this scantable.
[1846]894
[276]895 Parameters:
[1846]896
[276]897 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]898
[276]899 """
[484]900 varlist = vars()
[276]901 inf = list(self._getcoordinfo())
902 inf[2] = doppler
903 self._setcoordinfo(inf)
[1118]904 self._add_history("set_doppler", vars())
[710]905
[1862]906 @asaplog_post_dec
[226]907 def set_freqframe(self, frame=None):
[1846]908 """\
[113]909 Set the frame type of the Spectral Axis.
[1846]910
[113]911 Parameters:
[1846]912
[591]913 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]914 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]915 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]916
917 Example::
918
[113]919 scan.set_freqframe('BARY')
[1846]920
[113]921 """
[1593]922 frame = frame or rcParams['scantable.freqframe']
[484]923 varlist = vars()
[1819]924 # "REST" is not implemented in casacore
925 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
926 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
927 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]928 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]929
[989]930 if frame in valid:
[113]931 inf = list(self._getcoordinfo())
932 inf[1] = frame
933 self._setcoordinfo(inf)
[1118]934 self._add_history("set_freqframe", varlist)
[102]935 else:
[1118]936 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]937 raise TypeError(msg)
[710]938
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # validation and the actual change happen in the C++ base class
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]957
[113]958 def get_unit(self):
[1846]959 """\
[113]960 Get the default unit set in this scantable
[1846]961
[113]962 Returns:
[1846]963
[113]964 A unit string
[1846]965
[113]966 """
967 inf = self._getcoordinfo()
968 unit = inf[0]
969 if unit == '': unit = 'channel'
970 return unit
[102]971
[1862]972 @asaplog_post_dec
[158]973 def get_abcissa(self, rowno=0):
[1846]974 """\
[158]975 Get the abcissa in the current coordinate setup for the currently
[113]976 selected Beam/IF/Pol
[1846]977
[113]978 Parameters:
[1846]979
[226]980 rowno: an optional row number in the scantable. Default is the
981 first row, i.e. rowno=0
[1846]982
[113]983 Returns:
[1846]984
[1348]985 The abcissa values and the format string (as a dictionary)
[1846]986
[113]987 """
[256]988 abc = self._getabcissa(rowno)
[710]989 lbl = self._getabcissalabel(rowno)
[158]990 return abc, lbl
[113]991
[1862]992 @asaplog_post_dec
[1819]993 def flag(self, mask=None, unflag=False):
[1846]994 """\
[1001]995 Flag the selected data using an optional channel mask.
[1846]996
[1001]997 Parameters:
[1846]998
[1001]999 mask: an optional channel mask, created with create_mask. Default
1000 (no mask) is all channels.
[1855]1001
[1819]1002 unflag: if True, unflag the data
[1846]1003
[1001]1004 """
1005 varlist = vars()
[1593]1006 mask = mask or []
[1859]1007 self._flag(mask, unflag)
[1001]1008 self._add_history("flag", varlist)
1009
[1862]1010 @asaplog_post_dec
[1819]1011 def flag_row(self, rows=[], unflag=False):
[1846]1012 """\
[1819]1013 Flag the selected data in row-based manner.
[1846]1014
[1819]1015 Parameters:
[1846]1016
[1843]1017 rows: list of row numbers to be flagged. Default is no row
1018 (must be explicitly specified to execute row-based flagging).
[1855]1019
[1819]1020 unflag: if True, unflag the data.
[1846]1021
[1819]1022 """
1023 varlist = vars()
[1859]1024 self._flag_row(rows, unflag)
[1819]1025 self._add_history("flag_row", varlist)
1026
[1862]1027 @asaplog_post_dec
[1819]1028 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1029 """\
[1819]1030 Flag the selected data outside a specified range (in channel-base)
[1846]1031
[1819]1032 Parameters:
[1846]1033
[1819]1034 uthres: upper threshold.
[1855]1035
[1819]1036 dthres: lower threshold
[1846]1037
[1819]1038 clipoutside: True for flagging data outside the range [dthres:uthres].
1039 False for glagging data inside the range.
[1855]1040
[1846]1041 unflag: if True, unflag the data.
1042
[1819]1043 """
1044 varlist = vars()
[1859]1045 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1046 self._add_history("clip", varlist)
1047
[1862]1048 @asaplog_post_dec
[1584]1049 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1050 """\
[1192]1051 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1052 Flagged data in the scantable gets interpolated over the region.
[1192]1053 No taper is applied.
[1846]1054
[1192]1055 Parameters:
[1846]1056
[1579]1057 start: the start frequency (really a period within the
1058 bandwidth) or period to remove
[1855]1059
[1579]1060 end: the end frequency or period to remove
[1855]1061
[1584]1062 unit: the frequency unit (default "MHz") or "" for
[1579]1063 explicit lag channels
[1846]1064
1065 *Notes*:
1066
[1579]1067 It is recommended to flag edges of the band or strong
[1348]1068 signals beforehand.
[1846]1069
[1192]1070 """
1071 if insitu is None: insitu = rcParams['insitu']
1072 self._math._setinsitu(insitu)
1073 varlist = vars()
[1579]1074 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1075 if not (unit == "" or base.has_key(unit)):
[1192]1076 raise ValueError("%s is not a valid unit." % unit)
[1859]1077 if unit == "":
1078 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1079 else:
1080 s = scantable(self._math._lag_flag(self, start*base[unit],
1081 end*base[unit], "frequency"))
[1192]1082 s._add_history("lag_flag", varlist)
1083 if insitu:
1084 self._assign(s)
1085 else:
1086 return s
[1001]1087
[1862]1088 @asaplog_post_dec
[113]1089 def create_mask(self, *args, **kwargs):
[1846]1090 """\
[1118]1091 Compute and return a mask based on [min, max] windows.
[189]1092 The specified windows are to be INCLUDED, when the mask is
[113]1093 applied.
[1846]1094
[102]1095 Parameters:
[1846]1096
[1118]1097 [min, max], [min2, max2], ...
[1024]1098 Pairs of start/end points (inclusive)specifying the regions
[102]1099 to be masked
[1855]1100
[189]1101 invert: optional argument. If specified as True,
1102 return an inverted mask, i.e. the regions
1103 specified are EXCLUDED
[1855]1104
[513]1105 row: create the mask using the specified row for
1106 unit conversions, default is row=0
1107 only necessary if frequency varies over rows.
[1846]1108
1109 Examples::
1110
[113]1111 scan.set_unit('channel')
[1846]1112 # a)
[1118]1113 msk = scan.create_mask([400, 500], [800, 900])
[189]1114 # masks everything outside 400 and 500
[113]1115 # and 800 and 900 in the unit 'channel'
1116
[1846]1117 # b)
[1118]1118 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1119 # masks the regions between 400 and 500
[113]1120 # and 800 and 900 in the unit 'channel'
[1846]1121
1122 # c)
1123 #mask only channel 400
[1554]1124 msk = scan.create_mask([400])
[1846]1125
[102]1126 """
[1554]1127 row = kwargs.get("row", 0)
[513]1128 data = self._getabcissa(row)
[113]1129 u = self._getcoordinfo()[0]
[1859]1130 if u == "":
1131 u = "channel"
1132 msg = "The current mask window unit is %s" % u
1133 i = self._check_ifs()
1134 if not i:
1135 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1136 asaplog.push(msg)
[102]1137 n = self.nchan()
[1295]1138 msk = _n_bools(n, False)
[710]1139 # test if args is a 'list' or a 'normal *args - UGLY!!!
1140
[1118]1141 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1142 and args or args[0]
[710]1143 for window in ws:
[1554]1144 if len(window) == 1:
1145 window = [window[0], window[0]]
1146 if len(window) == 0 or len(window) > 2:
1147 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1148 if window[0] > window[1]:
1149 tmp = window[0]
1150 window[0] = window[1]
1151 window[1] = tmp
[102]1152 for i in range(n):
[1024]1153 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1154 msk[i] = True
[113]1155 if kwargs.has_key('invert'):
1156 if kwargs.get('invert'):
[1295]1157 msk = mask_not(msk)
[102]1158 return msk
[710]1159
[1819]1160 def get_masklist(self, mask=None, row=0):
[1846]1161 """\
[1819]1162 Compute and return a list of mask windows, [min, max].
[1846]1163
[1819]1164 Parameters:
[1846]1165
[1819]1166 mask: channel mask, created with create_mask.
[1855]1167
[1819]1168 row: calcutate the masklist using the specified row
1169 for unit conversions, default is row=0
1170 only necessary if frequency varies over rows.
[1846]1171
[1819]1172 Returns:
[1846]1173
[1819]1174 [min, max], [min2, max2], ...
1175 Pairs of start/end points (inclusive)specifying
1176 the masked regions
[1846]1177
[1819]1178 """
1179 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1180 raise TypeError("The mask should be list or tuple.")
1181 if len(mask) < 2:
1182 raise TypeError("The mask elements should be > 1")
1183 if self.nchan() != len(mask):
1184 msg = "Number of channels in scantable != number of mask elements"
1185 raise TypeError(msg)
1186 data = self._getabcissa(row)
1187 u = self._getcoordinfo()[0]
[1859]1188 if u == "":
1189 u = "channel"
1190 msg = "The current mask window unit is %s" % u
1191 i = self._check_ifs()
1192 if not i:
1193 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1194 asaplog.push(msg)
[1819]1195 masklist=[]
1196 ist, ien = None, None
1197 ist, ien=self.get_mask_indices(mask)
1198 if ist is not None and ien is not None:
1199 for i in xrange(len(ist)):
1200 range=[data[ist[i]],data[ien[i]]]
1201 range.sort()
1202 masklist.append([range[0],range[1]])
1203 return masklist
1204
1205 def get_mask_indices(self, mask=None):
[1846]1206 """\
[1819]1207 Compute and Return lists of mask start indices and mask end indices.
[1855]1208
1209 Parameters:
1210
[1819]1211 mask: channel mask, created with create_mask.
[1846]1212
[1819]1213 Returns:
[1846]1214
[1819]1215 List of mask start indices and that of mask end indices,
1216 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1217
[1819]1218 """
1219 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1220 raise TypeError("The mask should be list or tuple.")
1221 if len(mask) < 2:
1222 raise TypeError("The mask elements should be > 1")
1223 istart=[]
1224 iend=[]
1225 if mask[0]: istart.append(0)
1226 for i in range(len(mask)-1):
1227 if not mask[i] and mask[i+1]:
1228 istart.append(i+1)
1229 elif mask[i] and not mask[i+1]:
1230 iend.append(i)
1231 if mask[len(mask)-1]: iend.append(len(mask)-1)
1232 if len(istart) != len(iend):
1233 raise RuntimeError("Numbers of mask start != mask end.")
1234 for i in range(len(istart)):
1235 if istart[i] > iend[i]:
1236 raise RuntimeError("Mask start index > mask end index")
1237 break
1238 return istart,iend
1239
1240# def get_restfreqs(self):
1241# """
1242# Get the restfrequency(s) stored in this scantable.
1243# The return value(s) are always of unit 'Hz'
1244# Parameters:
1245# none
1246# Returns:
1247# a list of doubles
1248# """
1249# return list(self._getrestfreqs())
1250
1251 def get_restfreqs(self, ids=None):
[1846]1252 """\
[256]1253 Get the restfrequency(s) stored in this scantable.
1254 The return value(s) are always of unit 'Hz'
[1846]1255
[256]1256 Parameters:
[1846]1257
[1819]1258 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1259 be retrieved
[1846]1260
[256]1261 Returns:
[1846]1262
[1819]1263 dictionary containing ids and a list of doubles for each id
[1846]1264
[256]1265 """
[1819]1266 if ids is None:
1267 rfreqs={}
1268 idlist = self.getmolnos()
1269 for i in idlist:
1270 rfreqs[i]=list(self._getrestfreqs(i))
1271 return rfreqs
1272 else:
1273 if type(ids)==list or type(ids)==tuple:
1274 rfreqs={}
1275 for i in ids:
1276 rfreqs[i]=list(self._getrestfreqs(i))
1277 return rfreqs
1278 else:
1279 return list(self._getrestfreqs(ids))
1280 #return list(self._getrestfreqs(ids))
[102]1281
[931]1282 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1283 """\
[931]1284 Set or replace the restfrequency specified and
1285 If the 'freqs' argument holds a scalar,
1286 then that rest frequency will be applied to all the selected
1287 data. If the 'freqs' argument holds
1288 a vector, then it MUST be of equal or smaller length than
1289 the number of IFs (and the available restfrequencies will be
1290 replaced by this vector). In this case, *all* data have
1291 the restfrequency set per IF according
1292 to the corresponding value you give in the 'freqs' vector.
[1118]1293 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1294 IF 1 gets restfreq 2e9.
[1846]1295
[1395]1296 You can also specify the frequencies via a linecatalog.
[1153]1297
[931]1298 Parameters:
[1846]1299
[931]1300 freqs: list of rest frequency values or string idenitfiers
[1855]1301
[931]1302 unit: unit for rest frequency (default 'Hz')
[402]1303
[1846]1304
1305 Example::
1306
[1819]1307 # set the given restfrequency for the all currently selected IFs
[931]1308 scan.set_restfreqs(freqs=1.4e9)
[1845]1309 # set restfrequencies for the n IFs (n > 1) in the order of the
1310 # list, i.e
1311 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1312 # len(list_of_restfreqs) == nIF
1313 # for nIF == 1 the following will set multiple restfrequency for
1314 # that IF
[1819]1315 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1316 # set multiple restfrequencies per IF. as a list of lists where
1317 # the outer list has nIF elements, the inner s arbitrary
1318 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1319
[1846]1320 *Note*:
[1845]1321
[931]1322 To do more sophisticate Restfrequency setting, e.g. on a
1323 source and IF basis, use scantable.set_selection() before using
[1846]1324 this function::
[931]1325
[1846]1326 # provided your scantable is called scan
1327 selection = selector()
1328 selection.set_name("ORION*")
1329 selection.set_ifs([1])
1330 scan.set_selection(selection)
1331 scan.set_restfreqs(freqs=86.6e9)
1332
[931]1333 """
1334 varlist = vars()
[1157]1335 from asap import linecatalog
1336 # simple value
[1118]1337 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1338 self._setrestfreqs([freqs], [""], unit)
[1157]1339 # list of values
[1118]1340 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1341 # list values are scalars
[1118]1342 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1343 if len(freqs) == 1:
1344 self._setrestfreqs(freqs, [""], unit)
1345 else:
1346 # allow the 'old' mode of setting mulitple IFs
1347 sel = selector()
1348 savesel = self._getselection()
1349 iflist = self.getifnos()
1350 if len(freqs)>len(iflist):
1351 raise ValueError("number of elements in list of list "
1352 "exeeds the current IF selections")
1353 iflist = self.getifnos()
1354 for i, fval in enumerate(freqs):
1355 sel.set_ifs(iflist[i])
1356 self._setselection(sel)
1357 self._setrestfreqs([fval], [""], unit)
1358 self._setselection(savesel)
1359
1360 # list values are dict, {'value'=, 'name'=)
[1157]1361 elif isinstance(freqs[-1], dict):
[1845]1362 values = []
1363 names = []
1364 for d in freqs:
1365 values.append(d["value"])
1366 names.append(d["name"])
1367 self._setrestfreqs(values, names, unit)
[1819]1368 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1369 sel = selector()
1370 savesel = self._getselection()
[1322]1371 iflist = self.getifnos()
[1819]1372 if len(freqs)>len(iflist):
[1845]1373 raise ValueError("number of elements in list of list exeeds"
1374 " the current IF selections")
1375 for i, fval in enumerate(freqs):
[1322]1376 sel.set_ifs(iflist[i])
[1259]1377 self._setselection(sel)
[1845]1378 self._setrestfreqs(fval, [""], unit)
[1157]1379 self._setselection(savesel)
1380 # freqs are to be taken from a linecatalog
[1153]1381 elif isinstance(freqs, linecatalog):
1382 sel = selector()
1383 savesel = self._getselection()
1384 for i in xrange(freqs.nrow()):
[1322]1385 sel.set_ifs(iflist[i])
[1153]1386 self._setselection(sel)
[1845]1387 self._setrestfreqs([freqs.get_frequency(i)],
1388 [freqs.get_name(i)], "MHz")
[1153]1389 # ensure that we are not iterating past nIF
1390 if i == self.nif()-1: break
1391 self._setselection(savesel)
[931]1392 else:
1393 return
1394 self._add_history("set_restfreqs", varlist)
1395
[1360]1396 def shift_refpix(self, delta):
[1846]1397 """\
[1589]1398 Shift the reference pixel of the Spectra Coordinate by an
1399 integer amount.
[1846]1400
[1589]1401 Parameters:
[1846]1402
[1589]1403 delta: the amount to shift by
[1846]1404
1405 *Note*:
1406
[1589]1407 Be careful using this with broadband data.
[1846]1408
[1360]1409 """
[1731]1410 Scantable.shift_refpix(self, delta)
[931]1411
[1862]1412 @asaplog_post_dec
[1259]1413 def history(self, filename=None):
[1846]1414 """\
[1259]1415 Print the history. Optionally to a file.
[1846]1416
[1348]1417 Parameters:
[1846]1418
[1348]1419 filename: The name of the file to save the history to.
[1846]1420
[1259]1421 """
[484]1422 hist = list(self._gethistory())
[794]1423 out = "-"*80
[484]1424 for h in hist:
[489]1425 if h.startswith("---"):
[1857]1426 out = "\n".join([out, h])
[489]1427 else:
1428 items = h.split("##")
1429 date = items[0]
1430 func = items[1]
1431 items = items[2:]
[794]1432 out += "\n"+date+"\n"
1433 out += "Function: %s\n Parameters:" % (func)
[489]1434 for i in items:
1435 s = i.split("=")
[1118]1436 out += "\n %s = %s" % (s[0], s[1])
[1857]1437 out = "\n".join([out, "-"*80])
[1259]1438 if filename is not None:
1439 if filename is "":
1440 filename = 'scantable_history.txt'
1441 import os
1442 filename = os.path.expandvars(os.path.expanduser(filename))
1443 if not os.path.isdir(filename):
1444 data = open(filename, 'w')
1445 data.write(out)
1446 data.close()
1447 else:
1448 msg = "Illegal file name '%s'." % (filename)
[1859]1449 raise IOError(msg)
1450 return page(out)
[513]1451 #
1452 # Maths business
1453 #
[1862]1454 @asaplog_post_dec
[931]1455 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1456 """\
[1070]1457 Return the (time) weighted average of a scan.
[1846]1458
1459 *Note*:
1460
[1070]1461 in channels only - align if necessary
[1846]1462
[513]1463 Parameters:
[1846]1464
[513]1465 mask: an optional mask (only used for 'var' and 'tsys'
1466 weighting)
[1855]1467
[558]1468 scanav: True averages each scan separately
1469 False (default) averages all scans together,
[1855]1470
[1099]1471 weight: Weighting scheme.
1472 'none' (mean no weight)
1473 'var' (1/var(spec) weighted)
1474 'tsys' (1/Tsys**2 weighted)
1475 'tint' (integration time weighted)
1476 'tintsys' (Tint/Tsys**2)
1477 'median' ( median averaging)
[535]1478 The default is 'tint'
[1855]1479
[931]1480 align: align the spectra in velocity before averaging. It takes
1481 the time of the first spectrum as reference time.
[1846]1482
1483 Example::
1484
[513]1485 # time average the scantable without using a mask
[710]1486 newscan = scan.average_time()
[1846]1487
[513]1488 """
1489 varlist = vars()
[1593]1490 weight = weight or 'TINT'
1491 mask = mask or ()
1492 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1493 scan = (self, )
[1859]1494
1495 if align:
1496 scan = (self.freq_align(insitu=False), )
1497 s = None
1498 if weight.upper() == 'MEDIAN':
1499 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1500 scanav))
1501 else:
1502 s = scantable(self._math._average(scan, mask, weight.upper(),
1503 scanav))
[1099]1504 s._add_history("average_time", varlist)
[513]1505 return s
[710]1506
[1862]1507 @asaplog_post_dec
[876]1508 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1509 """\
[513]1510 Return a scan where all spectra are converted to either
1511 Jansky or Kelvin depending upon the flux units of the scan table.
1512 By default the function tries to look the values up internally.
1513 If it can't find them (or if you want to over-ride), you must
1514 specify EITHER jyperk OR eta (and D which it will try to look up
1515 also if you don't set it). jyperk takes precedence if you set both.
[1846]1516
[513]1517 Parameters:
[1846]1518
[513]1519 jyperk: the Jy / K conversion factor
[1855]1520
[513]1521 eta: the aperture efficiency
[1855]1522
[513]1523 d: the geomtric diameter (metres)
[1855]1524
[513]1525 insitu: if False a new scantable is returned.
1526 Otherwise, the scaling is done in-situ
1527 The default is taken from .asaprc (False)
[1846]1528
[513]1529 """
1530 if insitu is None: insitu = rcParams['insitu']
[876]1531 self._math._setinsitu(insitu)
[513]1532 varlist = vars()
[1593]1533 jyperk = jyperk or -1.0
1534 d = d or -1.0
1535 eta = eta or -1.0
[876]1536 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1537 s._add_history("convert_flux", varlist)
1538 if insitu: self._assign(s)
1539 else: return s
[513]1540
[1862]1541 @asaplog_post_dec
[876]1542 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1543 """\
[513]1544 Return a scan after applying a gain-elevation correction.
1545 The correction can be made via either a polynomial or a
1546 table-based interpolation (and extrapolation if necessary).
1547 You specify polynomial coefficients, an ascii table or neither.
1548 If you specify neither, then a polynomial correction will be made
1549 with built in coefficients known for certain telescopes (an error
1550 will occur if the instrument is not known).
1551 The data and Tsys are *divided* by the scaling factors.
[1846]1552
[513]1553 Parameters:
[1846]1554
[513]1555 poly: Polynomial coefficients (default None) to compute a
1556 gain-elevation correction as a function of
1557 elevation (in degrees).
[1855]1558
[513]1559 filename: The name of an ascii file holding correction factors.
1560 The first row of the ascii file must give the column
1561 names and these MUST include columns
1562 "ELEVATION" (degrees) and "FACTOR" (multiply data
1563 by this) somewhere.
1564 The second row must give the data type of the
1565 column. Use 'R' for Real and 'I' for Integer.
1566 An example file would be
1567 (actual factors are arbitrary) :
1568
1569 TIME ELEVATION FACTOR
1570 R R R
1571 0.1 0 0.8
1572 0.2 20 0.85
1573 0.3 40 0.9
1574 0.4 60 0.85
1575 0.5 80 0.8
1576 0.6 90 0.75
[1855]1577
[513]1578 method: Interpolation method when correcting from a table.
1579 Values are "nearest", "linear" (default), "cubic"
1580 and "spline"
[1855]1581
[513]1582 insitu: if False a new scantable is returned.
1583 Otherwise, the scaling is done in-situ
1584 The default is taken from .asaprc (False)
[1846]1585
[513]1586 """
1587
1588 if insitu is None: insitu = rcParams['insitu']
[876]1589 self._math._setinsitu(insitu)
[513]1590 varlist = vars()
[1593]1591 poly = poly or ()
[513]1592 from os.path import expandvars
1593 filename = expandvars(filename)
[876]1594 s = scantable(self._math._gainel(self, poly, filename, method))
1595 s._add_history("gain_el", varlist)
[1593]1596 if insitu:
1597 self._assign(s)
1598 else:
1599 return s
[710]1600
[1862]1601 @asaplog_post_dec
[931]1602 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1603 """\
[513]1604 Return a scan where all rows have been aligned in frequency/velocity.
1605 The alignment frequency frame (e.g. LSRK) is that set by function
1606 set_freqframe.
[1846]1607
[513]1608 Parameters:
[1855]1609
[513]1610 reftime: reference time to align at. By default, the time of
1611 the first row of data is used.
[1855]1612
[513]1613 method: Interpolation method for regridding the spectra.
1614 Choose from "nearest", "linear", "cubic" (default)
1615 and "spline"
[1855]1616
[513]1617 insitu: if False a new scantable is returned.
1618 Otherwise, the scaling is done in-situ
1619 The default is taken from .asaprc (False)
[1846]1620
[513]1621 """
[931]1622 if insitu is None: insitu = rcParams["insitu"]
[876]1623 self._math._setinsitu(insitu)
[513]1624 varlist = vars()
[1593]1625 reftime = reftime or ""
[931]1626 s = scantable(self._math._freq_align(self, reftime, method))
[876]1627 s._add_history("freq_align", varlist)
1628 if insitu: self._assign(s)
1629 else: return s
[513]1630
[1862]1631 @asaplog_post_dec
[1725]1632 def opacity(self, tau=None, insitu=None):
[1846]1633 """\
[513]1634 Apply an opacity correction. The data
1635 and Tsys are multiplied by the correction factor.
[1846]1636
[513]1637 Parameters:
[1855]1638
[1689]1639 tau: (list of) opacity from which the correction factor is
[513]1640 exp(tau*ZD)
[1689]1641 where ZD is the zenith-distance.
1642 If a list is provided, it has to be of length nIF,
1643 nIF*nPol or 1 and in order of IF/POL, e.g.
1644 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1645 if tau is `None` the opacities are determined from a
1646 model.
[1855]1647
[513]1648 insitu: if False a new scantable is returned.
1649 Otherwise, the scaling is done in-situ
1650 The default is taken from .asaprc (False)
[1846]1651
[513]1652 """
1653 if insitu is None: insitu = rcParams['insitu']
[876]1654 self._math._setinsitu(insitu)
[513]1655 varlist = vars()
[1689]1656 if not hasattr(tau, "__len__"):
1657 tau = [tau]
[876]1658 s = scantable(self._math._opacity(self, tau))
1659 s._add_history("opacity", varlist)
1660 if insitu: self._assign(s)
1661 else: return s
[513]1662
[1862]1663 @asaplog_post_dec
[513]1664 def bin(self, width=5, insitu=None):
[1846]1665 """\
[513]1666 Return a scan where all spectra have been binned up.
[1846]1667
[1348]1668 Parameters:
[1846]1669
[513]1670 width: The bin width (default=5) in pixels
[1855]1671
[513]1672 insitu: if False a new scantable is returned.
1673 Otherwise, the scaling is done in-situ
1674 The default is taken from .asaprc (False)
[1846]1675
[513]1676 """
1677 if insitu is None: insitu = rcParams['insitu']
[876]1678 self._math._setinsitu(insitu)
[513]1679 varlist = vars()
[876]1680 s = scantable(self._math._bin(self, width))
[1118]1681 s._add_history("bin", varlist)
[1589]1682 if insitu:
1683 self._assign(s)
1684 else:
1685 return s
[513]1686
[1862]1687 @asaplog_post_dec
[513]1688 def resample(self, width=5, method='cubic', insitu=None):
[1846]1689 """\
[1348]1690 Return a scan where all spectra have been binned up.
[1573]1691
[1348]1692 Parameters:
[1846]1693
[513]1694 width: The bin width (default=5) in pixels
[1855]1695
[513]1696 method: Interpolation method when correcting from a table.
1697 Values are "nearest", "linear", "cubic" (default)
1698 and "spline"
[1855]1699
[513]1700 insitu: if False a new scantable is returned.
1701 Otherwise, the scaling is done in-situ
1702 The default is taken from .asaprc (False)
[1846]1703
[513]1704 """
1705 if insitu is None: insitu = rcParams['insitu']
[876]1706 self._math._setinsitu(insitu)
[513]1707 varlist = vars()
[876]1708 s = scantable(self._math._resample(self, method, width))
[1118]1709 s._add_history("resample", varlist)
[876]1710 if insitu: self._assign(s)
1711 else: return s
[513]1712
[1862]1713 @asaplog_post_dec
[946]1714 def average_pol(self, mask=None, weight='none'):
[1846]1715 """\
[946]1716 Average the Polarisations together.
[1846]1717
[946]1718 Parameters:
[1846]1719
[946]1720 mask: An optional mask defining the region, where the
1721 averaging will be applied. The output will have all
1722 specified points masked.
[1855]1723
[946]1724 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1725 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1726
[946]1727 """
1728 varlist = vars()
[1593]1729 mask = mask or ()
[1010]1730 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1731 s._add_history("average_pol", varlist)
[992]1732 return s
[513]1733
[1862]1734 @asaplog_post_dec
[1145]1735 def average_beam(self, mask=None, weight='none'):
[1846]1736 """\
[1145]1737 Average the Beams together.
[1846]1738
[1145]1739 Parameters:
1740 mask: An optional mask defining the region, where the
1741 averaging will be applied. The output will have all
1742 specified points masked.
[1855]1743
[1145]1744 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1745 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1746
[1145]1747 """
1748 varlist = vars()
[1593]1749 mask = mask or ()
[1145]1750 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1751 s._add_history("average_beam", varlist)
1752 return s
1753
[1586]1754 def parallactify(self, pflag):
[1846]1755 """\
[1843]1756 Set a flag to indicate whether this data should be treated as having
[1617]1757 been 'parallactified' (total phase == 0.0)
[1846]1758
[1617]1759 Parameters:
[1855]1760
[1843]1761 pflag: Bool indicating whether to turn this on (True) or
[1617]1762 off (False)
[1846]1763
[1617]1764 """
[1586]1765 varlist = vars()
1766 self._parallactify(pflag)
1767 self._add_history("parallactify", varlist)
1768
[1862]1769 @asaplog_post_dec
[992]1770 def convert_pol(self, poltype=None):
[1846]1771 """\
[992]1772 Convert the data to a different polarisation type.
[1565]1773 Note that you will need cross-polarisation terms for most conversions.
[1846]1774
[992]1775 Parameters:
[1855]1776
[992]1777 poltype: The new polarisation type. Valid types are:
[1565]1778 "linear", "circular", "stokes" and "linpol"
[1846]1779
[992]1780 """
1781 varlist = vars()
[1859]1782 s = scantable(self._math._convertpol(self, poltype))
[1118]1783 s._add_history("convert_pol", varlist)
[992]1784 return s
1785
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so each row can be compared/reverted below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # choose the GUI or non-GUI plot backend per rc settings
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            # interactively show original vs smoothed for every row
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1863
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Side effects: stores per-row fit results on self.blpars,
        self.masklists and self.actualmask (None entries for rejected fits).

        Parameters:

            mask:       an optional mask

            order:      the order of the polynomial (default is 0)

            plot:       plot the fit and the residual. In this each
                        individual fit has to be approved, by typing 'y'
                        or 'n'

            uselin:     use linear polynomial fit

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

            rows:       row numbers of spectra to be processed.
                        (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # no mask given: fit using all channels
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # reset the per-row result attributes before refitting
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # combine the user mask with the row's own flag mask (CAS-1434)
                f.mask = mask_and(mask, workscan._getmask(r))
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected fit: record placeholders, leave data as-is
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum by the fit residual
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1950
[1819]1951
    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask:       an optional mask (list of bool, one per channel);
                        True marks channels used in the fit
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this each
                        individual fit has to be approved, by typing 'y'
                        or 'n'. Ignored if batch = True.
            batch:      if True a faster algorithm is used and logs
                        including the fit results are not output
                        (default is False)
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            rows:       row numbers of spectra to be processed.
                        (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams["insitu"]
        if insitu:
            workscan = self
        else:
            workscan = self.copy()

        varlist = vars()
        nchan = workscan.nchan()

        if mask is None:
            # default: fit against all channels
            mask = [True for i in xrange(nchan)]

        try:
            # NOTE(review): '== None' relies on rows not overloading __eq__;
            # 'is None' would be the safer idiom here
            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # fit results are stored on the scantable object as class
                # attributes; they do NOT track later changes to the data
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            if batch:
                # fast path: fitting done entirely on the C++ side, no logging
                for r in rows:
                    workscan._poly_baseline_batch(mask, order, r)
            elif plot:
                # interactive path: each fit is shown and must be accepted
                f = fitter()
                f.set_function(lpoly=order)
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected fits leave placeholders so indices stay
                        # aligned with the processed row list
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                    workscan._setspectrum(f.fitter.getresidual(), r)
                    self.blpars.append(f.get_parameters())
                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
                    self.actualmask.append(f.mask)

                f._p.unmap()
                f._p = None
            else:
                # non-interactive path: results are passed back from the C++
                # fitter through raw buffers; buffer_info() yields the
                # (address, length) of each array's underlying storage.
                # The array objects must stay alive until _poly_baseline
                # returns, which they do as locals of this loop body.
                import array
                for r in rows:
                    pars = array.array("f", [0.0 for i in range(order+1)])
                    pars_adr = pars.buffer_info()[0]
                    pars_len = pars.buffer_info()[1]

                    errs = array.array("f", [0.0 for i in range(order+1)])
                    errs_adr = errs.buffer_info()[0]
                    errs_len = errs.buffer_info()[1]

                    fmsk = array.array("i", [1 for i in range(nchan)])
                    fmsk_adr = fmsk.buffer_info()[0]
                    fmsk_len = fmsk.buffer_info()[1]

                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)

                    params = pars.tolist()
                    fmtd = ""
                    for i in xrange(len(params)): fmtd += " p%d= %3.6f," % (i, params[i])
                    fmtd = fmtd[:-1] # remove trailing ","
                    errors = errs.tolist()
                    fmask = fmsk.tolist()
                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0) # transform (1/0) -> (True/False)

                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
                    self.masklists.append(workscan.get_masklist(fmask, r))
                    self.actualmask.append(fmask)

                    asaplog.push(str(fmtd))

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams["verbose"]:
                asaplog.push(str(e))
                asaplog.push(str(msg))
                return
            else:
                raise RuntimeError(str(e)+'\n'+msg)
[1907]2070
2071
    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
                           threshold=3, chan_avg_limit=1, plot=False,
                           insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:

            mask:       an optional mask retrieved from scantable

            edge:       an optional number of channel to drop at the edge of
                        spectrum. If only one value is
                        specified, the same number will be dropped from
                        both sides of the spectrum. Default is to keep
                        all channels. Nested tuples represent individual
                        edge selection for different IFs (a number of spectral
                        channels can be different)

            order:      the order of the polynomial (default is 0)

            threshold:  the threshold used by line finder. It is better to
                        keep it large as only strong lines affect the
                        baseline solution.

            chan_avg_limit:
                        a maximum number of consecutive spectral channels to
                        average during the search of weak and broad lines.
                        The default is no averaging (and no search for weak
                        lines). If such lines can affect the fitted baseline
                        (e.g. a high order polynomial is fitted), increase this
                        parameter (usually values up to 8 are reasonable). Most
                        users of this method should find the default value
                        sufficient.

            plot:       plot the fit and the residual. In this each
                        individual fit has to be approved, by typing 'y'
                        or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            rows:       row numbers of spectra to be processed.
                        (default is None: for all rows)


        Example::

            scan2 = scan.auto_poly_baseline(order=7, insitu=False)

        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()
        from asap.asaplinefind import linefinder
        from asap import _is_sequence_or_number as _is_valid

        # check whether edge is set up for each IF individually
        # (a tuple of tuples/lists rather than a flat pair)
        individualedge = False;
        if len(edge) > 1:
            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
                individualedge = True;

        if not _is_valid(edge, int) and not individualedge:
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. Nested tuples are allowed \
            to make individual selection for different IFs."

        curedge = (0, 0)
        if individualedge:
            # validate every per-IF edge specification up front
            for edgepar in edge:
                if not _is_valid(edgepar, int):
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
        else:
            curedge = edge;

        if not insitu:
            workscan = self.copy()
        else:
            workscan = self

        # setup fitter
        f = fitter()
        f.set_function(lpoly=order)

        # setup line finder
        fl = linefinder()
        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)

        fl.set_scan(workscan)

        if mask is None:
            mask = _n_bools(workscan.nchan(), True)

        if rows is None:
            rows = xrange(workscan.nrow())
        elif isinstance(rows, int):
            rows = [ rows ]

        # Save parameters of baseline fits & masklists as a class attribute.
        # NOTICE: It does not reflect changes in scantable!
        if len(rows) > 0:
            self.blpars=[]
            self.masklists=[]
            self.actualmask=[]
        asaplog.push("Processing:")
        for r in rows:
            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
                 workscan.getpol(r), workscan.getcycle(r))
            asaplog.push(msg, False)

            # figure out edge parameter
            if individualedge:
                # NOTE(review): this condition looks inverted — it raises
                # precisely when enough edge elements ARE present
                # (len(edge) must exceed the 0-based IF index, so the error
                # case should be 'len(edge) <= workscan.getif(r)'). Confirm
                # against upstream before changing.
                if len(edge) >= workscan.getif(r):
                    raise RuntimeError, "Number of edge elements appear to " \
                                        "be less than the number of IFs"
                curedge = edge[workscan.getif(r)]

            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)

            # setup line finder
            fl.find_lines(r, actualmask, curedge)

            # fit against the line-free channels reported by the finder
            f.x = workscan._getabcissa(r)
            f.y = workscan._getspectrum(r)
            f.mask = fl.get_mask()
            f.data = None
            f.fit()

            # Show mask list
            masklist=workscan.get_masklist(f.mask, row=r)
            msg = "mask range: "+str(masklist)
            asaplog.push(msg, False)

            if plot:
                f.plot(residual=True)
                x = raw_input("Accept fit ( [y]/n ): ")
                if x.upper() == 'N':
                    # rejected: keep placeholders so list indices stay aligned
                    self.blpars.append(None)
                    self.masklists.append(None)
                    self.actualmask.append(None)
                    continue

            workscan._setspectrum(f.fitter.getresidual(), r)
            self.blpars.append(f.get_parameters())
            self.masklists.append(masklist)
            self.actualmask.append(f.mask)
        if plot:
            f._p.unmap()
            f._p = None
        workscan._add_history("auto_poly_baseline", varlist)
        if insitu:
            self._assign(workscan)
        else:
            return workscan
2229
[1862]2230 @asaplog_post_dec
[914]2231 def rotate_linpolphase(self, angle):
[1846]2232 """\
[914]2233 Rotate the phase of the complex polarization O=Q+iU correlation.
2234 This is always done in situ in the raw data. So if you call this
2235 function more than once then each call rotates the phase further.
[1846]2236
[914]2237 Parameters:
[1846]2238
[914]2239 angle: The angle (degrees) to rotate (add) by.
[1846]2240
2241 Example::
2242
[914]2243 scan.rotate_linpolphase(2.3)
[1846]2244
[914]2245 """
2246 varlist = vars()
[936]2247 self._math._rotate_linpolphase(self, angle)
[914]2248 self._add_history("rotate_linpolphase", varlist)
2249 return
[710]2250
[1862]2251 @asaplog_post_dec
[914]2252 def rotate_xyphase(self, angle):
[1846]2253 """\
[914]2254 Rotate the phase of the XY correlation. This is always done in situ
2255 in the data. So if you call this function more than once
2256 then each call rotates the phase further.
[1846]2257
[914]2258 Parameters:
[1846]2259
[914]2260 angle: The angle (degrees) to rotate (add) by.
[1846]2261
2262 Example::
2263
[914]2264 scan.rotate_xyphase(2.3)
[1846]2265
[914]2266 """
2267 varlist = vars()
[936]2268 self._math._rotate_xyphase(self, angle)
[914]2269 self._add_history("rotate_xyphase", varlist)
2270 return
2271
[1862]2272 @asaplog_post_dec
[914]2273 def swap_linears(self):
[1846]2274 """\
[1573]2275 Swap the linear polarisations XX and YY, or better the first two
[1348]2276 polarisations as this also works for ciculars.
[914]2277 """
2278 varlist = vars()
[936]2279 self._math._swap_linears(self)
[914]2280 self._add_history("swap_linears", varlist)
2281 return
2282
[1862]2283 @asaplog_post_dec
[914]2284 def invert_phase(self):
[1846]2285 """\
[914]2286 Invert the phase of the complex polarisation
2287 """
2288 varlist = vars()
[936]2289 self._math._invert_phase(self)
[914]2290 self._add_history("invert_phase", varlist)
2291 return
2292
[1862]2293 @asaplog_post_dec
[876]2294 def add(self, offset, insitu=None):
[1846]2295 """\
[513]2296 Return a scan where all spectra have the offset added
[1846]2297
[513]2298 Parameters:
[1846]2299
[513]2300 offset: the offset
[1855]2301
[513]2302 insitu: if False a new scantable is returned.
2303 Otherwise, the scaling is done in-situ
2304 The default is taken from .asaprc (False)
[1846]2305
[513]2306 """
2307 if insitu is None: insitu = rcParams['insitu']
[876]2308 self._math._setinsitu(insitu)
[513]2309 varlist = vars()
[876]2310 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2311 s._add_history("add", varlist)
[876]2312 if insitu:
2313 self._assign(s)
2314 else:
[513]2315 return s
2316
[1862]2317 @asaplog_post_dec
[1308]2318 def scale(self, factor, tsys=True, insitu=None):
[1846]2319 """\
2320
[513]2321 Return a scan where all spectra are scaled by the give 'factor'
[1846]2322
[513]2323 Parameters:
[1846]2324
[1819]2325 factor: the scaling factor (float or 1D float list)
[1855]2326
[513]2327 insitu: if False a new scantable is returned.
2328 Otherwise, the scaling is done in-situ
2329 The default is taken from .asaprc (False)
[1855]2330
[513]2331 tsys: if True (default) then apply the operation to Tsys
2332 as well as the data
[1846]2333
[513]2334 """
2335 if insitu is None: insitu = rcParams['insitu']
[876]2336 self._math._setinsitu(insitu)
[513]2337 varlist = vars()
[1819]2338 s = None
2339 import numpy
2340 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2341 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2342 from asapmath import _array2dOp
2343 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2344 else:
2345 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2346 else:
2347 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2348 s._add_history("scale", varlist)
[876]2349 if insitu:
2350 self._assign(s)
2351 else:
[513]2352 return s
2353
[1504]2354 def set_sourcetype(self, match, matchtype="pattern",
2355 sourcetype="reference"):
[1846]2356 """\
[1502]2357 Set the type of the source to be an source or reference scan
[1846]2358 using the provided pattern.
2359
[1502]2360 Parameters:
[1846]2361
[1504]2362 match: a Unix style pattern, regular expression or selector
[1855]2363
[1504]2364 matchtype: 'pattern' (default) UNIX style pattern or
2365 'regex' regular expression
[1855]2366
[1502]2367 sourcetype: the type of the source to use (source/reference)
[1846]2368
[1502]2369 """
2370 varlist = vars()
2371 basesel = self.get_selection()
2372 stype = -1
2373 if sourcetype.lower().startswith("r"):
2374 stype = 1
2375 elif sourcetype.lower().startswith("s"):
2376 stype = 0
[1504]2377 else:
[1502]2378 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2379 if matchtype.lower().startswith("p"):
2380 matchtype = "pattern"
2381 elif matchtype.lower().startswith("r"):
2382 matchtype = "regex"
2383 else:
2384 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2385 sel = selector()
2386 if isinstance(match, selector):
2387 sel = match
2388 else:
[1504]2389 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2390 self.set_selection(basesel+sel)
2391 self._setsourcetype(stype)
2392 self.set_selection(basesel)
[1573]2393 self._add_history("set_sourcetype", varlist)
[1502]2394
[1862]2395 @asaplog_post_dec
[1857]2396 @preserve_selection
[1819]2397 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2398 """\
[670]2399 This function allows to build quotients automatically.
[1819]2400 It assumes the observation to have the same number of
[670]2401 "ons" and "offs"
[1846]2402
[670]2403 Parameters:
[1846]2404
[710]2405 preserve: you can preserve (default) the continuum or
2406 remove it. The equations used are
[1857]2407
[670]2408 preserve: Output = Toff * (on/off) - Toff
[1857]2409
[1070]2410 remove: Output = Toff * (on/off) - Ton
[1855]2411
[1573]2412 mode: the on/off detection mode
[1348]2413 'paired' (default)
2414 identifies 'off' scans by the
2415 trailing '_R' (Mopra/Parkes) or
2416 '_e'/'_w' (Tid) and matches
2417 on/off pairs from the observing pattern
[1502]2418 'time'
2419 finds the closest off in time
[1348]2420
[1857]2421 .. todo:: verify argument is not implemented
2422
[670]2423 """
[1857]2424 varlist = vars()
[1348]2425 modes = ["time", "paired"]
[670]2426 if not mode in modes:
[876]2427 msg = "please provide valid mode. Valid modes are %s" % (modes)
2428 raise ValueError(msg)
[1348]2429 s = None
2430 if mode.lower() == "paired":
[1857]2431 sel = self.get_selection()
[1875]2432 sel.set_query("SRCTYPE==psoff")
[1356]2433 self.set_selection(sel)
[1348]2434 offs = self.copy()
[1875]2435 sel.set_query("SRCTYPE==pson")
[1356]2436 self.set_selection(sel)
[1348]2437 ons = self.copy()
2438 s = scantable(self._math._quotient(ons, offs, preserve))
2439 elif mode.lower() == "time":
2440 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2441 s._add_history("auto_quotient", varlist)
[876]2442 return s
[710]2443
[1862]2444 @asaplog_post_dec
[1145]2445 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2446 """\
[1143]2447 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2448
[1143]2449 Parameters:
[1846]2450
[1145]2451 mask: an optional mask to be used when weight == 'stddev'
[1855]2452
[1143]2453 weight: How to average the off beams. Default is 'median'.
[1855]2454
[1145]2455 preserve: you can preserve (default) the continuum or
[1855]2456 remove it. The equations used are:
[1846]2457
[1855]2458 preserve: Output = Toff * (on/off) - Toff
2459
2460 remove: Output = Toff * (on/off) - Ton
2461
[1217]2462 """
[1593]2463 mask = mask or ()
[1141]2464 varlist = vars()
2465 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2466 preoff = scantable(self._math._mx_extract(self, 'off'))
2467 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2468 from asapmath import quotient
[1145]2469 q = quotient(on, off, preserve)
[1143]2470 q._add_history("mx_quotient", varlist)
[1217]2471 return q
[513]2472
[1862]2473 @asaplog_post_dec
[718]2474 def freq_switch(self, insitu=None):
[1846]2475 """\
[718]2476 Apply frequency switching to the data.
[1846]2477
[718]2478 Parameters:
[1846]2479
[718]2480 insitu: if False a new scantable is returned.
2481 Otherwise, the swictching is done in-situ
2482 The default is taken from .asaprc (False)
[1846]2483
[718]2484 """
2485 if insitu is None: insitu = rcParams['insitu']
[876]2486 self._math._setinsitu(insitu)
[718]2487 varlist = vars()
[876]2488 s = scantable(self._math._freqswitch(self))
[1118]2489 s._add_history("freq_switch", varlist)
[1856]2490 if insitu:
2491 self._assign(s)
2492 else:
2493 return s
[718]2494
[1862]2495 @asaplog_post_dec
[780]2496 def recalc_azel(self):
[1846]2497 """Recalculate the azimuth and elevation for each position."""
[780]2498 varlist = vars()
[876]2499 self._recalcazel()
[780]2500 self._add_history("recalc_azel", varlist)
2501 return
2502
[1862]2503 @asaplog_post_dec
[513]2504 def __add__(self, other):
2505 varlist = vars()
2506 s = None
2507 if isinstance(other, scantable):
[1573]2508 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2509 elif isinstance(other, float):
[876]2510 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2511 else:
[718]2512 raise TypeError("Other input is not a scantable or float value")
[513]2513 s._add_history("operator +", varlist)
2514 return s
2515
[1862]2516 @asaplog_post_dec
[513]2517 def __sub__(self, other):
2518 """
2519 implicit on all axes and on Tsys
2520 """
2521 varlist = vars()
2522 s = None
2523 if isinstance(other, scantable):
[1588]2524 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2525 elif isinstance(other, float):
[876]2526 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2527 else:
[718]2528 raise TypeError("Other input is not a scantable or float value")
[513]2529 s._add_history("operator -", varlist)
2530 return s
[710]2531
[1862]2532 @asaplog_post_dec
[513]2533 def __mul__(self, other):
2534 """
2535 implicit on all axes and on Tsys
2536 """
2537 varlist = vars()
2538 s = None
2539 if isinstance(other, scantable):
[1588]2540 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2541 elif isinstance(other, float):
[876]2542 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2543 else:
[718]2544 raise TypeError("Other input is not a scantable or float value")
[513]2545 s._add_history("operator *", varlist)
2546 return s
2547
[710]2548
[1862]2549 @asaplog_post_dec
[513]2550 def __div__(self, other):
2551 """
2552 implicit on all axes and on Tsys
2553 """
2554 varlist = vars()
2555 s = None
2556 if isinstance(other, scantable):
[1589]2557 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2558 elif isinstance(other, float):
2559 if other == 0.0:
[718]2560 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2561 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2562 else:
[718]2563 raise TypeError("Other input is not a scantable or float value")
[513]2564 s._add_history("operator /", varlist)
2565 return s
2566
[1862]2567 @asaplog_post_dec
[530]2568 def get_fit(self, row=0):
[1846]2569 """\
[530]2570 Print or return the stored fits for a row in the scantable
[1846]2571
[530]2572 Parameters:
[1846]2573
[530]2574 row: the row which the fit has been applied to.
[1846]2575
[530]2576 """
2577 if row > self.nrow():
2578 return
[976]2579 from asap.asapfit import asapfit
[530]2580 fit = asapfit(self._getfit(row))
[1859]2581 asaplog.push( '%s' %(fit) )
2582 return fit.as_dict()
[530]2583
[1483]2584 def flag_nans(self):
[1846]2585 """\
[1483]2586 Utility function to flag NaN values in the scantable.
2587 """
2588 import numpy
2589 basesel = self.get_selection()
2590 for i in range(self.nrow()):
[1589]2591 sel = self.get_row_selector(i)
2592 self.set_selection(basesel+sel)
[1483]2593 nans = numpy.isnan(self._getspectrum(0))
2594 if numpy.any(nans):
2595 bnans = [ bool(v) for v in nans]
2596 self.flag(bnans)
2597 self.set_selection(basesel)
2598
[1588]2599 def get_row_selector(self, rowno):
2600 return selector(beams=self.getbeam(rowno),
2601 ifs=self.getif(rowno),
2602 pols=self.getpol(rowno),
2603 scans=self.getscan(rowno),
2604 cycles=self.getcycle(rowno))
[1573]2605
[484]2606 def _add_history(self, funcname, parameters):
[1435]2607 if not rcParams['scantable.history']:
2608 return
[484]2609 # create date
2610 sep = "##"
2611 from datetime import datetime
2612 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2613 hist = dstr+sep
2614 hist += funcname+sep#cdate+sep
2615 if parameters.has_key('self'): del parameters['self']
[1118]2616 for k, v in parameters.iteritems():
[484]2617 if type(v) is dict:
[1118]2618 for k2, v2 in v.iteritems():
[484]2619 hist += k2
2620 hist += "="
[1118]2621 if isinstance(v2, scantable):
[484]2622 hist += 'scantable'
2623 elif k2 == 'mask':
[1118]2624 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2625 hist += str(self._zip_mask(v2))
2626 else:
2627 hist += str(v2)
[484]2628 else:
[513]2629 hist += str(v2)
[484]2630 else:
2631 hist += k
2632 hist += "="
[1118]2633 if isinstance(v, scantable):
[484]2634 hist += 'scantable'
2635 elif k == 'mask':
[1118]2636 if isinstance(v, list) or isinstance(v, tuple):
[513]2637 hist += str(self._zip_mask(v))
2638 else:
2639 hist += str(v)
[484]2640 else:
2641 hist += str(v)
2642 hist += sep
2643 hist = hist[:-2] # remove trailing '##'
2644 self._addhistory(hist)
2645
[710]2646
[484]2647 def _zip_mask(self, mask):
2648 mask = list(mask)
2649 i = 0
2650 segments = []
2651 while mask[i:].count(1):
2652 i += mask[i:].index(1)
2653 if mask[i:].count(0):
2654 j = i + mask[i:].index(0)
2655 else:
[710]2656 j = len(mask)
[1118]2657 segments.append([i, j])
[710]2658 i = j
[484]2659 return segments
[714]2660
[626]2661 def _get_ordinate_label(self):
2662 fu = "("+self.get_fluxunit()+")"
2663 import re
2664 lbl = "Intensity"
[1118]2665 if re.match(".K.", fu):
[626]2666 lbl = "Brightness Temperature "+ fu
[1118]2667 elif re.match(".Jy.", fu):
[626]2668 lbl = "Flux density "+ fu
2669 return lbl
[710]2670
[876]2671 def _check_ifs(self):
2672 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2673 nchans = filter(lambda t: t > 0, nchans)
[876]2674 return (sum(nchans)/len(nchans) == nchans[0])
[976]2675
    @asaplog_post_dec
    #def _fill(self, names, unit, average, getpt, antenna):
    def _fill(self, names, unit, average, opts={}):
        """Import one or more data files into this scantable.

        Each file is read with the asap filler, optionally time-averaged
        per scan, and merged into the current table. Re-initialises self
        after every file so that subsequent files merge into the result.

        Parameters:
            names:    list of file/directory names to import
            unit:     flux unit to set on the result, or None to leave as-is
            average:  if True, time-average integrations within each scan
            opts:     filler options dict, passed through to filler.open()
                      NOTE(review): mutable default argument — safe only if
                      neither this method nor filler.open() mutates it;
                      presumably format-specific (e.g. 'ms') options. Confirm.
        """
        first = True
        fullnames = []
        # expand and validate all paths before touching any of them
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 1 selects disk-based storage, 0 memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
            r.open(name, opts)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge previously accumulated data with the new table
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2715
[1402]2716 def __getitem__(self, key):
2717 if key < 0:
2718 key += self.nrow()
2719 if key >= self.nrow():
2720 raise IndexError("Row index out of range.")
2721 return self._getspectrum(key)
2722
2723 def __setitem__(self, key, value):
2724 if key < 0:
2725 key += self.nrow()
2726 if key >= self.nrow():
2727 raise IndexError("Row index out of range.")
2728 if not hasattr(value, "__len__") or \
2729 len(value) > self.nchan(self.getif(key)):
2730 raise ValueError("Spectrum length doesn't match.")
2731 return self._setspectrum(value, key)
2732
2733 def __len__(self):
2734 return self.nrow()
2735
2736 def __iter__(self):
2737 for i in range(len(self)):
2738 yield self[i]
Note: See TracBrowser for help on using the repository browser.