source: trunk/python/scantable.py@ 1928

Last change on this file since 1928 was 1928, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes/No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Fixed typo in help and bug in scantable.history().

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.0 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]18from asap.asapfitter import fitter
[102]19
[1689]20
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped callable's first argument must expose ``get_selection()``
    and ``set_selection()``; whatever selection is active on entry is
    re-applied on exit, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            return func(obj, *args, **kwargs)
        finally:
            obj.set_selection(saved)
    return wrapper
31
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a casacore table directory whose
    table.info does NOT identify it as a Measurement Set.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    # close the handle even if readline() raises (the original leaked it)
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        f.close()
    # any casacore table that is not a Measurement Set is assumed to be
    # a scantable (same heuristic as before, just a direct return)
    return l.find('Measurement Set') == -1
[1697]56
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a casacore table directory whose
    table.info identifies it as a Measurement Set.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    # close the handle even if readline() raises (the original leaked it)
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        f.close()
    return l.find('Measurement Set') != -1
77
[876]78class scantable(Scantable):
[1846]79 """\
80 The ASAP container for scans (single-dish data).
[102]81 """
[1819]82
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        (passed via **args) for MeasurementSet input only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      (passed via **args) for MeasurementSet input only:
                          Antenna selection. integer (id) or string (name or
                          id).

            parallactify: Indicate that the data had been parallatified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # NOTE: the former explicit getpt/antenna keyword handling was
        # removed; those options now travel through **args to the filler.
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments for the history entry
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference / copy-construct from an existing low-level table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific keywords
                    # ('getpt', 'antenna') to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: presumably rpfits/sdfits -- the filler
                    # decides; TODO confirm supported formats
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                 and isinstance(filename[-1], str):
                # a list/tuple of file names is filled as one scantable
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]185
[1862]186 @asaplog_post_dec
[876]187 def save(self, name=None, format=None, overwrite=False):
[1846]188 """\
[1280]189 Store the scantable on disk. This can be an asap (aips++) Table,
190 SDFITS or MS2 format.
[1846]191
[116]192 Parameters:
[1846]193
[1093]194 name: the name of the outputfile. For format "ASCII"
195 this is the root file name (data in 'name'.txt
[497]196 and header in 'name'_header.txt)
[1855]197
[116]198 format: an optional file format. Default is ASAP.
[1855]199 Allowed are:
200
201 * 'ASAP' (save as ASAP [aips++] Table),
202 * 'SDFITS' (save as SDFITS file)
203 * 'ASCII' (saves as ascii text file)
204 * 'MS2' (saves as an casacore MeasurementSet V2)
205 * 'FITS' (save as image FITS - not readable by class)
206 * 'CLASS' (save as FITS readable by CLASS)
207
[411]208 overwrite: If the file should be overwritten if it exists.
[256]209 The default False is to return with warning
[411]210 without writing the output. USE WITH CARE.
[1855]211
[1846]212 Example::
213
[116]214 scan.save('myscan.asap')
[1118]215 scan.save('myscan.sdfits', 'SDFITS')
[1846]216
[116]217 """
[411]218 from os import path
[1593]219 format = format or rcParams['scantable.save']
[256]220 suffix = '.'+format.lower()
[1118]221 if name is None or name == "":
[256]222 name = 'scantable'+suffix
[718]223 msg = "No filename given. Using default name %s..." % name
224 asaplog.push(msg)
[411]225 name = path.expandvars(name)
[256]226 if path.isfile(name) or path.isdir(name):
227 if not overwrite:
[718]228 msg = "File %s exists." % name
[1859]229 raise IOError(msg)
[451]230 format2 = format.upper()
231 if format2 == 'ASAP':
[116]232 self._save(name)
233 else:
[989]234 from asap._asap import stwriter as stw
[1118]235 writer = stw(format2)
236 writer.write(self, name)
[116]237 return
238
[102]239 def copy(self):
[1846]240 """Return a copy of this scantable.
241
242 *Note*:
243
[1348]244 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]245
246 Example::
247
[102]248 copiedscan = scan.copy()
[1846]249
[102]250 """
[876]251 sd = scantable(Scantable._copy(self))
[113]252 return sd
253
[1093]254 def drop_scan(self, scanid=None):
[1846]255 """\
[1093]256 Return a new scantable where the specified scan number(s) has(have)
257 been dropped.
[1846]258
[1093]259 Parameters:
[1846]260
[1093]261 scanid: a (list of) scan number(s)
[1846]262
[1093]263 """
264 from asap import _is_sequence_or_number as _is_valid
265 from asap import _to_list
266 from asap import unique
267 if not _is_valid(scanid):
[1859]268 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269 scanid = _to_list(scanid)
270 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271 for sid in scanid: allscans.remove(sid)
272 if len(allscans) == 0:
273 raise ValueError("Can't remove all scans")
274 sel = selector(scans=allscans)
275 return self._select_copy(sel)
[1093]276
[1594]277 def _select_copy(self, selection):
278 orig = self.get_selection()
279 self.set_selection(orig+selection)
280 cp = self.copy()
281 self.set_selection(orig)
282 return cp
283
[102]284 def get_scan(self, scanid=None):
[1855]285 """\
[102]286 Return a specific scan (by scanno) or collection of scans (by
287 source name) in a new scantable.
[1846]288
289 *Note*:
290
[1348]291 See scantable.drop_scan() for the inverse operation.
[1846]292
[102]293 Parameters:
[1846]294
[513]295 scanid: a (list of) scanno or a source name, unix-style
296 patterns are accepted for source name matching, e.g.
297 '*_R' gets all 'ref scans
[1846]298
299 Example::
300
[513]301 # get all scans containing the source '323p459'
302 newscan = scan.get_scan('323p459')
303 # get all 'off' scans
304 refscans = scan.get_scan('*_R')
305 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]306 newscan = scan.get_scan([0, 2, 7, 10])
[1846]307
[102]308 """
309 if scanid is None:
[1859]310 raise RuntimeError( 'Please specify a scan no or name to '
311 'retrieve from the scantable' )
[102]312 try:
[946]313 bsel = self.get_selection()
314 sel = selector()
[102]315 if type(scanid) is str:
[946]316 sel.set_name(scanid)
[1594]317 return self._select_copy(sel)
[102]318 elif type(scanid) is int:
[946]319 sel.set_scans([scanid])
[1594]320 return self._select_copy(sel)
[381]321 elif type(scanid) is list:
[946]322 sel.set_scans(scanid)
[1594]323 return self._select_copy(sel)
[381]324 else:
[718]325 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]326 raise TypeError(msg)
[102]327 except RuntimeError:
[1859]328 raise
[102]329
    def __str__(self):
        """Return the verbose summary of this scantable."""
        return Scantable._summary(self, True)
[102]332
[976]333 def summary(self, filename=None):
[1846]334 """\
[102]335 Print a summary of the contents of this scantable.
[1846]336
[102]337 Parameters:
[1846]338
[1928]339 filename: the name of a file to write the output to
[102]340 Default - no file output
[1846]341
[102]342 """
[976]343 info = Scantable._summary(self, True)
[102]344 if filename is not None:
[256]345 if filename is "":
346 filename = 'scantable_summary.txt'
[415]347 from os.path import expandvars, isdir
[411]348 filename = expandvars(filename)
[415]349 if not isdir(filename):
[413]350 data = open(filename, 'w')
351 data.write(info)
352 data.close()
353 else:
[718]354 msg = "Illegal file name '%s'." % (filename)
[1859]355 raise IOError(msg)
356 return page(info)
[710]357
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]367
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        """
        return self._getmask(rowno)
377
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the current row in the scantable.

        Parameters:

             spec:   the new spectrum; its length must equal the channel
                     count of the scantable

             rowno:  the row number to set the spectrum for

        """
        # NOTE(review): length validation via assert is stripped under
        # 'python -O'; an explicit ValueError would be safer.
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
390
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the
              user specified frame (e.g. LSRK/TOPO). To get the 'real'
              coordinate, use scantable.freq_align first. Without it there is
              no closure, i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the low-level coordinate in the asap convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
413
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)        # select IF 0
            scan.set_selection(sel)  # apply modified selection

        """
        return selector(self._getselection())
426
[1576]427 def set_selection(self, selection=None, **kw):
[1846]428 """\
[1005]429 Select a subset of the data. All following operations on this scantable
430 are only applied to thi selection.
[1846]431
[1005]432 Parameters:
[1697]433
[1846]434 selection: a selector object (default unset the selection), or
435 any combination of "pols", "ifs", "beams", "scans",
436 "cycles", "name", "query"
[1697]437
[1846]438 Examples::
[1697]439
[1005]440 sel = selector() # create a selection object
[1118]441 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]442 scan.set_selection(sel) # set the selection
443 scan.summary() # will only print summary of scanno 0 an 3
444 scan.set_selection() # unset the selection
[1697]445 # or the equivalent
446 scan.set_selection(scans=[0,3])
447 scan.summary() # will only print summary of scanno 0 an 3
448 scan.set_selection() # unset the selection
[1846]449
[946]450 """
[1576]451 if selection is None:
452 # reset
453 if len(kw) == 0:
454 selection = selector()
455 else:
456 # try keywords
457 for k in kw:
458 if k not in selector.fields:
459 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460 selection = selector(**kw)
[946]461 self._setselection(selection)
462
[1819]463 def get_row(self, row=0, insitu=None):
[1846]464 """\
[1819]465 Select a row in the scantable.
466 Return a scantable with single row.
[1846]467
[1819]468 Parameters:
[1846]469
470 row: row no of integration, default is 0.
471 insitu: if False a new scantable is returned. Otherwise, the
472 scaling is done in-situ. The default is taken from .asaprc
473 (False)
474
[1819]475 """
476 if insitu is None: insitu = rcParams['insitu']
477 if not insitu:
478 workscan = self.copy()
479 else:
480 workscan = self
481 # Select a row
482 sel=selector()
483 sel.set_scans([workscan.getscan(row)])
484 sel.set_cycles([workscan.getcycle(row)])
485 sel.set_beams([workscan.getbeam(row)])
486 sel.set_ifs([workscan.getif(row)])
487 sel.set_polarisations([workscan.getpol(row)])
488 sel.set_name(workscan._getsourcename(row))
489 workscan.set_selection(sel)
490 if not workscan.nrow() == 1:
491 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
492 raise RuntimeError(msg)
493 del sel
494 if insitu:
495 self._assign(workscan)
496 else:
497 return workscan
498
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abcissa value at the extremum
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max variants also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report while (for '_abc') collecting values
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # NOTE(review): the report goes through a predictably-named file in
        # /tmp before being pushed to the log -- a potential symlink/race
        # hazard and a NameError on 'statunit' if there are zero rows;
        # left unchanged here.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
606
    def chan2data(self, rowno=0, chan=0):
        """\
        Returns channel/frequency/velocity and spectral value
        at an arbitrary row and channel in the scantable.

        Parameters:

            rowno:   a row number in the scantable. Default is the
                     first row, i.e. rowno=0

            chan:    a channel in the scantable. Default is the first
                     channel, i.e. pos=0

        Returns:

            a pair of dicts {'unit', 'value'} for the abcissa and the
            spectrum. Returns None (implicitly) when either argument is
            not an int.

        """
        if isinstance(rowno, int) and isinstance(chan, int):
            qx = {'unit': self.get_unit(),
                  'value': self._getabcissa(rowno)[chan]}
            qy = {'unit': self.get_fluxunit(),
                  'value': self._getspectrum(rowno)[chan]}
            return qx, qy
627
[1118]628 def stddev(self, mask=None):
[1846]629 """\
[135]630 Determine the standard deviation of the current beam/if/pol
631 Takes a 'mask' as an optional parameter to specify which
632 channels should be excluded.
[1846]633
[135]634 Parameters:
[1846]635
[135]636 mask: an optional mask specifying where the standard
637 deviation should be determined.
638
[1846]639 Example::
640
[135]641 scan.set_unit('channel')
[1118]642 msk = scan.create_mask([100, 200], [500, 600])
[135]643 scan.stddev(mask=m)
[1846]644
[135]645 """
[1118]646 return self.stats(stat='stddev', mask=mask);
[135]647
[1003]648
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        return list(Scantable.get_column_names(self))
[1003]654
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # for "all rows" also log a formatted per-row report
        return self._row_callback(self._gettsys, "Tsys")
[256]671
[1730]672
673 def get_weather(self, row=-1):
[1846]674 """\
675 Return the weather informations.
676
677 Parameters:
678
679 row: the rowno to get the information for. (default all rows)
680
681 Returns:
682
683 a dict or list of of dicts of values for the current selection
684
685 """
686
[1730]687 values = self._get_column(self._get_weather, row)
688 if row > -1:
689 return {'temperature': values[0],
690 'pressure': values[1], 'humidity' : values[2],
691 'windspeed' : values[3], 'windaz' : values[4]
692 }
693 else:
694 out = []
695 for r in values:
696
697 out.append({'temperature': r[0],
698 'pressure': r[1], 'humidity' : r[2],
699 'windspeed' : r[3], 'windaz' : r[4]
700 })
701 return out
702
    def _row_callback(self, callback, label):
        """Apply *callback* to every row, log a formatted report and
        return the list of per-row values.

        Parameters:

            callback:   callable taking a row number

            label:      heading used in the logged report

        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that actually vary
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]726
[1070]727 def _get_column(self, callback, row=-1):
728 """
729 """
730 if row == -1:
731 return [callback(i) for i in range(self.nrow())]
732 else:
[1819]733 if 0 <= row < self.nrow():
[1070]734 return callback(row)
[256]735
[1070]736
    def get_time(self, row=-1, asdatetime=False):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

        """
        from time import strptime
        from datetime import datetime
        times = self._get_column(self._gettime, row)
        if not asdatetime:
            return times
        # the low-level _gettime formats time stamps like this
        format = "%Y/%m/%d/%H:%M:%S"
        if isinstance(times, list):
            return [datetime(*strptime(i, format)[:6]) for i in times]
        else:
            return datetime(*strptime(times, format)[:6])
[102]760
[1348]761
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]773
[1573]774
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]785
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]797
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]808
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]820
    def get_direction(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
832
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
844
[1862]845 @asaplog_post_dec
[102]846 def set_unit(self, unit='channel'):
[1846]847 """\
[102]848 Set the unit for all following operations on this scantable
[1846]849
[102]850 Parameters:
[1846]851
852 unit: optional unit, default is 'channel'. Use one of '*Hz',
853 'km/s', 'channel' or equivalent ''
854
[102]855 """
[484]856 varlist = vars()
[1118]857 if unit in ['', 'pixel', 'channel']:
[113]858 unit = ''
859 inf = list(self._getcoordinfo())
860 inf[0] = unit
861 self._setcoordinfo(inf)
[1118]862 self._add_history("set_unit", varlist)
[113]863
[1862]864 @asaplog_post_dec
[484]865 def set_instrument(self, instr):
[1846]866 """\
[1348]867 Set the instrument for subsequent processing.
[1846]868
[358]869 Parameters:
[1846]870
[710]871 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]872 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]873
[358]874 """
875 self._setInstrument(instr)
[1118]876 self._add_history("set_instument", vars())
[358]877
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:     'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
890
[1862]891 @asaplog_post_dec
[276]892 def set_doppler(self, doppler='RADIO'):
[1846]893 """\
[276]894 Set the doppler for all following operations on this scantable.
[1846]895
[276]896 Parameters:
[1846]897
[276]898 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]899
[276]900 """
[484]901 varlist = vars()
[276]902 inf = list(self._getcoordinfo())
903 inf[2] = doppler
904 self._setcoordinfo(inf)
[1118]905 self._add_history("set_doppler", vars())
[710]906
[1862]907 @asaplog_post_dec
[226]908 def set_freqframe(self, frame=None):
[1846]909 """\
[113]910 Set the frame type of the Spectral Axis.
[1846]911
[113]912 Parameters:
[1846]913
[591]914 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]915 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]916 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]917
918 Example::
919
[113]920 scan.set_freqframe('BARY')
[1846]921
[113]922 """
[1593]923 frame = frame or rcParams['scantable.freqframe']
[484]924 varlist = vars()
[1819]925 # "REST" is not implemented in casacore
926 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
927 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
928 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]929 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]930
[989]931 if frame in valid:
[113]932 inf = list(self._getcoordinfo())
933 inf[1] = frame
934 self._setcoordinfo(inf)
[1118]935 self._add_history("set_freqframe", varlist)
[102]936 else:
[1118]937 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]938 raise TypeError(msg)
[710]939
[1862]940 @asaplog_post_dec
[989]941 def set_dirframe(self, frame=""):
[1846]942 """\
[989]943 Set the frame type of the Direction on the sky.
[1846]944
[989]945 Parameters:
[1846]946
[989]947 frame: an optional frame type, default ''. Valid frames are:
948 'J2000', 'B1950', 'GALACTIC'
[1846]949
950 Example:
951
[989]952 scan.set_dirframe('GALACTIC')
[1846]953
[989]954 """
955 varlist = vars()
[1859]956 Scantable.set_dirframe(self, frame)
[1118]957 self._add_history("set_dirframe", varlist)
[989]958
[113]959 def get_unit(self):
[1846]960 """\
[113]961 Get the default unit set in this scantable
[1846]962
[113]963 Returns:
[1846]964
[113]965 A unit string
[1846]966
[113]967 """
968 inf = self._getcoordinfo()
969 unit = inf[0]
970 if unit == '': unit = 'channel'
971 return unit
[102]972
    @asaplog_post_dec
    def get_abcissa(self, rowno=0):
        """\
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol

        Parameters:

            rowno:    an optional row number in the scantable. Default is the
                      first row, i.e. rowno=0

        Returns:

            The abcissa values and the format string (as a dictionary)

        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        return abc, lbl
[113]992
    @asaplog_post_dec
    def flag(self, mask=None, unflag=False):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            mask:   an optional channel mask, created with create_mask. Default
                    (no mask) is all channels.

            unflag:    if True, unflag the data

        """
        varlist = vars()
        # None -> empty mask, meaning "all channels"
        mask = mask or []
        self._flag(mask, unflag)
        self._add_history("flag", varlist)
1010
[1862]1011 @asaplog_post_dec
[1819]1012 def flag_row(self, rows=[], unflag=False):
[1846]1013 """\
[1819]1014 Flag the selected data in row-based manner.
[1846]1015
[1819]1016 Parameters:
[1846]1017
[1843]1018 rows: list of row numbers to be flagged. Default is no row
1019 (must be explicitly specified to execute row-based flagging).
[1855]1020
[1819]1021 unflag: if True, unflag the data.
[1846]1022
[1819]1023 """
1024 varlist = vars()
[1859]1025 self._flag_row(rows, unflag)
[1819]1026 self._add_history("flag_row", varlist)
1027
[1862]1028 @asaplog_post_dec
[1819]1029 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1030 """\
[1819]1031 Flag the selected data outside a specified range (in channel-base)
[1846]1032
[1819]1033 Parameters:
[1846]1034
[1819]1035 uthres: upper threshold.
[1855]1036
[1819]1037 dthres: lower threshold
[1846]1038
[1819]1039 clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1040 False for flagging data inside the range.
[1855]1041
[1846]1042 unflag: if True, unflag the data.
1043
[1819]1044 """
1045 varlist = vars()
[1859]1046 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1047 self._add_history("clip", varlist)
1048
[1862]1049 @asaplog_post_dec
[1584]1050 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1051 """\
[1192]1052 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1053 Flagged data in the scantable gets interpolated over the region.
[1192]1054 No taper is applied.
[1846]1055
[1192]1056 Parameters:
[1846]1057
[1579]1058 start: the start frequency (really a period within the
1059 bandwidth) or period to remove
[1855]1060
[1579]1061 end: the end frequency or period to remove
[1855]1062
[1584]1063 unit: the frequency unit (default "MHz") or "" for
[1579]1064 explicit lag channels
[1846]1065
1066 *Notes*:
1067
[1579]1068 It is recommended to flag edges of the band or strong
[1348]1069 signals beforehand.
[1846]1070
[1192]1071 """
1072 if insitu is None: insitu = rcParams['insitu']
1073 self._math._setinsitu(insitu)
1074 varlist = vars()
[1579]1075 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1076 if not (unit == "" or base.has_key(unit)):
[1192]1077 raise ValueError("%s is not a valid unit." % unit)
[1859]1078 if unit == "":
1079 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1080 else:
1081 s = scantable(self._math._lag_flag(self, start*base[unit],
1082 end*base[unit], "frequency"))
[1192]1083 s._add_history("lag_flag", varlist)
1084 if insitu:
1085 self._assign(s)
1086 else:
1087 return s
[1001]1088
[1862]1089 @asaplog_post_dec
[113]1090 def create_mask(self, *args, **kwargs):
[1846]1091 """\
[1118]1092 Compute and return a mask based on [min, max] windows.
[189]1093 The specified windows are to be INCLUDED, when the mask is
[113]1094 applied.
[1846]1095
[102]1096 Parameters:
[1846]1097
[1118]1098 [min, max], [min2, max2], ...
[1024]1099 Pairs of start/end points (inclusive)specifying the regions
[102]1100 to be masked
[1855]1101
[189]1102 invert: optional argument. If specified as True,
1103 return an inverted mask, i.e. the regions
1104 specified are EXCLUDED
[1855]1105
[513]1106 row: create the mask using the specified row for
1107 unit conversions, default is row=0
1108 only necessary if frequency varies over rows.
[1846]1109
1110 Examples::
1111
[113]1112 scan.set_unit('channel')
[1846]1113 # a)
[1118]1114 msk = scan.create_mask([400, 500], [800, 900])
[189]1115 # masks everything outside 400 and 500
[113]1116 # and 800 and 900 in the unit 'channel'
1117
[1846]1118 # b)
[1118]1119 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1120 # masks the regions between 400 and 500
[113]1121 # and 800 and 900 in the unit 'channel'
[1846]1122
1123 # c)
1124 #mask only channel 400
[1554]1125 msk = scan.create_mask([400])
[1846]1126
[102]1127 """
[1554]1128 row = kwargs.get("row", 0)
[513]1129 data = self._getabcissa(row)
[113]1130 u = self._getcoordinfo()[0]
[1859]1131 if u == "":
1132 u = "channel"
1133 msg = "The current mask window unit is %s" % u
1134 i = self._check_ifs()
1135 if not i:
1136 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1137 asaplog.push(msg)
[102]1138 n = self.nchan()
[1295]1139 msk = _n_bools(n, False)
[710]1140 # test if args is a 'list' or a 'normal *args - UGLY!!!
1141
[1118]1142 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1143 and args or args[0]
[710]1144 for window in ws:
[1554]1145 if len(window) == 1:
1146 window = [window[0], window[0]]
1147 if len(window) == 0 or len(window) > 2:
1148 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1149 if window[0] > window[1]:
1150 tmp = window[0]
1151 window[0] = window[1]
1152 window[1] = tmp
[102]1153 for i in range(n):
[1024]1154 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1155 msk[i] = True
[113]1156 if kwargs.has_key('invert'):
1157 if kwargs.get('invert'):
[1295]1158 msk = mask_not(msk)
[102]1159 return msk
[710]1160
[1819]1161 def get_masklist(self, mask=None, row=0):
[1846]1162 """\
[1819]1163 Compute and return a list of mask windows, [min, max].
[1846]1164
[1819]1165 Parameters:
[1846]1166
[1819]1167 mask: channel mask, created with create_mask.
[1855]1168
[1819]1169 row: calcutate the masklist using the specified row
1170 for unit conversions, default is row=0
1171 only necessary if frequency varies over rows.
[1846]1172
[1819]1173 Returns:
[1846]1174
[1819]1175 [min, max], [min2, max2], ...
1176 Pairs of start/end points (inclusive)specifying
1177 the masked regions
[1846]1178
[1819]1179 """
1180 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1181 raise TypeError("The mask should be list or tuple.")
1182 if len(mask) < 2:
1183 raise TypeError("The mask elements should be > 1")
1184 if self.nchan() != len(mask):
1185 msg = "Number of channels in scantable != number of mask elements"
1186 raise TypeError(msg)
1187 data = self._getabcissa(row)
1188 u = self._getcoordinfo()[0]
[1859]1189 if u == "":
1190 u = "channel"
1191 msg = "The current mask window unit is %s" % u
1192 i = self._check_ifs()
1193 if not i:
1194 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1195 asaplog.push(msg)
[1819]1196 masklist=[]
1197 ist, ien = None, None
1198 ist, ien=self.get_mask_indices(mask)
1199 if ist is not None and ien is not None:
1200 for i in xrange(len(ist)):
1201 range=[data[ist[i]],data[ien[i]]]
1202 range.sort()
1203 masklist.append([range[0],range[1]])
1204 return masklist
1205
1206 def get_mask_indices(self, mask=None):
[1846]1207 """\
[1819]1208 Compute and Return lists of mask start indices and mask end indices.
[1855]1209
1210 Parameters:
1211
[1819]1212 mask: channel mask, created with create_mask.
[1846]1213
[1819]1214 Returns:
[1846]1215
[1819]1216 List of mask start indices and that of mask end indices,
1217 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1218
[1819]1219 """
1220 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1221 raise TypeError("The mask should be list or tuple.")
1222 if len(mask) < 2:
1223 raise TypeError("The mask elements should be > 1")
1224 istart=[]
1225 iend=[]
1226 if mask[0]: istart.append(0)
1227 for i in range(len(mask)-1):
1228 if not mask[i] and mask[i+1]:
1229 istart.append(i+1)
1230 elif mask[i] and not mask[i+1]:
1231 iend.append(i)
1232 if mask[len(mask)-1]: iend.append(len(mask)-1)
1233 if len(istart) != len(iend):
1234 raise RuntimeError("Numbers of mask start != mask end.")
1235 for i in range(len(istart)):
1236 if istart[i] > iend[i]:
1237 raise RuntimeError("Mask start index > mask end index")
1238 break
1239 return istart,iend
1240
1241# def get_restfreqs(self):
1242# """
1243# Get the restfrequency(s) stored in this scantable.
1244# The return value(s) are always of unit 'Hz'
1245# Parameters:
1246# none
1247# Returns:
1248# a list of doubles
1249# """
1250# return list(self._getrestfreqs())
1251
1252 def get_restfreqs(self, ids=None):
[1846]1253 """\
[256]1254 Get the restfrequency(s) stored in this scantable.
1255 The return value(s) are always of unit 'Hz'
[1846]1256
[256]1257 Parameters:
[1846]1258
[1819]1259 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1260 be retrieved
[1846]1261
[256]1262 Returns:
[1846]1263
[1819]1264 dictionary containing ids and a list of doubles for each id
[1846]1265
[256]1266 """
[1819]1267 if ids is None:
1268 rfreqs={}
1269 idlist = self.getmolnos()
1270 for i in idlist:
1271 rfreqs[i]=list(self._getrestfreqs(i))
1272 return rfreqs
1273 else:
1274 if type(ids)==list or type(ids)==tuple:
1275 rfreqs={}
1276 for i in ids:
1277 rfreqs[i]=list(self._getrestfreqs(i))
1278 return rfreqs
1279 else:
1280 return list(self._getrestfreqs(ids))
1281 #return list(self._getrestfreqs(ids))
[102]1282
[931]1283 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1284 """\
[931]1285 Set or replace the restfrequency specified and
[1928]1286 if the 'freqs' argument holds a scalar,
[931]1287 then that rest frequency will be applied to all the selected
1288 data. If the 'freqs' argument holds
1289 a vector, then it MUST be of equal or smaller length than
1290 the number of IFs (and the available restfrequencies will be
1291 replaced by this vector). In this case, *all* data have
1292 the restfrequency set per IF according
1293 to the corresponding value you give in the 'freqs' vector.
[1118]1294 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1295 IF 1 gets restfreq 2e9.
[1846]1296
[1395]1297 You can also specify the frequencies via a linecatalog.
[1153]1298
[931]1299 Parameters:
[1846]1300
[931]1301 freqs: list of rest frequency values or string idenitfiers
[1855]1302
[931]1303 unit: unit for rest frequency (default 'Hz')
[402]1304
[1846]1305
1306 Example::
1307
[1819]1308 # set the given restfrequency for the all currently selected IFs
[931]1309 scan.set_restfreqs(freqs=1.4e9)
[1845]1310 # set restfrequencies for the n IFs (n > 1) in the order of the
1311 # list, i.e
1312 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1313 # len(list_of_restfreqs) == nIF
1314 # for nIF == 1 the following will set multiple restfrequency for
1315 # that IF
[1819]1316 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1317 # set multiple restfrequencies per IF. as a list of lists where
1318 # the outer list has nIF elements, the inner s arbitrary
1319 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1320
[1846]1321 *Note*:
[1845]1322
[931]1323 To do more sophisticate Restfrequency setting, e.g. on a
1324 source and IF basis, use scantable.set_selection() before using
[1846]1325 this function::
[931]1326
[1846]1327 # provided your scantable is called scan
1328 selection = selector()
1329 selection.set_name("ORION*")
1330 selection.set_ifs([1])
1331 scan.set_selection(selection)
1332 scan.set_restfreqs(freqs=86.6e9)
1333
[931]1334 """
1335 varlist = vars()
[1157]1336 from asap import linecatalog
1337 # simple value
[1118]1338 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1339 self._setrestfreqs([freqs], [""], unit)
[1157]1340 # list of values
[1118]1341 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1342 # list values are scalars
[1118]1343 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1344 if len(freqs) == 1:
1345 self._setrestfreqs(freqs, [""], unit)
1346 else:
1347 # allow the 'old' mode of setting mulitple IFs
1348 sel = selector()
1349 savesel = self._getselection()
1350 iflist = self.getifnos()
1351 if len(freqs)>len(iflist):
1352 raise ValueError("number of elements in list of list "
1353 "exeeds the current IF selections")
1354 iflist = self.getifnos()
1355 for i, fval in enumerate(freqs):
1356 sel.set_ifs(iflist[i])
1357 self._setselection(sel)
1358 self._setrestfreqs([fval], [""], unit)
1359 self._setselection(savesel)
1360
1361 # list values are dict, {'value'=, 'name'=)
[1157]1362 elif isinstance(freqs[-1], dict):
[1845]1363 values = []
1364 names = []
1365 for d in freqs:
1366 values.append(d["value"])
1367 names.append(d["name"])
1368 self._setrestfreqs(values, names, unit)
[1819]1369 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1370 sel = selector()
1371 savesel = self._getselection()
[1322]1372 iflist = self.getifnos()
[1819]1373 if len(freqs)>len(iflist):
[1845]1374 raise ValueError("number of elements in list of list exeeds"
1375 " the current IF selections")
1376 for i, fval in enumerate(freqs):
[1322]1377 sel.set_ifs(iflist[i])
[1259]1378 self._setselection(sel)
[1845]1379 self._setrestfreqs(fval, [""], unit)
[1157]1380 self._setselection(savesel)
1381 # freqs are to be taken from a linecatalog
[1153]1382 elif isinstance(freqs, linecatalog):
1383 sel = selector()
1384 savesel = self._getselection()
1385 for i in xrange(freqs.nrow()):
[1322]1386 sel.set_ifs(iflist[i])
[1153]1387 self._setselection(sel)
[1845]1388 self._setrestfreqs([freqs.get_frequency(i)],
1389 [freqs.get_name(i)], "MHz")
[1153]1390 # ensure that we are not iterating past nIF
1391 if i == self.nif()-1: break
1392 self._setselection(savesel)
[931]1393 else:
1394 return
1395 self._add_history("set_restfreqs", varlist)
1396
[1360]1397 def shift_refpix(self, delta):
[1846]1398 """\
[1589]1399 Shift the reference pixel of the Spectra Coordinate by an
1400 integer amount.
[1846]1401
[1589]1402 Parameters:
[1846]1403
[1589]1404 delta: the amount to shift by
[1846]1405
1406 *Note*:
1407
[1589]1408 Be careful using this with broadband data.
[1846]1409
[1360]1410 """
[1731]1411 Scantable.shift_refpix(self, delta)
[931]1412
[1862]1413 @asaplog_post_dec
[1259]1414 def history(self, filename=None):
[1846]1415 """\
[1259]1416 Print the history. Optionally to a file.
[1846]1417
[1348]1418 Parameters:
[1846]1419
[1928]1420 filename: The name of the file to save the history to.
[1846]1421
[1259]1422 """
[484]1423 hist = list(self._gethistory())
[794]1424 out = "-"*80
[484]1425 for h in hist:
[489]1426 if h.startswith("---"):
[1857]1427 out = "\n".join([out, h])
[489]1428 else:
1429 items = h.split("##")
1430 date = items[0]
1431 func = items[1]
1432 items = items[2:]
[794]1433 out += "\n"+date+"\n"
1434 out += "Function: %s\n Parameters:" % (func)
[489]1435 for i in items:
[1928]1436 if i == '':
1437 continue
[489]1438 s = i.split("=")
[1118]1439 out += "\n %s = %s" % (s[0], s[1])
[1857]1440 out = "\n".join([out, "-"*80])
[1259]1441 if filename is not None:
1442 if filename is "":
1443 filename = 'scantable_history.txt'
1444 import os
1445 filename = os.path.expandvars(os.path.expanduser(filename))
1446 if not os.path.isdir(filename):
1447 data = open(filename, 'w')
1448 data.write(out)
1449 data.close()
1450 else:
1451 msg = "Illegal file name '%s'." % (filename)
[1859]1452 raise IOError(msg)
1453 return page(out)
[513]1454 #
1455 # Maths business
1456 #
[1862]1457 @asaplog_post_dec
[931]1458 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1459 """\
[1070]1460 Return the (time) weighted average of a scan.
[1846]1461
1462 *Note*:
1463
[1070]1464 in channels only - align if necessary
[1846]1465
[513]1466 Parameters:
[1846]1467
[513]1468 mask: an optional mask (only used for 'var' and 'tsys'
1469 weighting)
[1855]1470
[558]1471 scanav: True averages each scan separately
1472 False (default) averages all scans together,
[1855]1473
[1099]1474 weight: Weighting scheme.
1475 'none' (mean no weight)
1476 'var' (1/var(spec) weighted)
1477 'tsys' (1/Tsys**2 weighted)
1478 'tint' (integration time weighted)
1479 'tintsys' (Tint/Tsys**2)
1480 'median' ( median averaging)
[535]1481 The default is 'tint'
[1855]1482
[931]1483 align: align the spectra in velocity before averaging. It takes
1484 the time of the first spectrum as reference time.
[1846]1485
1486 Example::
1487
[513]1488 # time average the scantable without using a mask
[710]1489 newscan = scan.average_time()
[1846]1490
[513]1491 """
1492 varlist = vars()
[1593]1493 weight = weight or 'TINT'
1494 mask = mask or ()
1495 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1496 scan = (self, )
[1859]1497
1498 if align:
1499 scan = (self.freq_align(insitu=False), )
1500 s = None
1501 if weight.upper() == 'MEDIAN':
1502 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1503 scanav))
1504 else:
1505 s = scantable(self._math._average(scan, mask, weight.upper(),
1506 scanav))
[1099]1507 s._add_history("average_time", varlist)
[513]1508 return s
[710]1509
[1862]1510 @asaplog_post_dec
[876]1511 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1512 """\
[513]1513 Return a scan where all spectra are converted to either
1514 Jansky or Kelvin depending upon the flux units of the scan table.
1515 By default the function tries to look the values up internally.
1516 If it can't find them (or if you want to over-ride), you must
1517 specify EITHER jyperk OR eta (and D which it will try to look up
1518 also if you don't set it). jyperk takes precedence if you set both.
[1846]1519
[513]1520 Parameters:
[1846]1521
[513]1522 jyperk: the Jy / K conversion factor
[1855]1523
[513]1524 eta: the aperture efficiency
[1855]1525
[1928]1526 d: the geometric diameter (metres)
[1855]1527
[513]1528 insitu: if False a new scantable is returned.
1529 Otherwise, the scaling is done in-situ
1530 The default is taken from .asaprc (False)
[1846]1531
[513]1532 """
1533 if insitu is None: insitu = rcParams['insitu']
[876]1534 self._math._setinsitu(insitu)
[513]1535 varlist = vars()
[1593]1536 jyperk = jyperk or -1.0
1537 d = d or -1.0
1538 eta = eta or -1.0
[876]1539 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1540 s._add_history("convert_flux", varlist)
1541 if insitu: self._assign(s)
1542 else: return s
[513]1543
[1862]1544 @asaplog_post_dec
[876]1545 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1546 """\
[513]1547 Return a scan after applying a gain-elevation correction.
1548 The correction can be made via either a polynomial or a
1549 table-based interpolation (and extrapolation if necessary).
1550 You specify polynomial coefficients, an ascii table or neither.
1551 If you specify neither, then a polynomial correction will be made
1552 with built in coefficients known for certain telescopes (an error
1553 will occur if the instrument is not known).
1554 The data and Tsys are *divided* by the scaling factors.
[1846]1555
[513]1556 Parameters:
[1846]1557
[513]1558 poly: Polynomial coefficients (default None) to compute a
1559 gain-elevation correction as a function of
1560 elevation (in degrees).
[1855]1561
[513]1562 filename: The name of an ascii file holding correction factors.
1563 The first row of the ascii file must give the column
1564 names and these MUST include columns
1565 "ELEVATION" (degrees) and "FACTOR" (multiply data
1566 by this) somewhere.
1567 The second row must give the data type of the
1568 column. Use 'R' for Real and 'I' for Integer.
1569 An example file would be
1570 (actual factors are arbitrary) :
1571
1572 TIME ELEVATION FACTOR
1573 R R R
1574 0.1 0 0.8
1575 0.2 20 0.85
1576 0.3 40 0.9
1577 0.4 60 0.85
1578 0.5 80 0.8
1579 0.6 90 0.75
[1855]1580
[513]1581 method: Interpolation method when correcting from a table.
1582 Values are "nearest", "linear" (default), "cubic"
1583 and "spline"
[1855]1584
[513]1585 insitu: if False a new scantable is returned.
1586 Otherwise, the scaling is done in-situ
1587 The default is taken from .asaprc (False)
[1846]1588
[513]1589 """
1590
1591 if insitu is None: insitu = rcParams['insitu']
[876]1592 self._math._setinsitu(insitu)
[513]1593 varlist = vars()
[1593]1594 poly = poly or ()
[513]1595 from os.path import expandvars
1596 filename = expandvars(filename)
[876]1597 s = scantable(self._math._gainel(self, poly, filename, method))
1598 s._add_history("gain_el", varlist)
[1593]1599 if insitu:
1600 self._assign(s)
1601 else:
1602 return s
[710]1603
[1862]1604 @asaplog_post_dec
[931]1605 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1606 """\
[513]1607 Return a scan where all rows have been aligned in frequency/velocity.
1608 The alignment frequency frame (e.g. LSRK) is that set by function
1609 set_freqframe.
[1846]1610
[513]1611 Parameters:
[1855]1612
[513]1613 reftime: reference time to align at. By default, the time of
1614 the first row of data is used.
[1855]1615
[513]1616 method: Interpolation method for regridding the spectra.
1617 Choose from "nearest", "linear", "cubic" (default)
1618 and "spline"
[1855]1619
[513]1620 insitu: if False a new scantable is returned.
1621 Otherwise, the scaling is done in-situ
1622 The default is taken from .asaprc (False)
[1846]1623
[513]1624 """
[931]1625 if insitu is None: insitu = rcParams["insitu"]
[876]1626 self._math._setinsitu(insitu)
[513]1627 varlist = vars()
[1593]1628 reftime = reftime or ""
[931]1629 s = scantable(self._math._freq_align(self, reftime, method))
[876]1630 s._add_history("freq_align", varlist)
1631 if insitu: self._assign(s)
1632 else: return s
[513]1633
[1862]1634 @asaplog_post_dec
[1725]1635 def opacity(self, tau=None, insitu=None):
[1846]1636 """\
[513]1637 Apply an opacity correction. The data
1638 and Tsys are multiplied by the correction factor.
[1846]1639
[513]1640 Parameters:
[1855]1641
[1689]1642 tau: (list of) opacity from which the correction factor is
[513]1643 exp(tau*ZD)
[1689]1644 where ZD is the zenith-distance.
1645 If a list is provided, it has to be of length nIF,
1646 nIF*nPol or 1 and in order of IF/POL, e.g.
1647 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1648 if tau is `None` the opacities are determined from a
1649 model.
[1855]1650
[513]1651 insitu: if False a new scantable is returned.
1652 Otherwise, the scaling is done in-situ
1653 The default is taken from .asaprc (False)
[1846]1654
[513]1655 """
1656 if insitu is None: insitu = rcParams['insitu']
[876]1657 self._math._setinsitu(insitu)
[513]1658 varlist = vars()
[1689]1659 if not hasattr(tau, "__len__"):
1660 tau = [tau]
[876]1661 s = scantable(self._math._opacity(self, tau))
1662 s._add_history("opacity", varlist)
1663 if insitu: self._assign(s)
1664 else: return s
[513]1665
[1862]1666 @asaplog_post_dec
[513]1667 def bin(self, width=5, insitu=None):
[1846]1668 """\
[513]1669 Return a scan where all spectra have been binned up.
[1846]1670
[1348]1671 Parameters:
[1846]1672
[513]1673 width: The bin width (default=5) in pixels
[1855]1674
[513]1675 insitu: if False a new scantable is returned.
1676 Otherwise, the scaling is done in-situ
1677 The default is taken from .asaprc (False)
[1846]1678
[513]1679 """
1680 if insitu is None: insitu = rcParams['insitu']
[876]1681 self._math._setinsitu(insitu)
[513]1682 varlist = vars()
[876]1683 s = scantable(self._math._bin(self, width))
[1118]1684 s._add_history("bin", varlist)
[1589]1685 if insitu:
1686 self._assign(s)
1687 else:
1688 return s
[513]1689
[1862]1690 @asaplog_post_dec
    def resample(self, width=5, method='cubic', insitu=None):
        """\
        Return a scan where all spectra have been resampled onto a new
        channel grid binned up by the given width, interpolating between
        the original channels with the chosen method.

        Parameters:

            width: The bin width (default=5) in pixels

            method: Interpolation method for regridding the spectra.
                    Values are "nearest", "linear", "cubic" (default)
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._resample(self, method, width))
        s._add_history("resample", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1715
[1862]1716 @asaplog_post_dec
[946]1717 def average_pol(self, mask=None, weight='none'):
[1846]1718 """\
[946]1719 Average the Polarisations together.
[1846]1720
[946]1721 Parameters:
[1846]1722
[946]1723 mask: An optional mask defining the region, where the
1724 averaging will be applied. The output will have all
1725 specified points masked.
[1855]1726
[946]1727 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1728 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1729
[946]1730 """
1731 varlist = vars()
[1593]1732 mask = mask or ()
[1010]1733 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1734 s._add_history("average_pol", varlist)
[992]1735 return s
[513]1736
[1862]1737 @asaplog_post_dec
[1145]1738 def average_beam(self, mask=None, weight='none'):
[1846]1739 """\
[1145]1740 Average the Beams together.
[1846]1741
[1145]1742 Parameters:
1743 mask: An optional mask defining the region, where the
1744 averaging will be applied. The output will have all
1745 specified points masked.
[1855]1746
[1145]1747 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1748 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1749
[1145]1750 """
1751 varlist = vars()
[1593]1752 mask = mask or ()
[1145]1753 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1754 s._add_history("average_beam", varlist)
1755 return s
1756
[1586]1757 def parallactify(self, pflag):
[1846]1758 """\
[1843]1759 Set a flag to indicate whether this data should be treated as having
[1617]1760 been 'parallactified' (total phase == 0.0)
[1846]1761
[1617]1762 Parameters:
[1855]1763
[1843]1764 pflag: Bool indicating whether to turn this on (True) or
[1617]1765 off (False)
[1846]1766
[1617]1767 """
[1586]1768 varlist = vars()
1769 self._parallactify(pflag)
1770 self._add_history("parallactify", varlist)
1771
[1862]1772 @asaplog_post_dec
[992]1773 def convert_pol(self, poltype=None):
[1846]1774 """\
[992]1775 Convert the data to a different polarisation type.
[1565]1776 Note that you will need cross-polarisation terms for most conversions.
[1846]1777
[992]1778 Parameters:
[1855]1779
[992]1780 poltype: The new polarisation type. Valid types are:
[1565]1781 "linear", "circular", "stokes" and "linpol"
[1846]1782
[992]1783 """
1784 varlist = vars()
[1859]1785 s = scantable(self._math._convertpol(self, poltype))
[1118]1786 s._add_history("convert_pol", varlist)
[992]1787 return s
1788
[1862]1789 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defauls to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polnomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each indivual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive review: overlay original and smoothed spectra
            # row by row and let the user accept or reject each one
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1866
[1862]1867 @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'

            uselin: use linear polynomial fit

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows: row numbers of spectra to be processed.
                  (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # default: fit over all channels
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # per-row fit results, exposed as attributes for inspection
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # combine the user mask with the row's own flag mask (CAS-1434)
                f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected fits leave placeholders so indices
                        # still line up with the row numbers
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum with the fit residual
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1953
[1819]1954
    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'. Ignored if batch = True.

            batch: if True a faster algorithm is used and logs
                   including the fit results are not output
                   (default is False)

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows: row numbers of spectra to be processed.
                  (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams["insitu"]
        if insitu:
            workscan = self
        else:
            workscan = self.copy()

        varlist = vars()
        nchan = workscan.nchan()

        # default mask: all channels participate in the fit
        if mask is None:
            mask = [True for i in xrange(nchan)]

        try:
            # normalise 'rows' to an iterable of row numbers
            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            # Fit results are kept as class attributes. NOTE: they describe
            # the last call only and do not track later scantable changes.
            if len(rows) > 0:
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            if batch:
                # fast path: the whole fit runs in the C++ layer, with no
                # per-row logging or result bookkeeping
                for r in rows:
                    workscan._poly_baseline_batch(mask, order, r)
            elif plot:
                # interactive path: each fit is displayed and must be
                # accepted ('y', default) or rejected ('n') by the user
                f = fitter()
                f.set_function(lpoly=order)
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: record placeholders, leave spectrum as is
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                    workscan._setspectrum(f.fitter.getresidual(), r)
                    self.blpars.append(f.get_parameters())
                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
                    self.actualmask.append(f.mask)

                f._p.unmap()
                f._p = None
            else:
                # non-interactive path: results come back from C++ through
                # pre-allocated array buffers, passed as (address, length)
                import array
                for r in rows:
                    # fitted polynomial coefficients (order+1 floats)
                    pars = array.array("f", [0.0 for i in range(order+1)])
                    pars_adr = pars.buffer_info()[0]
                    pars_len = pars.buffer_info()[1]

                    # corresponding coefficient errors
                    errs = array.array("f", [0.0 for i in range(order+1)])
                    errs_adr = errs.buffer_info()[0]
                    errs_len = errs.buffer_info()[1]

                    # per-channel mask actually used by the fit (1/0 ints)
                    fmsk = array.array("i", [1 for i in range(nchan)])
                    fmsk_adr = fmsk.buffer_info()[0]
                    fmsk_len = fmsk.buffer_info()[1]

                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)

                    params = pars.tolist()
                    fmtd = ""
                    for i in xrange(len(params)): fmtd += " p%d= %3.6f," % (i, params[i])
                    fmtd = fmtd[:-1] # remove trailing ","
                    errors = errs.tolist()
                    fmask = fmsk.tolist()
                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0) # transform (1/0) -> (True/False)

                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
                    self.masklists.append(workscan.get_masklist(fmask, r))
                    self.actualmask.append(fmask)

                    asaplog.push(str(fmtd))

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams["verbose"]:
                asaplog.push(str(e))
                asaplog.push(str(msg))
                return
            else:
                raise RuntimeError(str(e)+'\n'+msg)
[1907]2073
2074
2075 def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2076 threshold=3, chan_avg_limit=1, plot=False,
[1907]2077 insitu=None, rows=None):
[1846]2078 """\
[1928]2079 Return a scan which has been baselined (all rows by default)
2080 by a polynomial.
[880]2081 Spectral lines are detected first using linefinder and masked out
2082 to avoid them affecting the baseline solution.
2083
2084 Parameters:
[1846]2085
[880]2086 mask: an optional mask retreived from scantable
[1846]2087
2088 edge: an optional number of channel to drop at the edge of
2089 spectrum. If only one value is
[880]2090 specified, the same number will be dropped from
2091 both sides of the spectrum. Default is to keep
[907]2092 all channels. Nested tuples represent individual
[976]2093 edge selection for different IFs (a number of spectral
2094 channels can be different)
[1846]2095
[880]2096 order: the order of the polynomial (default is 0)
[1846]2097
[880]2098 threshold: the threshold used by line finder. It is better to
2099 keep it large as only strong lines affect the
2100 baseline solution.
[1846]2101
[1280]2102 chan_avg_limit:
2103 a maximum number of consequtive spectral channels to
2104 average during the search of weak and broad lines.
2105 The default is no averaging (and no search for weak
2106 lines). If such lines can affect the fitted baseline
2107 (e.g. a high order polynomial is fitted), increase this
2108 parameter (usually values up to 8 are reasonable). Most
2109 users of this method should find the default value
2110 sufficient.
[1846]2111
[1061]2112 plot: plot the fit and the residual. In this each
2113 indivual fit has to be approved, by typing 'y'
2114 or 'n'
[1846]2115
[880]2116 insitu: if False a new scantable is returned.
2117 Otherwise, the scaling is done in-situ
2118 The default is taken from .asaprc (False)
[1907]2119 rows: row numbers of spectra to be processed.
2120 (default is None: for all rows)
[880]2121
[1846]2122
2123 Example::
2124
2125 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2126
[880]2127 """
2128 if insitu is None: insitu = rcParams['insitu']
2129 varlist = vars()
2130 from asap.asaplinefind import linefinder
2131 from asap import _is_sequence_or_number as _is_valid
2132
[976]2133 # check whether edge is set up for each IF individually
[1118]2134 individualedge = False;
2135 if len(edge) > 1:
2136 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2137 individualedge = True;
[907]2138
[1118]2139 if not _is_valid(edge, int) and not individualedge:
[909]2140 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2141 pair of integers specified as a tuple. Nested tuples are allowed \
2142 to make individual selection for different IFs."
[919]2143
[1118]2144 curedge = (0, 0)
2145 if individualedge:
2146 for edgepar in edge:
2147 if not _is_valid(edgepar, int):
2148 raise ValueError, "Each element of the 'edge' tuple has \
2149 to be a pair of integers or an integer."
[907]2150 else:
[1118]2151 curedge = edge;
[880]2152
[1907]2153 if not insitu:
2154 workscan = self.copy()
2155 else:
2156 workscan = self
2157
[880]2158 # setup fitter
2159 f = fitter()
[1907]2160 f.set_function(lpoly=order)
[880]2161
2162 # setup line finder
[1118]2163 fl = linefinder()
[1268]2164 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2165
[907]2166 fl.set_scan(workscan)
2167
[1907]2168 if mask is None:
2169 mask = _n_bools(workscan.nchan(), True)
2170
2171 if rows is None:
2172 rows = xrange(workscan.nrow())
2173 elif isinstance(rows, int):
2174 rows = [ rows ]
2175
[1819]2176 # Save parameters of baseline fits & masklists as a class attribute.
2177 # NOTICE: It does not reflect changes in scantable!
2178 if len(rows) > 0:
2179 self.blpars=[]
2180 self.masklists=[]
[1907]2181 self.actualmask=[]
[880]2182 asaplog.push("Processing:")
2183 for r in rows:
[1118]2184 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2185 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2186 workscan.getpol(r), workscan.getcycle(r))
[880]2187 asaplog.push(msg, False)
[907]2188
[976]2189 # figure out edge parameter
[1118]2190 if individualedge:
2191 if len(edge) >= workscan.getif(r):
2192 raise RuntimeError, "Number of edge elements appear to " \
2193 "be less than the number of IFs"
2194 curedge = edge[workscan.getif(r)]
[919]2195
[1907]2196 actualmask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
[1819]2197
[976]2198 # setup line finder
[1819]2199 fl.find_lines(r, actualmask, curedge)
[1907]2200
[1819]2201 f.x = workscan._getabcissa(r)
2202 f.y = workscan._getspectrum(r)
[1907]2203 f.mask = fl.get_mask()
[1819]2204 f.data = None
[880]2205 f.fit()
[1819]2206
2207 # Show mask list
[1907]2208 masklist=workscan.get_masklist(f.mask, row=r)
[1819]2209 msg = "mask range: "+str(masklist)
2210 asaplog.push(msg, False)
2211
[1061]2212 if plot:
2213 f.plot(residual=True)
2214 x = raw_input("Accept fit ( [y]/n ): ")
2215 if x.upper() == 'N':
[1819]2216 self.blpars.append(None)
2217 self.masklists.append(None)
[1907]2218 self.actualmask.append(None)
[1061]2219 continue
[1819]2220
[880]2221 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2222 self.blpars.append(f.get_parameters())
2223 self.masklists.append(masklist)
[1907]2224 self.actualmask.append(f.mask)
[1061]2225 if plot:
2226 f._p.unmap()
2227 f._p = None
2228 workscan._add_history("auto_poly_baseline", varlist)
[880]2229 if insitu:
2230 self._assign(workscan)
2231 else:
2232 return workscan
2233
[1862]2234 @asaplog_post_dec
[914]2235 def rotate_linpolphase(self, angle):
[1846]2236 """\
[914]2237 Rotate the phase of the complex polarization O=Q+iU correlation.
2238 This is always done in situ in the raw data. So if you call this
2239 function more than once then each call rotates the phase further.
[1846]2240
[914]2241 Parameters:
[1846]2242
[914]2243 angle: The angle (degrees) to rotate (add) by.
[1846]2244
2245 Example::
2246
[914]2247 scan.rotate_linpolphase(2.3)
[1846]2248
[914]2249 """
2250 varlist = vars()
[936]2251 self._math._rotate_linpolphase(self, angle)
[914]2252 self._add_history("rotate_linpolphase", varlist)
2253 return
[710]2254
[1862]2255 @asaplog_post_dec
[914]2256 def rotate_xyphase(self, angle):
[1846]2257 """\
[914]2258 Rotate the phase of the XY correlation. This is always done in situ
2259 in the data. So if you call this function more than once
2260 then each call rotates the phase further.
[1846]2261
[914]2262 Parameters:
[1846]2263
[914]2264 angle: The angle (degrees) to rotate (add) by.
[1846]2265
2266 Example::
2267
[914]2268 scan.rotate_xyphase(2.3)
[1846]2269
[914]2270 """
2271 varlist = vars()
[936]2272 self._math._rotate_xyphase(self, angle)
[914]2273 self._add_history("rotate_xyphase", varlist)
2274 return
2275
[1862]2276 @asaplog_post_dec
[914]2277 def swap_linears(self):
[1846]2278 """\
[1573]2279 Swap the linear polarisations XX and YY, or better the first two
[1348]2280 polarisations as this also works for ciculars.
[914]2281 """
2282 varlist = vars()
[936]2283 self._math._swap_linears(self)
[914]2284 self._add_history("swap_linears", varlist)
2285 return
2286
[1862]2287 @asaplog_post_dec
[914]2288 def invert_phase(self):
[1846]2289 """\
[914]2290 Invert the phase of the complex polarisation
2291 """
2292 varlist = vars()
[936]2293 self._math._invert_phase(self)
[914]2294 self._add_history("invert_phase", varlist)
2295 return
2296
[1862]2297 @asaplog_post_dec
[876]2298 def add(self, offset, insitu=None):
[1846]2299 """\
[513]2300 Return a scan where all spectra have the offset added
[1846]2301
[513]2302 Parameters:
[1846]2303
[513]2304 offset: the offset
[1855]2305
[513]2306 insitu: if False a new scantable is returned.
2307 Otherwise, the scaling is done in-situ
2308 The default is taken from .asaprc (False)
[1846]2309
[513]2310 """
2311 if insitu is None: insitu = rcParams['insitu']
[876]2312 self._math._setinsitu(insitu)
[513]2313 varlist = vars()
[876]2314 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2315 s._add_history("add", varlist)
[876]2316 if insitu:
2317 self._assign(s)
2318 else:
[513]2319 return s
2320
[1862]2321 @asaplog_post_dec
[1308]2322 def scale(self, factor, tsys=True, insitu=None):
[1846]2323 """\
2324
[1928]2325 Return a scan where all spectra are scaled by the given 'factor'
[1846]2326
[513]2327 Parameters:
[1846]2328
[1819]2329 factor: the scaling factor (float or 1D float list)
[1855]2330
[513]2331 insitu: if False a new scantable is returned.
2332 Otherwise, the scaling is done in-situ
2333 The default is taken from .asaprc (False)
[1855]2334
[513]2335 tsys: if True (default) then apply the operation to Tsys
2336 as well as the data
[1846]2337
[513]2338 """
2339 if insitu is None: insitu = rcParams['insitu']
[876]2340 self._math._setinsitu(insitu)
[513]2341 varlist = vars()
[1819]2342 s = None
2343 import numpy
2344 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2345 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2346 from asapmath import _array2dOp
2347 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2348 else:
2349 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2350 else:
2351 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2352 s._add_history("scale", varlist)
[876]2353 if insitu:
2354 self._assign(s)
2355 else:
[513]2356 return s
2357
[1504]2358 def set_sourcetype(self, match, matchtype="pattern",
2359 sourcetype="reference"):
[1846]2360 """\
[1502]2361 Set the type of the source to be an source or reference scan
[1846]2362 using the provided pattern.
2363
[1502]2364 Parameters:
[1846]2365
[1504]2366 match: a Unix style pattern, regular expression or selector
[1855]2367
[1504]2368 matchtype: 'pattern' (default) UNIX style pattern or
2369 'regex' regular expression
[1855]2370
[1502]2371 sourcetype: the type of the source to use (source/reference)
[1846]2372
[1502]2373 """
2374 varlist = vars()
2375 basesel = self.get_selection()
2376 stype = -1
2377 if sourcetype.lower().startswith("r"):
2378 stype = 1
2379 elif sourcetype.lower().startswith("s"):
2380 stype = 0
[1504]2381 else:
[1502]2382 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2383 if matchtype.lower().startswith("p"):
2384 matchtype = "pattern"
2385 elif matchtype.lower().startswith("r"):
2386 matchtype = "regex"
2387 else:
2388 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2389 sel = selector()
2390 if isinstance(match, selector):
2391 sel = match
2392 else:
[1504]2393 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2394 self.set_selection(basesel+sel)
2395 self._setsourcetype(stype)
2396 self.set_selection(basesel)
[1573]2397 self._add_history("set_sourcetype", varlist)
[1502]2398
[1862]2399 @asaplog_post_dec
[1857]2400 @preserve_selection
[1819]2401 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2402 """\
[670]2403 This function allows to build quotients automatically.
[1819]2404 It assumes the observation to have the same number of
[670]2405 "ons" and "offs"
[1846]2406
[670]2407 Parameters:
[1846]2408
[710]2409 preserve: you can preserve (default) the continuum or
2410 remove it. The equations used are
[1857]2411
[670]2412 preserve: Output = Toff * (on/off) - Toff
[1857]2413
[1070]2414 remove: Output = Toff * (on/off) - Ton
[1855]2415
[1573]2416 mode: the on/off detection mode
[1348]2417 'paired' (default)
2418 identifies 'off' scans by the
2419 trailing '_R' (Mopra/Parkes) or
2420 '_e'/'_w' (Tid) and matches
2421 on/off pairs from the observing pattern
[1502]2422 'time'
2423 finds the closest off in time
[1348]2424
[1857]2425 .. todo:: verify argument is not implemented
2426
[670]2427 """
[1857]2428 varlist = vars()
[1348]2429 modes = ["time", "paired"]
[670]2430 if not mode in modes:
[876]2431 msg = "please provide valid mode. Valid modes are %s" % (modes)
2432 raise ValueError(msg)
[1348]2433 s = None
2434 if mode.lower() == "paired":
[1857]2435 sel = self.get_selection()
[1875]2436 sel.set_query("SRCTYPE==psoff")
[1356]2437 self.set_selection(sel)
[1348]2438 offs = self.copy()
[1875]2439 sel.set_query("SRCTYPE==pson")
[1356]2440 self.set_selection(sel)
[1348]2441 ons = self.copy()
2442 s = scantable(self._math._quotient(ons, offs, preserve))
2443 elif mode.lower() == "time":
2444 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2445 s._add_history("auto_quotient", varlist)
[876]2446 return s
[710]2447
[1862]2448 @asaplog_post_dec
[1145]2449 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2450 """\
[1143]2451 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2452
[1143]2453 Parameters:
[1846]2454
[1145]2455 mask: an optional mask to be used when weight == 'stddev'
[1855]2456
[1143]2457 weight: How to average the off beams. Default is 'median'.
[1855]2458
[1145]2459 preserve: you can preserve (default) the continuum or
[1855]2460 remove it. The equations used are:
[1846]2461
[1855]2462 preserve: Output = Toff * (on/off) - Toff
2463
2464 remove: Output = Toff * (on/off) - Ton
2465
[1217]2466 """
[1593]2467 mask = mask or ()
[1141]2468 varlist = vars()
2469 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2470 preoff = scantable(self._math._mx_extract(self, 'off'))
2471 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2472 from asapmath import quotient
[1145]2473 q = quotient(on, off, preserve)
[1143]2474 q._add_history("mx_quotient", varlist)
[1217]2475 return q
[513]2476
[1862]2477 @asaplog_post_dec
[718]2478 def freq_switch(self, insitu=None):
[1846]2479 """\
[718]2480 Apply frequency switching to the data.
[1846]2481
[718]2482 Parameters:
[1846]2483
[718]2484 insitu: if False a new scantable is returned.
2485 Otherwise, the swictching is done in-situ
2486 The default is taken from .asaprc (False)
[1846]2487
[718]2488 """
2489 if insitu is None: insitu = rcParams['insitu']
[876]2490 self._math._setinsitu(insitu)
[718]2491 varlist = vars()
[876]2492 s = scantable(self._math._freqswitch(self))
[1118]2493 s._add_history("freq_switch", varlist)
[1856]2494 if insitu:
2495 self._assign(s)
2496 else:
2497 return s
[718]2498
[1862]2499 @asaplog_post_dec
[780]2500 def recalc_azel(self):
[1846]2501 """Recalculate the azimuth and elevation for each position."""
[780]2502 varlist = vars()
[876]2503 self._recalcazel()
[780]2504 self._add_history("recalc_azel", varlist)
2505 return
2506
[1862]2507 @asaplog_post_dec
[513]2508 def __add__(self, other):
2509 varlist = vars()
2510 s = None
2511 if isinstance(other, scantable):
[1573]2512 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2513 elif isinstance(other, float):
[876]2514 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2515 else:
[718]2516 raise TypeError("Other input is not a scantable or float value")
[513]2517 s._add_history("operator +", varlist)
2518 return s
2519
[1862]2520 @asaplog_post_dec
[513]2521 def __sub__(self, other):
2522 """
2523 implicit on all axes and on Tsys
2524 """
2525 varlist = vars()
2526 s = None
2527 if isinstance(other, scantable):
[1588]2528 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2529 elif isinstance(other, float):
[876]2530 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2531 else:
[718]2532 raise TypeError("Other input is not a scantable or float value")
[513]2533 s._add_history("operator -", varlist)
2534 return s
[710]2535
[1862]2536 @asaplog_post_dec
[513]2537 def __mul__(self, other):
2538 """
2539 implicit on all axes and on Tsys
2540 """
2541 varlist = vars()
2542 s = None
2543 if isinstance(other, scantable):
[1588]2544 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2545 elif isinstance(other, float):
[876]2546 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2547 else:
[718]2548 raise TypeError("Other input is not a scantable or float value")
[513]2549 s._add_history("operator *", varlist)
2550 return s
2551
[710]2552
[1862]2553 @asaplog_post_dec
[513]2554 def __div__(self, other):
2555 """
2556 implicit on all axes and on Tsys
2557 """
2558 varlist = vars()
2559 s = None
2560 if isinstance(other, scantable):
[1589]2561 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2562 elif isinstance(other, float):
2563 if other == 0.0:
[718]2564 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2565 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2566 else:
[718]2567 raise TypeError("Other input is not a scantable or float value")
[513]2568 s._add_history("operator /", varlist)
2569 return s
2570
[1862]2571 @asaplog_post_dec
[530]2572 def get_fit(self, row=0):
[1846]2573 """\
[530]2574 Print or return the stored fits for a row in the scantable
[1846]2575
[530]2576 Parameters:
[1846]2577
[530]2578 row: the row which the fit has been applied to.
[1846]2579
[530]2580 """
2581 if row > self.nrow():
2582 return
[976]2583 from asap.asapfit import asapfit
[530]2584 fit = asapfit(self._getfit(row))
[1859]2585 asaplog.push( '%s' %(fit) )
2586 return fit.as_dict()
[530]2587
[1483]2588 def flag_nans(self):
[1846]2589 """\
[1483]2590 Utility function to flag NaN values in the scantable.
2591 """
2592 import numpy
2593 basesel = self.get_selection()
2594 for i in range(self.nrow()):
[1589]2595 sel = self.get_row_selector(i)
2596 self.set_selection(basesel+sel)
[1483]2597 nans = numpy.isnan(self._getspectrum(0))
2598 if numpy.any(nans):
2599 bnans = [ bool(v) for v in nans]
2600 self.flag(bnans)
2601 self.set_selection(basesel)
2602
[1588]2603 def get_row_selector(self, rowno):
2604 return selector(beams=self.getbeam(rowno),
2605 ifs=self.getif(rowno),
2606 pols=self.getpol(rowno),
2607 scans=self.getscan(rowno),
2608 cycles=self.getcycle(rowno))
[1573]2609
[484]2610 def _add_history(self, funcname, parameters):
[1435]2611 if not rcParams['scantable.history']:
2612 return
[484]2613 # create date
2614 sep = "##"
2615 from datetime import datetime
2616 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2617 hist = dstr+sep
2618 hist += funcname+sep#cdate+sep
2619 if parameters.has_key('self'): del parameters['self']
[1118]2620 for k, v in parameters.iteritems():
[484]2621 if type(v) is dict:
[1118]2622 for k2, v2 in v.iteritems():
[484]2623 hist += k2
2624 hist += "="
[1118]2625 if isinstance(v2, scantable):
[484]2626 hist += 'scantable'
2627 elif k2 == 'mask':
[1118]2628 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2629 hist += str(self._zip_mask(v2))
2630 else:
2631 hist += str(v2)
[484]2632 else:
[513]2633 hist += str(v2)
[484]2634 else:
2635 hist += k
2636 hist += "="
[1118]2637 if isinstance(v, scantable):
[484]2638 hist += 'scantable'
2639 elif k == 'mask':
[1118]2640 if isinstance(v, list) or isinstance(v, tuple):
[513]2641 hist += str(self._zip_mask(v))
2642 else:
2643 hist += str(v)
[484]2644 else:
2645 hist += str(v)
2646 hist += sep
2647 hist = hist[:-2] # remove trailing '##'
2648 self._addhistory(hist)
2649
[710]2650
[484]2651 def _zip_mask(self, mask):
2652 mask = list(mask)
2653 i = 0
2654 segments = []
2655 while mask[i:].count(1):
2656 i += mask[i:].index(1)
2657 if mask[i:].count(0):
2658 j = i + mask[i:].index(0)
2659 else:
[710]2660 j = len(mask)
[1118]2661 segments.append([i, j])
[710]2662 i = j
[484]2663 return segments
[714]2664
[626]2665 def _get_ordinate_label(self):
2666 fu = "("+self.get_fluxunit()+")"
2667 import re
2668 lbl = "Intensity"
[1118]2669 if re.match(".K.", fu):
[626]2670 lbl = "Brightness Temperature "+ fu
[1118]2671 elif re.match(".Jy.", fu):
[626]2672 lbl = "Flux density "+ fu
2673 return lbl
[710]2674
[876]2675 def _check_ifs(self):
2676 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2677 nchans = filter(lambda t: t > 0, nchans)
[876]2678 return (sum(nchans)/len(nchans) == nchans[0])
[976]2679
[1862]2680 @asaplog_post_dec
[1916]2681 #def _fill(self, names, unit, average, getpt, antenna):
2682 def _fill(self, names, unit, average, opts={}):
[976]2683 first = True
2684 fullnames = []
2685 for name in names:
2686 name = os.path.expandvars(name)
2687 name = os.path.expanduser(name)
2688 if not os.path.exists(name):
2689 msg = "File '%s' does not exists" % (name)
2690 raise IOError(msg)
2691 fullnames.append(name)
2692 if average:
2693 asaplog.push('Auto averaging integrations')
[1079]2694 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2695 for name in fullnames:
[1073]2696 tbl = Scantable(stype)
[1843]2697 r = filler(tbl)
[1504]2698 rx = rcParams['scantable.reference']
[1843]2699 r.setreferenceexpr(rx)
[976]2700 msg = "Importing %s..." % (name)
[1118]2701 asaplog.push(msg, False)
[1916]2702 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2703 r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2704 r.fill()
[976]2705 if average:
[1118]2706 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2707 if not first:
2708 tbl = self._math._merge([self, tbl])
2709 Scantable.__init__(self, tbl)
[1843]2710 r.close()
[1118]2711 del r, tbl
[976]2712 first = False
[1861]2713 #flush log
2714 asaplog.post()
[976]2715 if unit is not None:
2716 self.set_fluxunit(unit)
[1824]2717 if not is_casapy():
2718 self.set_freqframe(rcParams['scantable.freqframe'])
[976]2719
[1402]2720 def __getitem__(self, key):
2721 if key < 0:
2722 key += self.nrow()
2723 if key >= self.nrow():
2724 raise IndexError("Row index out of range.")
2725 return self._getspectrum(key)
2726
2727 def __setitem__(self, key, value):
2728 if key < 0:
2729 key += self.nrow()
2730 if key >= self.nrow():
2731 raise IndexError("Row index out of range.")
2732 if not hasattr(value, "__len__") or \
2733 len(value) > self.nchan(self.getif(key)):
2734 raise ValueError("Spectrum length doesn't match.")
2735 return self._setspectrum(value, key)
2736
2737 def __len__(self):
2738 return self.nrow()
2739
2740 def __iter__(self):
2741 for i in range(len(self)):
2742 yield self[i]
Note: See TracBrowser for help on using the repository browser.