source: trunk/python/scantable.py@ 1987

Last change on this file since 1987 was 1986, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Bug fix on _check_ifs().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.3 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[1843]12from asap._asap import filler
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped callable may freely narrow the selection; whatever
    selection was active beforehand is reinstated even if *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always restore, even on error
            obj.set_selection(saved)
        return result
    return wrap
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # A scantable is any casa table that is *not* a MeasurementSet;
    # the table type is recorded on the first line of table.info.
    f = open(filename + '/table.info')
    try:
        header = f.readline()
    finally:
        f.close()
    return header.find('Measurement Set') == -1
[1697]57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # the table type is recorded on the first line of table.info
    f = open(filename + '/table.info')
    try:
        header = f.readline()
    finally:
        f.close()
    return header.find('Measurement Set') != -1
78
[876]79class scantable(Scantable):
[1846]80 """\
81 The ASAP container for scans (single-dish data).
[102]82 """
[1819]83
[1862]84 @asaplog_post_dec
[1916]85 #def __init__(self, filename, average=None, unit=None, getpt=None,
86 # antenna=None, parallactify=None):
87 def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
[1846]88 """\
[102]89 Create a scantable from a saved one or make a reference
[1846]90
[102]91 Parameters:
[1846]92
93 filename: the name of an asap table on disk
94 or
95 the name of a rpfits/sdfits/ms file
96 (integrations within scans are auto averaged
97 and the whole file is read) or
98 [advanced] a reference to an existing scantable
99
100 average: average all integrations withinb a scan on read.
101 The default (True) is taken from .asaprc.
102
[484]103 unit: brightness unit; must be consistent with K or Jy.
[1846]104 Over-rides the default selected by the filler
105 (input rpfits/sdfits/ms) or replaces the value
106 in existing scantables
107
108 getpt: for MeasurementSet input data only:
109 If True, all pointing data are filled.
110 The deafult is False, which makes time to load
111 the MS data faster in some cases.
112
[1920]113 antenna: for MeasurementSet input data only:
114 Antenna selection. integer (id) or string (name or id).
[1846]115
116 parallactify: Indicate that the data had been parallatified. Default
117 is taken from rc file.
118
[710]119 """
[976]120 if average is None:
[710]121 average = rcParams['scantable.autoaverage']
[1916]122 #if getpt is None:
123 # getpt = True
124 #if antenna is not None:
125 # asaplog.push("Antenna selection currently unsupported."
126 # "Using ''")
127 # asaplog.post('WARN')
128 #if antenna is None:
129 # antenna = ''
130 #elif type(antenna) == int:
131 # antenna = '%s' % antenna
132 #elif type(antenna) == list:
133 # tmpstr = ''
134 # for i in range( len(antenna) ):
135 # if type(antenna[i]) == int:
136 # tmpstr = tmpstr + ('%s,'%(antenna[i]))
137 # elif type(antenna[i]) == str:
138 # tmpstr=tmpstr+antenna[i]+','
139 # else:
140 # raise TypeError('Bad antenna selection.')
141 # antenna = tmpstr.rstrip(',')
[1593]142 parallactify = parallactify or rcParams['scantable.parallactify']
[1259]143 varlist = vars()
[876]144 from asap._asap import stmath
[1819]145 self._math = stmath( rcParams['insitu'] )
[876]146 if isinstance(filename, Scantable):
147 Scantable.__init__(self, filename)
[181]148 else:
[1697]149 if isinstance(filename, str):
[976]150 filename = os.path.expandvars(filename)
151 filename = os.path.expanduser(filename)
152 if not os.path.exists(filename):
153 s = "File '%s' not found." % (filename)
154 raise IOError(s)
[1697]155 if is_scantable(filename):
156 ondisk = rcParams['scantable.storage'] == 'disk'
157 Scantable.__init__(self, filename, ondisk)
158 if unit is not None:
159 self.set_fluxunit(unit)
[1819]160 # do not reset to the default freqframe
161 #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]162 #elif os.path.isdir(filename) \
163 # and not os.path.exists(filename+'/table.f1'):
164 elif is_ms(filename):
[1916]165 # Measurement Set
166 opts={'ms': {}}
167 mskeys=['getpt','antenna']
168 for key in mskeys:
169 if key in args.keys():
170 opts['ms'][key] = args[key]
171 #self._fill([filename], unit, average, getpt, antenna)
172 self._fill([filename], unit, average, opts)
[1893]173 elif os.path.isfile(filename):
[1916]174 #self._fill([filename], unit, average, getpt, antenna)
175 self._fill([filename], unit, average)
[1883]176 else:
[1819]177 msg = "The given file '%s'is not a valid " \
178 "asap table." % (filename)
[1859]179 raise IOError(msg)
[1118]180 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]181 and isinstance(filename[-1], str):
[1916]182 #self._fill(filename, unit, average, getpt, antenna)
183 self._fill(filename, unit, average)
[1586]184 self.parallactify(parallactify)
[1259]185 self._add_history("scantable", varlist)
[102]186
[1862]187 @asaplog_post_dec
[876]188 def save(self, name=None, format=None, overwrite=False):
[1846]189 """\
[1280]190 Store the scantable on disk. This can be an asap (aips++) Table,
191 SDFITS or MS2 format.
[1846]192
[116]193 Parameters:
[1846]194
[1093]195 name: the name of the outputfile. For format "ASCII"
196 this is the root file name (data in 'name'.txt
[497]197 and header in 'name'_header.txt)
[1855]198
[116]199 format: an optional file format. Default is ASAP.
[1855]200 Allowed are:
201
202 * 'ASAP' (save as ASAP [aips++] Table),
203 * 'SDFITS' (save as SDFITS file)
204 * 'ASCII' (saves as ascii text file)
205 * 'MS2' (saves as an casacore MeasurementSet V2)
206 * 'FITS' (save as image FITS - not readable by class)
207 * 'CLASS' (save as FITS readable by CLASS)
208
[411]209 overwrite: If the file should be overwritten if it exists.
[256]210 The default False is to return with warning
[411]211 without writing the output. USE WITH CARE.
[1855]212
[1846]213 Example::
214
[116]215 scan.save('myscan.asap')
[1118]216 scan.save('myscan.sdfits', 'SDFITS')
[1846]217
[116]218 """
[411]219 from os import path
[1593]220 format = format or rcParams['scantable.save']
[256]221 suffix = '.'+format.lower()
[1118]222 if name is None or name == "":
[256]223 name = 'scantable'+suffix
[718]224 msg = "No filename given. Using default name %s..." % name
225 asaplog.push(msg)
[411]226 name = path.expandvars(name)
[256]227 if path.isfile(name) or path.isdir(name):
228 if not overwrite:
[718]229 msg = "File %s exists." % name
[1859]230 raise IOError(msg)
[451]231 format2 = format.upper()
232 if format2 == 'ASAP':
[116]233 self._save(name)
234 else:
[989]235 from asap._asap import stwriter as stw
[1118]236 writer = stw(format2)
237 writer.write(self, name)
[116]238 return
239
[102]240 def copy(self):
[1846]241 """Return a copy of this scantable.
242
243 *Note*:
244
[1348]245 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]246
247 Example::
248
[102]249 copiedscan = scan.copy()
[1846]250
[102]251 """
[876]252 sd = scantable(Scantable._copy(self))
[113]253 return sd
254
[1093]255 def drop_scan(self, scanid=None):
[1846]256 """\
[1093]257 Return a new scantable where the specified scan number(s) has(have)
258 been dropped.
[1846]259
[1093]260 Parameters:
[1846]261
[1093]262 scanid: a (list of) scan number(s)
[1846]263
[1093]264 """
265 from asap import _is_sequence_or_number as _is_valid
266 from asap import _to_list
267 from asap import unique
268 if not _is_valid(scanid):
[1859]269 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
270 scanid = _to_list(scanid)
271 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
272 for sid in scanid: allscans.remove(sid)
273 if len(allscans) == 0:
274 raise ValueError("Can't remove all scans")
275 sel = selector(scans=allscans)
276 return self._select_copy(sel)
[1093]277
[1594]278 def _select_copy(self, selection):
279 orig = self.get_selection()
280 self.set_selection(orig+selection)
281 cp = self.copy()
282 self.set_selection(orig)
283 return cp
284
[102]285 def get_scan(self, scanid=None):
[1855]286 """\
[102]287 Return a specific scan (by scanno) or collection of scans (by
288 source name) in a new scantable.
[1846]289
290 *Note*:
291
[1348]292 See scantable.drop_scan() for the inverse operation.
[1846]293
[102]294 Parameters:
[1846]295
[513]296 scanid: a (list of) scanno or a source name, unix-style
297 patterns are accepted for source name matching, e.g.
298 '*_R' gets all 'ref scans
[1846]299
300 Example::
301
[513]302 # get all scans containing the source '323p459'
303 newscan = scan.get_scan('323p459')
304 # get all 'off' scans
305 refscans = scan.get_scan('*_R')
306 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]307 newscan = scan.get_scan([0, 2, 7, 10])
[1846]308
[102]309 """
310 if scanid is None:
[1859]311 raise RuntimeError( 'Please specify a scan no or name to '
312 'retrieve from the scantable' )
[102]313 try:
[946]314 bsel = self.get_selection()
315 sel = selector()
[102]316 if type(scanid) is str:
[946]317 sel.set_name(scanid)
[1594]318 return self._select_copy(sel)
[102]319 elif type(scanid) is int:
[946]320 sel.set_scans([scanid])
[1594]321 return self._select_copy(sel)
[381]322 elif type(scanid) is list:
[946]323 sel.set_scans(scanid)
[1594]324 return self._select_copy(sel)
[381]325 else:
[718]326 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]327 raise TypeError(msg)
[102]328 except RuntimeError:
[1859]329 raise
[102]330
    def __str__(self):
        # verbose summary string straight from the C++ layer
        return Scantable._summary(self, True)
[102]333
[976]334 def summary(self, filename=None):
[1846]335 """\
[102]336 Print a summary of the contents of this scantable.
[1846]337
[102]338 Parameters:
[1846]339
[1931]340 filename: the name of a file to write the putput to
[102]341 Default - no file output
[1846]342
[102]343 """
[976]344 info = Scantable._summary(self, True)
[102]345 if filename is not None:
[256]346 if filename is "":
347 filename = 'scantable_summary.txt'
[415]348 from os.path import expandvars, isdir
[411]349 filename = expandvars(filename)
[415]350 if not isdir(filename):
[413]351 data = open(filename, 'w')
352 data.write(info)
353 data.close()
354 else:
[718]355 msg = "Illegal file name '%s'." % (filename)
[1859]356 raise IOError(msg)
357 return page(info)
[710]358
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]368
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the mask from

        """
        return self._getmask(rowno)
378
379 def set_spectrum(self, spec, rowno):
[1938]380 """Set the spectrum for the current row in the scantable.
[1846]381
[1471]382 Parameters:
[1846]383
[1855]384 spec: the new spectrum
[1846]385
[1855]386 rowno: the row number to set the spectrum for
387
[1471]388 """
389 assert(len(spec) == self.nchan())
390 return self._setspectrum(spec, rowno)
391
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

        * This coordinate is only valid until a scantable method modifies
          the frequency axis.
        * This coordinate does contain the original frequency set-up
          NOT the new frame. The conversions however are done using the user
          specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
          use scantable.freq_align first. Without it there is no closure,
          i.e.::

              c = myscan.get_coordinate(0)
              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno: the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
414
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0) # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        # wrap the raw selection in the python selector class
        return selector(self._getselection())
427
[1576]428 def set_selection(self, selection=None, **kw):
[1846]429 """\
[1005]430 Select a subset of the data. All following operations on this scantable
431 are only applied to thi selection.
[1846]432
[1005]433 Parameters:
[1697]434
[1846]435 selection: a selector object (default unset the selection), or
436 any combination of "pols", "ifs", "beams", "scans",
437 "cycles", "name", "query"
[1697]438
[1846]439 Examples::
[1697]440
[1005]441 sel = selector() # create a selection object
[1118]442 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]443 scan.set_selection(sel) # set the selection
444 scan.summary() # will only print summary of scanno 0 an 3
445 scan.set_selection() # unset the selection
[1697]446 # or the equivalent
447 scan.set_selection(scans=[0,3])
448 scan.summary() # will only print summary of scanno 0 an 3
449 scan.set_selection() # unset the selection
[1846]450
[946]451 """
[1576]452 if selection is None:
453 # reset
454 if len(kw) == 0:
455 selection = selector()
456 else:
457 # try keywords
458 for k in kw:
459 if k not in selector.fields:
460 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
461 selection = selector(**kw)
[946]462 self._setselection(selection)
463
[1819]464 def get_row(self, row=0, insitu=None):
[1846]465 """\
[1819]466 Select a row in the scantable.
467 Return a scantable with single row.
[1846]468
[1819]469 Parameters:
[1846]470
471 row: row no of integration, default is 0.
472 insitu: if False a new scantable is returned. Otherwise, the
473 scaling is done in-situ. The default is taken from .asaprc
474 (False)
475
[1819]476 """
477 if insitu is None: insitu = rcParams['insitu']
478 if not insitu:
479 workscan = self.copy()
480 else:
481 workscan = self
482 # Select a row
483 sel=selector()
484 sel.set_scans([workscan.getscan(row)])
485 sel.set_cycles([workscan.getcycle(row)])
486 sel.set_beams([workscan.getbeam(row)])
487 sel.set_ifs([workscan.getif(row)])
488 sel.set_polarisations([workscan.getpol(row)])
489 sel.set_name(workscan._getsourcename(row))
490 workscan.set_selection(sel)
491 if not workscan.nrow() == 1:
492 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
493 raise RuntimeError(msg)
494 del sel
495 if insitu:
496 self._assign(workscan)
497 else:
498 return workscan
499
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                  'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask: an optional mask specifying where the statistic
                  should be determined.

            form: format string to print statistic values

            row:  row number of spectrum to process.
                  (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        # a channel mask only makes sense when all IFs share one grid
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max additionally report *where* the extremum occurred
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]
        # NOTE(review): any other 'row' type leaves 'rows' undefined and
        # raises NameError below -- confirm intended input contract

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa mode: collect the channel position values
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that actually vary in this dataset
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                # per-row statistics are indexed by row number
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # single-row mode: _statsrow returned a one-element list
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # The summary is routed through a per-user temp file before being
        # pushed to the log.
        # NOTE(review): 'statunit' is assigned inside the loop; with zero
        # rows this would raise NameError -- confirm nrow() > 0 is assumed
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
607
608 def chan2data(self, rowno=0, chan=0):
[1846]609 """\
[1819]610 Returns channel/frequency/velocity and spectral value
611 at an arbitrary row and channel in the scantable.
[1846]612
[1819]613 Parameters:
[1846]614
[1819]615 rowno: a row number in the scantable. Default is the
616 first row, i.e. rowno=0
[1855]617
[1819]618 chan: a channel in the scantable. Default is the first
619 channel, i.e. pos=0
[1846]620
[1819]621 """
622 if isinstance(rowno, int) and isinstance(chan, int):
623 qx = {'unit': self.get_unit(),
624 'value': self._getabcissa(rowno)[chan]}
625 qy = {'unit': self.get_fluxunit(),
626 'value': self._getspectrum(rowno)[chan]}
627 return qx, qy
628
[1118]629 def stddev(self, mask=None):
[1846]630 """\
[135]631 Determine the standard deviation of the current beam/if/pol
632 Takes a 'mask' as an optional parameter to specify which
633 channels should be excluded.
[1846]634
[135]635 Parameters:
[1846]636
[135]637 mask: an optional mask specifying where the standard
638 deviation should be determined.
639
[1846]640 Example::
641
[135]642 scan.set_unit('channel')
[1118]643 msk = scan.create_mask([100, 200], [500, 600])
[135]644 scan.stddev(mask=m)
[1846]645
[135]646 """
[1118]647 return self.stats(stat='stddev', mask=mask);
[135]648
[1003]649
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the C++ string vector into a plain python list
        return list(Scantable.get_column_names(self))
[1003]655
[1730]656 def get_tsys(self, row=-1):
[1846]657 """\
[113]658 Return the System temperatures.
[1846]659
660 Parameters:
661
662 row: the rowno to get the information for. (default all rows)
663
[113]664 Returns:
[1846]665
[876]666 a list of Tsys values for the current selection
[1846]667
[113]668 """
[1730]669 if row > -1:
670 return self._get_column(self._gettsys, row)
[876]671 return self._row_callback(self._gettsys, "Tsys")
[256]672
[1730]673
674 def get_weather(self, row=-1):
[1846]675 """\
676 Return the weather informations.
677
678 Parameters:
679
680 row: the rowno to get the information for. (default all rows)
681
682 Returns:
683
684 a dict or list of of dicts of values for the current selection
685
686 """
687
[1730]688 values = self._get_column(self._get_weather, row)
689 if row > -1:
690 return {'temperature': values[0],
691 'pressure': values[1], 'humidity' : values[2],
692 'windspeed' : values[3], 'windaz' : values[4]
693 }
694 else:
695 out = []
696 for r in values:
697
698 out.append({'temperature': r[0],
699 'pressure': r[1], 'humidity' : r[2],
700 'windspeed' : r[3], 'windaz' : r[4]
701 })
702 return out
703
    def _row_callback(self, callback, label):
        # Apply 'callback' to every row, building a per-row summary that
        # is pushed to the log (tagged with 'label'); returns the values.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that actually vary in this dataset
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]727
[1947]728 def _get_column(self, callback, row=-1, *args):
[1070]729 """
730 """
731 if row == -1:
[1947]732 return [callback(i, *args) for i in range(self.nrow())]
[1070]733 else:
[1819]734 if 0 <= row < self.nrow():
[1947]735 return callback(row, *args)
[256]736
[1070]737
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:        row no of integration. Default -1 return all rows

            asdatetime: return values as datetime objects rather than
                        strings

            prec:       number of digits shown. Default -1 to automatic
                        calculation.
                        Note this number is equals to the digits of MVTime,
                        i.e., 0<prec<3: dates with hh:: only,
                        <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                        and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically choose enough digits to resolve the shortest
            # integration time (+1 digit of margin)
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            # datetime precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # strip trailing time fields from the parse format when the
        # requested precision omits them
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]781
[1348]782
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]794
[1573]795
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]806
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]818
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]829
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]841
    def get_direction(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
853
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
865
[1862]866 @asaplog_post_dec
[102]867 def set_unit(self, unit='channel'):
[1846]868 """\
[102]869 Set the unit for all following operations on this scantable
[1846]870
[102]871 Parameters:
[1846]872
873 unit: optional unit, default is 'channel'. Use one of '*Hz',
874 'km/s', 'channel' or equivalent ''
875
[102]876 """
[484]877 varlist = vars()
[1118]878 if unit in ['', 'pixel', 'channel']:
[113]879 unit = ''
880 inf = list(self._getcoordinfo())
881 inf[0] = unit
882 self._setcoordinfo(inf)
[1118]883 self._add_history("set_unit", varlist)
[113]884
[1862]885 @asaplog_post_dec
[484]886 def set_instrument(self, instr):
[1846]887 """\
[1348]888 Set the instrument for subsequent processing.
[1846]889
[358]890 Parameters:
[1846]891
[710]892 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]893 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]894
[358]895 """
896 self._setInstrument(instr)
[1118]897 self._add_history("set_instument", vars())
[358]898
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype: 'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
911
[1862]912 @asaplog_post_dec
[276]913 def set_doppler(self, doppler='RADIO'):
[1846]914 """\
[276]915 Set the doppler for all following operations on this scantable.
[1846]916
[276]917 Parameters:
[1846]918
[276]919 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]920
[276]921 """
[484]922 varlist = vars()
[276]923 inf = list(self._getcoordinfo())
924 inf[2] = doppler
925 self._setcoordinfo(inf)
[1118]926 self._add_history("set_doppler", vars())
[710]927
[1862]928 @asaplog_post_dec
[226]929 def set_freqframe(self, frame=None):
[1846]930 """\
[113]931 Set the frame type of the Spectral Axis.
[1846]932
[113]933 Parameters:
[1846]934
[591]935 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]936 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]937 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]938
939 Example::
940
[113]941 scan.set_freqframe('BARY')
[1846]942
[113]943 """
[1593]944 frame = frame or rcParams['scantable.freqframe']
[484]945 varlist = vars()
[1819]946 # "REST" is not implemented in casacore
947 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
948 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
949 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]950 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]951
[989]952 if frame in valid:
[113]953 inf = list(self._getcoordinfo())
954 inf[1] = frame
955 self._setcoordinfo(inf)
[1118]956 self._add_history("set_freqframe", varlist)
[102]957 else:
[1118]958 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]959 raise TypeError(msg)
[710]960
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame: an optional frame type, default ''. Valid frames are:
                   'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # validation and conversion happen in the C++ layer
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]979
[113]980 def get_unit(self):
[1846]981 """\
[113]982 Get the default unit set in this scantable
[1846]983
[113]984 Returns:
[1846]985
[113]986 A unit string
[1846]987
[113]988 """
989 inf = self._getcoordinfo()
990 unit = inf[0]
991 if unit == '': unit = 'channel'
992 return unit
[102]993
[1862]994 @asaplog_post_dec
[158]995 def get_abcissa(self, rowno=0):
[1846]996 """\
[158]997 Get the abcissa in the current coordinate setup for the currently
[113]998 selected Beam/IF/Pol
[1846]999
[113]1000 Parameters:
[1846]1001
[226]1002 rowno: an optional row number in the scantable. Default is the
1003 first row, i.e. rowno=0
[1846]1004
[113]1005 Returns:
[1846]1006
[1348]1007 The abcissa values and the format string (as a dictionary)
[1846]1008
[113]1009 """
[256]1010 abc = self._getabcissa(rowno)
[710]1011 lbl = self._getabcissalabel(rowno)
[158]1012 return abc, lbl
[113]1013
    @asaplog_post_dec
    def flag(self, mask=None, unflag=False):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            mask:   an optional channel mask, created with create_mask.
                    Default (no mask) is all channels.

            unflag: if True, unflag the data

        """
        varlist = vars()
        # no/empty mask means "all channels" at the C++ level
        mask = mask or []
        self._flag(mask, unflag)
        self._add_history("flag", varlist)
1031
[1862]1032 @asaplog_post_dec
[1819]1033 def flag_row(self, rows=[], unflag=False):
[1846]1034 """\
[1819]1035 Flag the selected data in row-based manner.
[1846]1036
[1819]1037 Parameters:
[1846]1038
[1843]1039 rows: list of row numbers to be flagged. Default is no row
1040 (must be explicitly specified to execute row-based flagging).
[1855]1041
[1819]1042 unflag: if True, unflag the data.
[1846]1043
[1819]1044 """
1045 varlist = vars()
[1859]1046 self._flag_row(rows, unflag)
[1819]1047 self._add_history("flag_row", varlist)
1048
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres: upper threshold.

            dthres: lower threshold

            clipoutside: True for flagging data outside the range
                         [dthres:uthres].
                         False for flagging data inside the range.

            unflag: if True, unflag the data.

        """
        varlist = vars()
        # thresholds and flag direction are handled entirely in C++
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1069
    @asaplog_post_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """\
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.

        Parameters:

            start: the start frequency (really a period within the
                   bandwidth) or period to remove

            end: the end frequency or period to remove

            unit: the frequency unit (default "MHz") or "" for
                  explicit lag channels

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        *Notes*:

            It is recommended to flag edges of the band or strong
            signals beforehand.

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # conversion factors from the supported units to Hz
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        if unit == "":
            # an empty unit means start/end are given as lag channels
            s = scantable(self._math._lag_flag(self, start, end, "lags"))
        else:
            # otherwise convert start/end to Hz before passing them on
            s = scantable(self._math._lag_flag(self, start*base[unit],
                                               end*base[unit], "frequency"))
        s._add_history("lag_flag", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[1001]1109
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert: optional argument. If specified as True,
                    return an inverted mask, i.e. the regions
                    specified are EXCLUDED

            row: create the mask using the specified row for
                 unit conversions, default is row=0
                 only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = self.nchan()
        # start from an all-False mask and switch selected channels on
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # supports both create_mask([a,b],[c,d]) and create_mask([[a,b],[c,d]]):
        # if the innermost element is numeric, args is already the window list
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            if len(window) == 1:
                # a single value selects exactly that one channel
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # swap so the window is always [low, high]
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
[710]1181
[1931]1182 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1183 """\
[1819]1184 Compute and return a list of mask windows, [min, max].
[1846]1185
[1819]1186 Parameters:
[1846]1187
[1819]1188 mask: channel mask, created with create_mask.
[1855]1189
[1819]1190 row: calcutate the masklist using the specified row
1191 for unit conversions, default is row=0
1192 only necessary if frequency varies over rows.
[1846]1193
[1819]1194 Returns:
[1846]1195
[1819]1196 [min, max], [min2, max2], ...
1197 Pairs of start/end points (inclusive)specifying
1198 the masked regions
[1846]1199
[1819]1200 """
1201 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1202 raise TypeError("The mask should be list or tuple.")
1203 if len(mask) < 2:
1204 raise TypeError("The mask elements should be > 1")
1205 if self.nchan() != len(mask):
1206 msg = "Number of channels in scantable != number of mask elements"
1207 raise TypeError(msg)
1208 data = self._getabcissa(row)
1209 u = self._getcoordinfo()[0]
[1859]1210 if u == "":
1211 u = "channel"
1212 msg = "The current mask window unit is %s" % u
1213 i = self._check_ifs()
1214 if not i:
1215 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1216 if not silent:
1217 asaplog.push(msg)
[1819]1218 masklist=[]
1219 ist, ien = None, None
1220 ist, ien=self.get_mask_indices(mask)
1221 if ist is not None and ien is not None:
1222 for i in xrange(len(ist)):
1223 range=[data[ist[i]],data[ien[i]]]
1224 range.sort()
1225 masklist.append([range[0],range[1]])
1226 return masklist
1227
1228 def get_mask_indices(self, mask=None):
[1846]1229 """\
[1819]1230 Compute and Return lists of mask start indices and mask end indices.
[1855]1231
1232 Parameters:
1233
[1819]1234 mask: channel mask, created with create_mask.
[1846]1235
[1819]1236 Returns:
[1846]1237
[1819]1238 List of mask start indices and that of mask end indices,
1239 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1240
[1819]1241 """
1242 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1243 raise TypeError("The mask should be list or tuple.")
1244 if len(mask) < 2:
1245 raise TypeError("The mask elements should be > 1")
1246 istart=[]
1247 iend=[]
1248 if mask[0]: istart.append(0)
1249 for i in range(len(mask)-1):
1250 if not mask[i] and mask[i+1]:
1251 istart.append(i+1)
1252 elif mask[i] and not mask[i+1]:
1253 iend.append(i)
1254 if mask[len(mask)-1]: iend.append(len(mask)-1)
1255 if len(istart) != len(iend):
1256 raise RuntimeError("Numbers of mask start != mask end.")
1257 for i in range(len(istart)):
1258 if istart[i] > iend[i]:
1259 raise RuntimeError("Mask start index > mask end index")
1260 break
1261 return istart,iend
1262
1263# def get_restfreqs(self):
1264# """
1265# Get the restfrequency(s) stored in this scantable.
1266# The return value(s) are always of unit 'Hz'
1267# Parameters:
1268# none
1269# Returns:
1270# a list of doubles
1271# """
1272# return list(self._getrestfreqs())
1273
1274 def get_restfreqs(self, ids=None):
[1846]1275 """\
[256]1276 Get the restfrequency(s) stored in this scantable.
1277 The return value(s) are always of unit 'Hz'
[1846]1278
[256]1279 Parameters:
[1846]1280
[1819]1281 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1282 be retrieved
[1846]1283
[256]1284 Returns:
[1846]1285
[1819]1286 dictionary containing ids and a list of doubles for each id
[1846]1287
[256]1288 """
[1819]1289 if ids is None:
1290 rfreqs={}
1291 idlist = self.getmolnos()
1292 for i in idlist:
1293 rfreqs[i]=list(self._getrestfreqs(i))
1294 return rfreqs
1295 else:
1296 if type(ids)==list or type(ids)==tuple:
1297 rfreqs={}
1298 for i in ids:
1299 rfreqs[i]=list(self._getrestfreqs(i))
1300 return rfreqs
1301 else:
1302 return list(self._getrestfreqs(ids))
1303 #return list(self._getrestfreqs(ids))
[102]1304
[931]1305 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1306 """\
[931]1307 Set or replace the restfrequency specified and
[1938]1308 if the 'freqs' argument holds a scalar,
[931]1309 then that rest frequency will be applied to all the selected
1310 data. If the 'freqs' argument holds
1311 a vector, then it MUST be of equal or smaller length than
1312 the number of IFs (and the available restfrequencies will be
1313 replaced by this vector). In this case, *all* data have
1314 the restfrequency set per IF according
1315 to the corresponding value you give in the 'freqs' vector.
[1118]1316 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1317 IF 1 gets restfreq 2e9.
[1846]1318
[1395]1319 You can also specify the frequencies via a linecatalog.
[1153]1320
[931]1321 Parameters:
[1846]1322
[931]1323 freqs: list of rest frequency values or string idenitfiers
[1855]1324
[931]1325 unit: unit for rest frequency (default 'Hz')
[402]1326
[1846]1327
1328 Example::
1329
[1819]1330 # set the given restfrequency for the all currently selected IFs
[931]1331 scan.set_restfreqs(freqs=1.4e9)
[1845]1332 # set restfrequencies for the n IFs (n > 1) in the order of the
1333 # list, i.e
1334 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1335 # len(list_of_restfreqs) == nIF
1336 # for nIF == 1 the following will set multiple restfrequency for
1337 # that IF
[1819]1338 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1339 # set multiple restfrequencies per IF. as a list of lists where
1340 # the outer list has nIF elements, the inner s arbitrary
1341 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1342
[1846]1343 *Note*:
[1845]1344
[931]1345 To do more sophisticate Restfrequency setting, e.g. on a
1346 source and IF basis, use scantable.set_selection() before using
[1846]1347 this function::
[931]1348
[1846]1349 # provided your scantable is called scan
1350 selection = selector()
1351 selection.set_name("ORION*")
1352 selection.set_ifs([1])
1353 scan.set_selection(selection)
1354 scan.set_restfreqs(freqs=86.6e9)
1355
[931]1356 """
1357 varlist = vars()
[1157]1358 from asap import linecatalog
1359 # simple value
[1118]1360 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1361 self._setrestfreqs([freqs], [""], unit)
[1157]1362 # list of values
[1118]1363 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1364 # list values are scalars
[1118]1365 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1366 if len(freqs) == 1:
1367 self._setrestfreqs(freqs, [""], unit)
1368 else:
1369 # allow the 'old' mode of setting mulitple IFs
1370 sel = selector()
1371 savesel = self._getselection()
1372 iflist = self.getifnos()
1373 if len(freqs)>len(iflist):
1374 raise ValueError("number of elements in list of list "
1375 "exeeds the current IF selections")
1376 iflist = self.getifnos()
1377 for i, fval in enumerate(freqs):
1378 sel.set_ifs(iflist[i])
1379 self._setselection(sel)
1380 self._setrestfreqs([fval], [""], unit)
1381 self._setselection(savesel)
1382
1383 # list values are dict, {'value'=, 'name'=)
[1157]1384 elif isinstance(freqs[-1], dict):
[1845]1385 values = []
1386 names = []
1387 for d in freqs:
1388 values.append(d["value"])
1389 names.append(d["name"])
1390 self._setrestfreqs(values, names, unit)
[1819]1391 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1392 sel = selector()
1393 savesel = self._getselection()
[1322]1394 iflist = self.getifnos()
[1819]1395 if len(freqs)>len(iflist):
[1845]1396 raise ValueError("number of elements in list of list exeeds"
1397 " the current IF selections")
1398 for i, fval in enumerate(freqs):
[1322]1399 sel.set_ifs(iflist[i])
[1259]1400 self._setselection(sel)
[1845]1401 self._setrestfreqs(fval, [""], unit)
[1157]1402 self._setselection(savesel)
1403 # freqs are to be taken from a linecatalog
[1153]1404 elif isinstance(freqs, linecatalog):
1405 sel = selector()
1406 savesel = self._getselection()
1407 for i in xrange(freqs.nrow()):
[1322]1408 sel.set_ifs(iflist[i])
[1153]1409 self._setselection(sel)
[1845]1410 self._setrestfreqs([freqs.get_frequency(i)],
1411 [freqs.get_name(i)], "MHz")
[1153]1412 # ensure that we are not iterating past nIF
1413 if i == self.nif()-1: break
1414 self._setselection(savesel)
[931]1415 else:
1416 return
1417 self._add_history("set_restfreqs", varlist)
1418
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta: the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate directly to the C++ base class; note that no history
        # entry is recorded for this operation
        Scantable.shift_refpix(self, delta)
[931]1434
[1862]1435 @asaplog_post_dec
[1259]1436 def history(self, filename=None):
[1846]1437 """\
[1259]1438 Print the history. Optionally to a file.
[1846]1439
[1348]1440 Parameters:
[1846]1441
[1928]1442 filename: The name of the file to save the history to.
[1846]1443
[1259]1444 """
[484]1445 hist = list(self._gethistory())
[794]1446 out = "-"*80
[484]1447 for h in hist:
[489]1448 if h.startswith("---"):
[1857]1449 out = "\n".join([out, h])
[489]1450 else:
1451 items = h.split("##")
1452 date = items[0]
1453 func = items[1]
1454 items = items[2:]
[794]1455 out += "\n"+date+"\n"
1456 out += "Function: %s\n Parameters:" % (func)
[489]1457 for i in items:
[1938]1458 if i == '':
1459 continue
[489]1460 s = i.split("=")
[1118]1461 out += "\n %s = %s" % (s[0], s[1])
[1857]1462 out = "\n".join([out, "-"*80])
[1259]1463 if filename is not None:
1464 if filename is "":
1465 filename = 'scantable_history.txt'
1466 import os
1467 filename = os.path.expandvars(os.path.expanduser(filename))
1468 if not os.path.isdir(filename):
1469 data = open(filename, 'w')
1470 data.write(out)
1471 data.close()
1472 else:
1473 msg = "Illegal file name '%s'." % (filename)
[1859]1474 raise IOError(msg)
1475 return page(out)
[513]1476 #
1477 # Maths business
1478 #
    @asaplog_post_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """\
        Return the (time) weighted average of a scan.

        *Note*:

            in channels only - align if necessary

        Parameters:

            mask: an optional mask (only used for 'var' and 'tsys'
                  weighting)

            scanav: True averages each scan separately
                    False (default) averages all scans together,

            weight: Weighting scheme.
                    'none' (mean no weight)
                    'var' (1/var(spec) weighted)
                    'tsys' (1/Tsys**2 weighted)
                    'tint' (integration time weighted)
                    'tintsys' (Tint/Tsys**2)
                    'median' ( median averaging)
                    The default is 'tint'

            align: align the spectra in velocity before averaging. It takes
                   the time of the first spectrum as reference time.

        Example::

            # time average the scantable without using a mask
            newscan = scan.average_time()

        """
        varlist = vars()
        # normalise the defaults expected by the C++ layer
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        scan = (self, )

        if align:
            # average a frequency-aligned copy instead of self
            scan = (self.freq_align(insitu=False), )
        s = None
        if weight.upper() == 'MEDIAN':
            # median averaging uses a dedicated C++ entry point
            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                     scanav))
        else:
            s = scantable(self._math._average(scan, mask, weight.upper(),
                                              scanav))
        s._add_history("average_time", varlist)
        return s
[710]1531
    @asaplog_post_dec
    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
        """\
        Return a scan where all spectra are converted to either
        Jansky or Kelvin depending upon the flux units of the scan table.
        By default the function tries to look the values up internally.
        If it can't find them (or if you want to over-ride), you must
        specify EITHER jyperk OR eta (and D which it will try to look up
        also if you don't set it). jyperk takes precedence if you set both.

        Parameters:

            jyperk: the Jy / K conversion factor

            eta: the aperture efficiency

            d: the geometric diameter (metres)

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # -1.0 tells the C++ layer to look the value up internally
        jyperk = jyperk or -1.0
        d = d or -1.0
        eta = eta or -1.0
        s = scantable(self._math._convertflux(self, d, eta, jyperk))
        s._add_history("convert_flux", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1565
    @asaplog_post_dec
    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
        """\
        Return a scan after applying a gain-elevation correction.
        The correction can be made via either a polynomial or a
        table-based interpolation (and extrapolation if necessary).
        You specify polynomial coefficients, an ascii table or neither.
        If you specify neither, then a polynomial correction will be made
        with built in coefficients known for certain telescopes (an error
        will occur if the instrument is not known).
        The data and Tsys are *divided* by the scaling factors.

        Parameters:

            poly: Polynomial coefficients (default None) to compute a
                  gain-elevation correction as a function of
                  elevation (in degrees).

            filename: The name of an ascii file holding correction factors.
                      The first row of the ascii file must give the column
                      names and these MUST include columns
                      "ELEVATION" (degrees) and "FACTOR" (multiply data
                      by this) somewhere.
                      The second row must give the data type of the
                      column. Use 'R' for Real and 'I' for Integer.
                      An example file would be
                      (actual factors are arbitrary) :

                      TIME ELEVATION FACTOR
                      R R R
                      0.1 0 0.8
                      0.2 20 0.85
                      0.3 40 0.9
                      0.4 60 0.85
                      0.5 80 0.8
                      0.6 90 0.75

            method: Interpolation method when correcting from a table.
                    Values are "nearest", "linear" (default), "cubic"
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """

        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # an empty tuple means "use built-in coefficients" in the C++ layer
        poly = poly or ()
        from os.path import expandvars
        filename = expandvars(filename)
        s = scantable(self._math._gainel(self, poly, filename, method))
        s._add_history("gain_el", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[710]1625
    @asaplog_post_dec
    def freq_align(self, reftime=None, method='cubic', insitu=None):
        """\
        Return a scan where all rows have been aligned in frequency/velocity.
        The alignment frequency frame (e.g. LSRK) is that set by function
        set_freqframe.

        Parameters:

            reftime: reference time to align at. By default, the time of
                     the first row of data is used.

            method: Interpolation method for regridding the spectra.
                    Choose from "nearest", "linear", "cubic" (default)
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams["insitu"]
        self._math._setinsitu(insitu)
        varlist = vars()
        # an empty string means "use the time of the first row"
        reftime = reftime or ""
        s = scantable(self._math._freq_align(self, reftime, method))
        s._add_history("freq_align", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1655
    @asaplog_post_dec
    def opacity(self, tau=None, insitu=None):
        """\
        Apply an opacity correction. The data
        and Tsys are multiplied by the correction factor.

        Parameters:

            tau: (list of) opacity from which the correction factor is
                 exp(tau*ZD)
                 where ZD is the zenith-distance.
                 If a list is provided, it has to be of length nIF,
                 nIF*nPol or 1 and in order of IF/POL, e.g.
                 [opif0pol0, opif0pol1, opif1pol0 ...]
                 if tau is `None` the opacities are determined from a
                 model.

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # the C++ layer always expects a sequence of opacities
        if not hasattr(tau, "__len__"):
            tau = [tau]
        s = scantable(self._math._opacity(self, tau))
        s._add_history("opacity", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1687
[1862]1688 @asaplog_post_dec
[513]1689 def bin(self, width=5, insitu=None):
[1846]1690 """\
[513]1691 Return a scan where all spectra have been binned up.
[1846]1692
[1348]1693 Parameters:
[1846]1694
[513]1695 width: The bin width (default=5) in pixels
[1855]1696
[513]1697 insitu: if False a new scantable is returned.
1698 Otherwise, the scaling is done in-situ
1699 The default is taken from .asaprc (False)
[1846]1700
[513]1701 """
1702 if insitu is None: insitu = rcParams['insitu']
[876]1703 self._math._setinsitu(insitu)
[513]1704 varlist = vars()
[876]1705 s = scantable(self._math._bin(self, width))
[1118]1706 s._add_history("bin", varlist)
[1589]1707 if insitu:
1708 self._assign(s)
1709 else:
1710 return s
[513]1711
[1862]1712 @asaplog_post_dec
[513]1713 def resample(self, width=5, method='cubic', insitu=None):
[1846]1714 """\
[1348]1715 Return a scan where all spectra have been binned up.
[1573]1716
[1348]1717 Parameters:
[1846]1718
[513]1719 width: The bin width (default=5) in pixels
[1855]1720
[513]1721 method: Interpolation method when correcting from a table.
1722 Values are "nearest", "linear", "cubic" (default)
1723 and "spline"
[1855]1724
[513]1725 insitu: if False a new scantable is returned.
1726 Otherwise, the scaling is done in-situ
1727 The default is taken from .asaprc (False)
[1846]1728
[513]1729 """
1730 if insitu is None: insitu = rcParams['insitu']
[876]1731 self._math._setinsitu(insitu)
[513]1732 varlist = vars()
[876]1733 s = scantable(self._math._resample(self, method, width))
[1118]1734 s._add_history("resample", varlist)
[876]1735 if insitu: self._assign(s)
1736 else: return s
[513]1737
[1862]1738 @asaplog_post_dec
[946]1739 def average_pol(self, mask=None, weight='none'):
[1846]1740 """\
[946]1741 Average the Polarisations together.
[1846]1742
[946]1743 Parameters:
[1846]1744
[946]1745 mask: An optional mask defining the region, where the
1746 averaging will be applied. The output will have all
1747 specified points masked.
[1855]1748
[946]1749 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1750 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1751
[946]1752 """
1753 varlist = vars()
[1593]1754 mask = mask or ()
[1010]1755 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1756 s._add_history("average_pol", varlist)
[992]1757 return s
[513]1758
[1862]1759 @asaplog_post_dec
[1145]1760 def average_beam(self, mask=None, weight='none'):
[1846]1761 """\
[1145]1762 Average the Beams together.
[1846]1763
[1145]1764 Parameters:
1765 mask: An optional mask defining the region, where the
1766 averaging will be applied. The output will have all
1767 specified points masked.
[1855]1768
[1145]1769 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1770 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1771
[1145]1772 """
1773 varlist = vars()
[1593]1774 mask = mask or ()
[1145]1775 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1776 s._add_history("average_beam", varlist)
1777 return s
1778
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag: Bool indicating whether to turn this on (True) or
                   off (False)

        """
        # record the arguments before any locals are created
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
1793
[1862]1794 @asaplog_post_dec
[992]1795 def convert_pol(self, poltype=None):
[1846]1796 """\
[992]1797 Convert the data to a different polarisation type.
[1565]1798 Note that you will need cross-polarisation terms for most conversions.
[1846]1799
[992]1800 Parameters:
[1855]1801
[992]1802 poltype: The new polarisation type. Valid types are:
[1565]1803 "linear", "circular", "stokes" and "linpol"
[1846]1804
[992]1805 """
1806 varlist = vars()
[1859]1807 s = scantable(self._math._convertpol(self, poltype))
[1118]1808 s._add_history("convert_pol", varlist)
[992]1809 return s
1810
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defauls to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polnomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each indivual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data for interactive comparison
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # choose the GUI or non-GUI plotting backend per .asaprc
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            # show each row's original vs smoothed spectrum and let the
            # user accept or reject the smoothing interactively
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1888
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'

            uselin: use linear polynomial fit

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows: row numbers of spectra to be processed.
                  (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # no mask given: fit using every channel
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # per-row fit results are published on the scantable itself
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # combine the user mask with the row's own flag mask (CAS-1434)
                f.mask = mask_and(mask, workscan._getmask(r))
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected fits leave None placeholders so indices
                        # still line up with row numbers
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum with the fit residual
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1975
[1931]1976 @asaplog_post_dec
[1907]1977 def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1978 """\
1979 Return a scan which has been baselined (all rows) by a polynomial.
1980 Parameters:
1981 mask: an optional mask
1982 order: the order of the polynomial (default is 0)
1983 plot: plot the fit and the residual. In this each
1984 indivual fit has to be approved, by typing 'y'
1985 or 'n'. Ignored if batch = True.
1986 batch: if True a faster algorithm is used and logs
1987 including the fit results are not output
1988 (default is False)
1989 insitu: if False a new scantable is returned.
1990 Otherwise, the scaling is done in-situ
1991 The default is taken from .asaprc (False)
[1931]1992 rows: row numbers of spectra to be baselined.
[1907]1993 (default is None: for all rows)
1994 Example:
1995 # return a scan baselined by a third order polynomial,
1996 # not using a mask
1997 bscan = scan.poly_baseline(order=3)
1998 """
[1931]1999
2000 varlist = vars()
2001
[1907]2002 if insitu is None: insitu = rcParams["insitu"]
2003 if insitu:
2004 workscan = self
2005 else:
2006 workscan = self.copy()
2007
2008 nchan = workscan.nchan()
2009
2010 if mask is None:
2011 mask = [True for i in xrange(nchan)]
2012
2013 try:
2014 if rows == None:
2015 rows = xrange(workscan.nrow())
2016 elif isinstance(rows, int):
2017 rows = [ rows ]
2018
2019 if len(rows) > 0:
[1931]2020 workscan.blpars = []
2021 workscan.masklists = []
2022 workscan.actualmask = []
[1907]2023
2024 if batch:
[1931]2025 workscan._poly_baseline_batch(mask, order)
[1907]2026 elif plot:
2027 f = fitter()
2028 f.set_function(lpoly=order)
2029 for r in rows:
2030 f.x = workscan._getabcissa(r)
2031 f.y = workscan._getspectrum(r)
2032 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
2033 f.data = None
2034 f.fit()
2035
2036 f.plot(residual=True)
2037 accept_fit = raw_input("Accept fit ( [y]/n ): ")
2038 if accept_fit.upper() == "N":
2039 self.blpars.append(None)
2040 self.masklists.append(None)
2041 self.actualmask.append(None)
2042 continue
2043 workscan._setspectrum(f.fitter.getresidual(), r)
[1931]2044 workscan.blpars.append(f.get_parameters())
2045 workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2046 workscan.actualmask.append(f.mask)
[1907]2047
2048 f._p.unmap()
2049 f._p = None
2050 else:
2051 for r in rows:
[1931]2052 fitparams = workscan._poly_baseline(mask, order, r)
2053 params = fitparams.getparameters()
2054 fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2055 errors = fitparams.geterrors()
2056 fmask = mask_and(mask, workscan._getmask(r))
2057
2058 workscan.blpars.append({"params":params,
2059 "fixed": fitparams.getfixedparameters(),
2060 "formatted":fmtd, "errors":errors})
2061 workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2062 workscan.actualmask.append(fmask)
[1907]2063
[1931]2064 asaplog.push(fmtd)
[1907]2065
2066 workscan._add_history("poly_baseline", varlist)
2067
2068 if insitu:
2069 self._assign(workscan)
2070 else:
2071 return workscan
2072
[1919]2073 except RuntimeError, e:
[1907]2074 msg = "The fit failed, possibly because it didn't converge."
2075 if rcParams["verbose"]:
[1919]2076 asaplog.push(str(e))
[1907]2077 asaplog.push(str(msg))
2078 return
2079 else:
[1919]2080 raise RuntimeError(str(e)+'\n'+msg)
[1907]2081
2082
2083 def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2084 threshold=3, chan_avg_limit=1, plot=False,
[1907]2085 insitu=None, rows=None):
[1846]2086 """\
[1931]2087 Return a scan which has been baselined (all rows) by a polynomial.
[880]2088 Spectral lines are detected first using linefinder and masked out
2089 to avoid them affecting the baseline solution.
2090
2091 Parameters:
[1846]2092
[880]2093 mask: an optional mask retreived from scantable
[1846]2094
2095 edge: an optional number of channel to drop at the edge of
2096 spectrum. If only one value is
[880]2097 specified, the same number will be dropped from
2098 both sides of the spectrum. Default is to keep
[907]2099 all channels. Nested tuples represent individual
[976]2100 edge selection for different IFs (a number of spectral
2101 channels can be different)
[1846]2102
[880]2103 order: the order of the polynomial (default is 0)
[1846]2104
[880]2105 threshold: the threshold used by line finder. It is better to
2106 keep it large as only strong lines affect the
2107 baseline solution.
[1846]2108
[1280]2109 chan_avg_limit:
2110 a maximum number of consequtive spectral channels to
2111 average during the search of weak and broad lines.
2112 The default is no averaging (and no search for weak
2113 lines). If such lines can affect the fitted baseline
2114 (e.g. a high order polynomial is fitted), increase this
2115 parameter (usually values up to 8 are reasonable). Most
2116 users of this method should find the default value
2117 sufficient.
[1846]2118
[1061]2119 plot: plot the fit and the residual. In this each
2120 indivual fit has to be approved, by typing 'y'
2121 or 'n'
[1846]2122
[880]2123 insitu: if False a new scantable is returned.
2124 Otherwise, the scaling is done in-situ
2125 The default is taken from .asaprc (False)
[1907]2126 rows: row numbers of spectra to be processed.
2127 (default is None: for all rows)
[880]2128
[1846]2129
2130 Example::
2131
2132 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2133
[880]2134 """
2135 if insitu is None: insitu = rcParams['insitu']
2136 varlist = vars()
2137 from asap.asaplinefind import linefinder
2138 from asap import _is_sequence_or_number as _is_valid
2139
[976]2140 # check whether edge is set up for each IF individually
[1118]2141 individualedge = False;
2142 if len(edge) > 1:
2143 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2144 individualedge = True;
[907]2145
[1118]2146 if not _is_valid(edge, int) and not individualedge:
[909]2147 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2148 pair of integers specified as a tuple. Nested tuples are allowed \
2149 to make individual selection for different IFs."
[919]2150
[1118]2151 curedge = (0, 0)
2152 if individualedge:
2153 for edgepar in edge:
2154 if not _is_valid(edgepar, int):
2155 raise ValueError, "Each element of the 'edge' tuple has \
2156 to be a pair of integers or an integer."
[907]2157 else:
[1118]2158 curedge = edge;
[880]2159
[1907]2160 if not insitu:
2161 workscan = self.copy()
2162 else:
2163 workscan = self
2164
[880]2165 # setup fitter
2166 f = fitter()
[1907]2167 f.set_function(lpoly=order)
[880]2168
2169 # setup line finder
[1118]2170 fl = linefinder()
[1268]2171 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2172
[907]2173 fl.set_scan(workscan)
2174
[1907]2175 if mask is None:
2176 mask = _n_bools(workscan.nchan(), True)
2177
2178 if rows is None:
2179 rows = xrange(workscan.nrow())
2180 elif isinstance(rows, int):
2181 rows = [ rows ]
2182
[1819]2183 # Save parameters of baseline fits & masklists as a class attribute.
2184 # NOTICE: It does not reflect changes in scantable!
2185 if len(rows) > 0:
2186 self.blpars=[]
2187 self.masklists=[]
[1907]2188 self.actualmask=[]
[880]2189 asaplog.push("Processing:")
2190 for r in rows:
[1118]2191 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2192 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2193 workscan.getpol(r), workscan.getcycle(r))
[880]2194 asaplog.push(msg, False)
[907]2195
[976]2196 # figure out edge parameter
[1118]2197 if individualedge:
2198 if len(edge) >= workscan.getif(r):
2199 raise RuntimeError, "Number of edge elements appear to " \
2200 "be less than the number of IFs"
2201 curedge = edge[workscan.getif(r)]
[919]2202
[1907]2203 actualmask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
[1819]2204
[976]2205 # setup line finder
[1819]2206 fl.find_lines(r, actualmask, curedge)
[1907]2207
[1819]2208 f.x = workscan._getabcissa(r)
2209 f.y = workscan._getspectrum(r)
[1907]2210 f.mask = fl.get_mask()
[1819]2211 f.data = None
[880]2212 f.fit()
[1819]2213
2214 # Show mask list
[1931]2215 masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2216 msg = "mask range: "+str(masklist)
2217 asaplog.push(msg, False)
2218
[1061]2219 if plot:
2220 f.plot(residual=True)
2221 x = raw_input("Accept fit ( [y]/n ): ")
2222 if x.upper() == 'N':
[1819]2223 self.blpars.append(None)
2224 self.masklists.append(None)
[1907]2225 self.actualmask.append(None)
[1061]2226 continue
[1819]2227
[880]2228 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2229 self.blpars.append(f.get_parameters())
2230 self.masklists.append(masklist)
[1907]2231 self.actualmask.append(f.mask)
[1061]2232 if plot:
2233 f._p.unmap()
2234 f._p = None
2235 workscan._add_history("auto_poly_baseline", varlist)
[880]2236 if insitu:
2237 self._assign(workscan)
2238 else:
2239 return workscan
2240
[1862]2241 @asaplog_post_dec
[914]2242 def rotate_linpolphase(self, angle):
[1846]2243 """\
[914]2244 Rotate the phase of the complex polarization O=Q+iU correlation.
2245 This is always done in situ in the raw data. So if you call this
2246 function more than once then each call rotates the phase further.
[1846]2247
[914]2248 Parameters:
[1846]2249
[914]2250 angle: The angle (degrees) to rotate (add) by.
[1846]2251
2252 Example::
2253
[914]2254 scan.rotate_linpolphase(2.3)
[1846]2255
[914]2256 """
2257 varlist = vars()
[936]2258 self._math._rotate_linpolphase(self, angle)
[914]2259 self._add_history("rotate_linpolphase", varlist)
2260 return
[710]2261
[1862]2262 @asaplog_post_dec
[914]2263 def rotate_xyphase(self, angle):
[1846]2264 """\
[914]2265 Rotate the phase of the XY correlation. This is always done in situ
2266 in the data. So if you call this function more than once
2267 then each call rotates the phase further.
[1846]2268
[914]2269 Parameters:
[1846]2270
[914]2271 angle: The angle (degrees) to rotate (add) by.
[1846]2272
2273 Example::
2274
[914]2275 scan.rotate_xyphase(2.3)
[1846]2276
[914]2277 """
2278 varlist = vars()
[936]2279 self._math._rotate_xyphase(self, angle)
[914]2280 self._add_history("rotate_xyphase", varlist)
2281 return
2282
[1862]2283 @asaplog_post_dec
[914]2284 def swap_linears(self):
[1846]2285 """\
[1573]2286 Swap the linear polarisations XX and YY, or better the first two
[1348]2287 polarisations as this also works for ciculars.
[914]2288 """
2289 varlist = vars()
[936]2290 self._math._swap_linears(self)
[914]2291 self._add_history("swap_linears", varlist)
2292 return
2293
[1862]2294 @asaplog_post_dec
[914]2295 def invert_phase(self):
[1846]2296 """\
[914]2297 Invert the phase of the complex polarisation
2298 """
2299 varlist = vars()
[936]2300 self._math._invert_phase(self)
[914]2301 self._add_history("invert_phase", varlist)
2302 return
2303
[1862]2304 @asaplog_post_dec
[876]2305 def add(self, offset, insitu=None):
[1846]2306 """\
[513]2307 Return a scan where all spectra have the offset added
[1846]2308
[513]2309 Parameters:
[1846]2310
[513]2311 offset: the offset
[1855]2312
[513]2313 insitu: if False a new scantable is returned.
2314 Otherwise, the scaling is done in-situ
2315 The default is taken from .asaprc (False)
[1846]2316
[513]2317 """
2318 if insitu is None: insitu = rcParams['insitu']
[876]2319 self._math._setinsitu(insitu)
[513]2320 varlist = vars()
[876]2321 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2322 s._add_history("add", varlist)
[876]2323 if insitu:
2324 self._assign(s)
2325 else:
[513]2326 return s
2327
[1862]2328 @asaplog_post_dec
[1308]2329 def scale(self, factor, tsys=True, insitu=None):
[1846]2330 """\
2331
[1938]2332 Return a scan where all spectra are scaled by the given 'factor'
[1846]2333
[513]2334 Parameters:
[1846]2335
[1819]2336 factor: the scaling factor (float or 1D float list)
[1855]2337
[513]2338 insitu: if False a new scantable is returned.
2339 Otherwise, the scaling is done in-situ
2340 The default is taken from .asaprc (False)
[1855]2341
[513]2342 tsys: if True (default) then apply the operation to Tsys
2343 as well as the data
[1846]2344
[513]2345 """
2346 if insitu is None: insitu = rcParams['insitu']
[876]2347 self._math._setinsitu(insitu)
[513]2348 varlist = vars()
[1819]2349 s = None
2350 import numpy
2351 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2352 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2353 from asapmath import _array2dOp
2354 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2355 else:
2356 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2357 else:
2358 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2359 s._add_history("scale", varlist)
[876]2360 if insitu:
2361 self._assign(s)
2362 else:
[513]2363 return s
2364
[1504]2365 def set_sourcetype(self, match, matchtype="pattern",
2366 sourcetype="reference"):
[1846]2367 """\
[1502]2368 Set the type of the source to be an source or reference scan
[1846]2369 using the provided pattern.
2370
[1502]2371 Parameters:
[1846]2372
[1504]2373 match: a Unix style pattern, regular expression or selector
[1855]2374
[1504]2375 matchtype: 'pattern' (default) UNIX style pattern or
2376 'regex' regular expression
[1855]2377
[1502]2378 sourcetype: the type of the source to use (source/reference)
[1846]2379
[1502]2380 """
2381 varlist = vars()
2382 basesel = self.get_selection()
2383 stype = -1
2384 if sourcetype.lower().startswith("r"):
2385 stype = 1
2386 elif sourcetype.lower().startswith("s"):
2387 stype = 0
[1504]2388 else:
[1502]2389 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2390 if matchtype.lower().startswith("p"):
2391 matchtype = "pattern"
2392 elif matchtype.lower().startswith("r"):
2393 matchtype = "regex"
2394 else:
2395 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2396 sel = selector()
2397 if isinstance(match, selector):
2398 sel = match
2399 else:
[1504]2400 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2401 self.set_selection(basesel+sel)
2402 self._setsourcetype(stype)
2403 self.set_selection(basesel)
[1573]2404 self._add_history("set_sourcetype", varlist)
[1502]2405
[1862]2406 @asaplog_post_dec
[1857]2407 @preserve_selection
[1819]2408 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2409 """\
[670]2410 This function allows to build quotients automatically.
[1819]2411 It assumes the observation to have the same number of
[670]2412 "ons" and "offs"
[1846]2413
[670]2414 Parameters:
[1846]2415
[710]2416 preserve: you can preserve (default) the continuum or
2417 remove it. The equations used are
[1857]2418
[670]2419 preserve: Output = Toff * (on/off) - Toff
[1857]2420
[1070]2421 remove: Output = Toff * (on/off) - Ton
[1855]2422
[1573]2423 mode: the on/off detection mode
[1348]2424 'paired' (default)
2425 identifies 'off' scans by the
2426 trailing '_R' (Mopra/Parkes) or
2427 '_e'/'_w' (Tid) and matches
2428 on/off pairs from the observing pattern
[1502]2429 'time'
2430 finds the closest off in time
[1348]2431
[1857]2432 .. todo:: verify argument is not implemented
2433
[670]2434 """
[1857]2435 varlist = vars()
[1348]2436 modes = ["time", "paired"]
[670]2437 if not mode in modes:
[876]2438 msg = "please provide valid mode. Valid modes are %s" % (modes)
2439 raise ValueError(msg)
[1348]2440 s = None
2441 if mode.lower() == "paired":
[1857]2442 sel = self.get_selection()
[1875]2443 sel.set_query("SRCTYPE==psoff")
[1356]2444 self.set_selection(sel)
[1348]2445 offs = self.copy()
[1875]2446 sel.set_query("SRCTYPE==pson")
[1356]2447 self.set_selection(sel)
[1348]2448 ons = self.copy()
2449 s = scantable(self._math._quotient(ons, offs, preserve))
2450 elif mode.lower() == "time":
2451 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2452 s._add_history("auto_quotient", varlist)
[876]2453 return s
[710]2454
[1862]2455 @asaplog_post_dec
[1145]2456 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2457 """\
[1143]2458 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2459
[1143]2460 Parameters:
[1846]2461
[1145]2462 mask: an optional mask to be used when weight == 'stddev'
[1855]2463
[1143]2464 weight: How to average the off beams. Default is 'median'.
[1855]2465
[1145]2466 preserve: you can preserve (default) the continuum or
[1855]2467 remove it. The equations used are:
[1846]2468
[1855]2469 preserve: Output = Toff * (on/off) - Toff
2470
2471 remove: Output = Toff * (on/off) - Ton
2472
[1217]2473 """
[1593]2474 mask = mask or ()
[1141]2475 varlist = vars()
2476 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2477 preoff = scantable(self._math._mx_extract(self, 'off'))
2478 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2479 from asapmath import quotient
[1145]2480 q = quotient(on, off, preserve)
[1143]2481 q._add_history("mx_quotient", varlist)
[1217]2482 return q
[513]2483
[1862]2484 @asaplog_post_dec
[718]2485 def freq_switch(self, insitu=None):
[1846]2486 """\
[718]2487 Apply frequency switching to the data.
[1846]2488
[718]2489 Parameters:
[1846]2490
[718]2491 insitu: if False a new scantable is returned.
2492 Otherwise, the swictching is done in-situ
2493 The default is taken from .asaprc (False)
[1846]2494
[718]2495 """
2496 if insitu is None: insitu = rcParams['insitu']
[876]2497 self._math._setinsitu(insitu)
[718]2498 varlist = vars()
[876]2499 s = scantable(self._math._freqswitch(self))
[1118]2500 s._add_history("freq_switch", varlist)
[1856]2501 if insitu:
2502 self._assign(s)
2503 else:
2504 return s
[718]2505
[1862]2506 @asaplog_post_dec
[780]2507 def recalc_azel(self):
[1846]2508 """Recalculate the azimuth and elevation for each position."""
[780]2509 varlist = vars()
[876]2510 self._recalcazel()
[780]2511 self._add_history("recalc_azel", varlist)
2512 return
2513
[1862]2514 @asaplog_post_dec
[513]2515 def __add__(self, other):
2516 varlist = vars()
2517 s = None
2518 if isinstance(other, scantable):
[1573]2519 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2520 elif isinstance(other, float):
[876]2521 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2522 else:
[718]2523 raise TypeError("Other input is not a scantable or float value")
[513]2524 s._add_history("operator +", varlist)
2525 return s
2526
[1862]2527 @asaplog_post_dec
[513]2528 def __sub__(self, other):
2529 """
2530 implicit on all axes and on Tsys
2531 """
2532 varlist = vars()
2533 s = None
2534 if isinstance(other, scantable):
[1588]2535 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2536 elif isinstance(other, float):
[876]2537 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2538 else:
[718]2539 raise TypeError("Other input is not a scantable or float value")
[513]2540 s._add_history("operator -", varlist)
2541 return s
[710]2542
[1862]2543 @asaplog_post_dec
[513]2544 def __mul__(self, other):
2545 """
2546 implicit on all axes and on Tsys
2547 """
2548 varlist = vars()
2549 s = None
2550 if isinstance(other, scantable):
[1588]2551 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2552 elif isinstance(other, float):
[876]2553 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2554 else:
[718]2555 raise TypeError("Other input is not a scantable or float value")
[513]2556 s._add_history("operator *", varlist)
2557 return s
2558
[710]2559
[1862]2560 @asaplog_post_dec
[513]2561 def __div__(self, other):
2562 """
2563 implicit on all axes and on Tsys
2564 """
2565 varlist = vars()
2566 s = None
2567 if isinstance(other, scantable):
[1589]2568 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2569 elif isinstance(other, float):
2570 if other == 0.0:
[718]2571 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2572 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2573 else:
[718]2574 raise TypeError("Other input is not a scantable or float value")
[513]2575 s._add_history("operator /", varlist)
2576 return s
2577
[1862]2578 @asaplog_post_dec
[530]2579 def get_fit(self, row=0):
[1846]2580 """\
[530]2581 Print or return the stored fits for a row in the scantable
[1846]2582
[530]2583 Parameters:
[1846]2584
[530]2585 row: the row which the fit has been applied to.
[1846]2586
[530]2587 """
2588 if row > self.nrow():
2589 return
[976]2590 from asap.asapfit import asapfit
[530]2591 fit = asapfit(self._getfit(row))
[1859]2592 asaplog.push( '%s' %(fit) )
2593 return fit.as_dict()
[530]2594
[1483]2595 def flag_nans(self):
[1846]2596 """\
[1483]2597 Utility function to flag NaN values in the scantable.
2598 """
2599 import numpy
2600 basesel = self.get_selection()
2601 for i in range(self.nrow()):
[1589]2602 sel = self.get_row_selector(i)
2603 self.set_selection(basesel+sel)
[1483]2604 nans = numpy.isnan(self._getspectrum(0))
2605 if numpy.any(nans):
2606 bnans = [ bool(v) for v in nans]
2607 self.flag(bnans)
2608 self.set_selection(basesel)
2609
[1588]2610 def get_row_selector(self, rowno):
2611 return selector(beams=self.getbeam(rowno),
2612 ifs=self.getif(rowno),
2613 pols=self.getpol(rowno),
2614 scans=self.getscan(rowno),
2615 cycles=self.getcycle(rowno))
[1573]2616
[484]2617 def _add_history(self, funcname, parameters):
[1435]2618 if not rcParams['scantable.history']:
2619 return
[484]2620 # create date
2621 sep = "##"
2622 from datetime import datetime
2623 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2624 hist = dstr+sep
2625 hist += funcname+sep#cdate+sep
2626 if parameters.has_key('self'): del parameters['self']
[1118]2627 for k, v in parameters.iteritems():
[484]2628 if type(v) is dict:
[1118]2629 for k2, v2 in v.iteritems():
[484]2630 hist += k2
2631 hist += "="
[1118]2632 if isinstance(v2, scantable):
[484]2633 hist += 'scantable'
2634 elif k2 == 'mask':
[1118]2635 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2636 hist += str(self._zip_mask(v2))
2637 else:
2638 hist += str(v2)
[484]2639 else:
[513]2640 hist += str(v2)
[484]2641 else:
2642 hist += k
2643 hist += "="
[1118]2644 if isinstance(v, scantable):
[484]2645 hist += 'scantable'
2646 elif k == 'mask':
[1118]2647 if isinstance(v, list) or isinstance(v, tuple):
[513]2648 hist += str(self._zip_mask(v))
2649 else:
2650 hist += str(v)
[484]2651 else:
2652 hist += str(v)
2653 hist += sep
2654 hist = hist[:-2] # remove trailing '##'
2655 self._addhistory(hist)
2656
[710]2657
[484]2658 def _zip_mask(self, mask):
2659 mask = list(mask)
2660 i = 0
2661 segments = []
2662 while mask[i:].count(1):
2663 i += mask[i:].index(1)
2664 if mask[i:].count(0):
2665 j = i + mask[i:].index(0)
2666 else:
[710]2667 j = len(mask)
[1118]2668 segments.append([i, j])
[710]2669 i = j
[484]2670 return segments
[714]2671
[626]2672 def _get_ordinate_label(self):
2673 fu = "("+self.get_fluxunit()+")"
2674 import re
2675 lbl = "Intensity"
[1118]2676 if re.match(".K.", fu):
[626]2677 lbl = "Brightness Temperature "+ fu
[1118]2678 elif re.match(".Jy.", fu):
[626]2679 lbl = "Flux density "+ fu
2680 return lbl
[710]2681
[876]2682 def _check_ifs(self):
[1986]2683 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
2684 #nchans = filter(lambda t: t > 0, nchans)
2685 nchans = [self.nchan(i) for i in self.getifnos()]
[876]2686 return (sum(nchans)/len(nchans) == nchans[0])
[976]2687
[1862]2688 @asaplog_post_dec
[1916]2689 #def _fill(self, names, unit, average, getpt, antenna):
2690 def _fill(self, names, unit, average, opts={}):
[976]2691 first = True
2692 fullnames = []
2693 for name in names:
2694 name = os.path.expandvars(name)
2695 name = os.path.expanduser(name)
2696 if not os.path.exists(name):
2697 msg = "File '%s' does not exists" % (name)
2698 raise IOError(msg)
2699 fullnames.append(name)
2700 if average:
2701 asaplog.push('Auto averaging integrations')
[1079]2702 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2703 for name in fullnames:
[1073]2704 tbl = Scantable(stype)
[1843]2705 r = filler(tbl)
[1504]2706 rx = rcParams['scantable.reference']
[1843]2707 r.setreferenceexpr(rx)
[976]2708 msg = "Importing %s..." % (name)
[1118]2709 asaplog.push(msg, False)
[1916]2710 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2711 r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2712 r.fill()
[976]2713 if average:
[1118]2714 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2715 if not first:
2716 tbl = self._math._merge([self, tbl])
2717 Scantable.__init__(self, tbl)
[1843]2718 r.close()
[1118]2719 del r, tbl
[976]2720 first = False
[1861]2721 #flush log
2722 asaplog.post()
[976]2723 if unit is not None:
2724 self.set_fluxunit(unit)
[1824]2725 if not is_casapy():
2726 self.set_freqframe(rcParams['scantable.freqframe'])
[976]2727
[1402]2728 def __getitem__(self, key):
2729 if key < 0:
2730 key += self.nrow()
2731 if key >= self.nrow():
2732 raise IndexError("Row index out of range.")
2733 return self._getspectrum(key)
2734
2735 def __setitem__(self, key, value):
2736 if key < 0:
2737 key += self.nrow()
2738 if key >= self.nrow():
2739 raise IndexError("Row index out of range.")
2740 if not hasattr(value, "__len__") or \
2741 len(value) > self.nchan(self.getif(key)):
2742 raise ValueError("Spectrum length doesn't match.")
2743 return self._setspectrum(value, key)
2744
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
2747
2748 def __iter__(self):
2749 for i in range(len(self)):
2750 yield self[i]
Note: See TracBrowser for help on using the repository browser.