source: trunk/python/scantable.py@ 1855

Last change on this file since 1855 was 1855, checked in by Malte Marquarding, 16 years ago

Ticket #194: docstring changes. Play nicely with sphinx.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 91.5 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
13from asap.logging import asaplog, print_log, print_log_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1824]17from asap.utils import _n_bools, mask_not, mask_and, mask_or
[102]18
[1689]19
def preserve_selection(func):
    """Decorator: remember the scantable's current selection, run *func*,
    then re-apply the remembered selection before returning its result."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        remembered = obj.get_selection()
        result = func(obj, *args, **kw)
        obj.set_selection(remembered)
        return result
    return wrap
28
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A scantable is a casa table directory: it has table.info but
    # not table.f1 (which marks a different, non-scantable layout).
    if not os.path.isdir(filename):
        return False
    if os.path.exists(filename + '/table.f1'):
        return False
    return os.path.exists(filename + '/table.info')
40
41
class scantable(Scantable):
    """\
    The ASAP container for scans (single-dish data).

    Wraps the C++ ``Scantable`` and adds the user-level python interface:
    construction from disk files, selection handling, inspection
    (summary/statistics) and per-row column access.
    """
[1819]46
[1589]47 @print_log_dec
[1824]48 def __init__(self, filename, average=None, unit=None, getpt=None,
49 antenna=None, parallactify=None):
[1846]50 """\
[102]51 Create a scantable from a saved one or make a reference
[1846]52
[102]53 Parameters:
[1846]54
55 filename: the name of an asap table on disk
56 or
57 the name of a rpfits/sdfits/ms file
58 (integrations within scans are auto averaged
59 and the whole file is read) or
60 [advanced] a reference to an existing scantable
61
62 average: average all integrations withinb a scan on read.
63 The default (True) is taken from .asaprc.
64
[484]65 unit: brightness unit; must be consistent with K or Jy.
[1846]66 Over-rides the default selected by the filler
67 (input rpfits/sdfits/ms) or replaces the value
68 in existing scantables
69
70 getpt: for MeasurementSet input data only:
71 If True, all pointing data are filled.
72 The deafult is False, which makes time to load
73 the MS data faster in some cases.
74
75 antenna: Antenna selection. integer (id) or string (name or id).
76
77 parallactify: Indicate that the data had been parallatified. Default
78 is taken from rc file.
79
[710]80 """
[976]81 if average is None:
[710]82 average = rcParams['scantable.autoaverage']
[1819]83 if getpt is None:
84 getpt = True
[1843]85 if antenna is not None:
86 asaplog.push("Antenna selection currently unsupported."
87 "Using '0'")
88 print_log('WARN')
[1819]89 if antenna is None:
90 antenna = ''
91 elif type(antenna) == int:
[1843]92 antenna = '%s' % antenna
[1819]93 elif type(antenna) == list:
94 tmpstr = ''
95 for i in range( len(antenna) ):
96 if type(antenna[i]) == int:
97 tmpstr = tmpstr + ('%s,'%(antenna[i]))
98 elif type(antenna[i]) == str:
99 tmpstr=tmpstr+antenna[i]+','
100 else:
101 asaplog.push('Bad antenna selection.')
102 print_log('ERROR')
103 return
104 antenna = tmpstr.rstrip(',')
[1593]105 parallactify = parallactify or rcParams['scantable.parallactify']
[1259]106 varlist = vars()
[876]107 from asap._asap import stmath
[1819]108 self._math = stmath( rcParams['insitu'] )
[876]109 if isinstance(filename, Scantable):
110 Scantable.__init__(self, filename)
[181]111 else:
[1697]112 if isinstance(filename, str):
[976]113 filename = os.path.expandvars(filename)
114 filename = os.path.expanduser(filename)
115 if not os.path.exists(filename):
116 s = "File '%s' not found." % (filename)
[718]117 if rcParams['verbose']:
[976]118 asaplog.push(s)
[1819]119 print_log('ERROR')
[718]120 return
[976]121 raise IOError(s)
[1697]122 if is_scantable(filename):
123 ondisk = rcParams['scantable.storage'] == 'disk'
124 Scantable.__init__(self, filename, ondisk)
125 if unit is not None:
126 self.set_fluxunit(unit)
[1819]127 # do not reset to the default freqframe
128 #self.set_freqframe(rcParams['scantable.freqframe'])
129 elif os.path.isdir(filename) \
130 and not os.path.exists(filename+'/table.f1'):
131 msg = "The given file '%s'is not a valid " \
132 "asap table." % (filename)
133 if rcParams['verbose']:
134 #print msg
135 asaplog.push( msg )
136 print_log( 'ERROR' )
137 return
138 else:
139 raise IOError(msg)
[226]140 else:
[1819]141 self._fill([filename], unit, average, getpt, antenna)
[1118]142 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]143 and isinstance(filename[-1], str):
[1819]144 self._fill(filename, unit, average, getpt, antenna)
[1586]145 self.parallactify(parallactify)
[1259]146 self._add_history("scantable", varlist)
[1819]147 print_log()
[102]148
[1589]149 @print_log_dec
[876]150 def save(self, name=None, format=None, overwrite=False):
[1846]151 """\
[1280]152 Store the scantable on disk. This can be an asap (aips++) Table,
153 SDFITS or MS2 format.
[1846]154
[116]155 Parameters:
[1846]156
[1093]157 name: the name of the outputfile. For format "ASCII"
158 this is the root file name (data in 'name'.txt
[497]159 and header in 'name'_header.txt)
[1855]160
[116]161 format: an optional file format. Default is ASAP.
[1855]162 Allowed are:
163
164 * 'ASAP' (save as ASAP [aips++] Table),
165 * 'SDFITS' (save as SDFITS file)
166 * 'ASCII' (saves as ascii text file)
167 * 'MS2' (saves as an casacore MeasurementSet V2)
168 * 'FITS' (save as image FITS - not readable by class)
169 * 'CLASS' (save as FITS readable by CLASS)
170
[411]171 overwrite: If the file should be overwritten if it exists.
[256]172 The default False is to return with warning
[411]173 without writing the output. USE WITH CARE.
[1855]174
[1846]175 Example::
176
[116]177 scan.save('myscan.asap')
[1118]178 scan.save('myscan.sdfits', 'SDFITS')
[1846]179
[116]180 """
[411]181 from os import path
[1593]182 format = format or rcParams['scantable.save']
[256]183 suffix = '.'+format.lower()
[1118]184 if name is None or name == "":
[256]185 name = 'scantable'+suffix
[718]186 msg = "No filename given. Using default name %s..." % name
187 asaplog.push(msg)
[411]188 name = path.expandvars(name)
[256]189 if path.isfile(name) or path.isdir(name):
190 if not overwrite:
[718]191 msg = "File %s exists." % name
192 if rcParams['verbose']:
[1819]193 #print msg
194 asaplog.push( msg )
195 print_log( 'ERROR' )
[718]196 return
197 else:
198 raise IOError(msg)
[451]199 format2 = format.upper()
200 if format2 == 'ASAP':
[116]201 self._save(name)
202 else:
[989]203 from asap._asap import stwriter as stw
[1118]204 writer = stw(format2)
205 writer.write(self, name)
[1819]206 print_log()
[116]207 return
208
[102]209 def copy(self):
[1846]210 """Return a copy of this scantable.
211
212 *Note*:
213
[1348]214 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]215
216 Example::
217
[102]218 copiedscan = scan.copy()
[1846]219
[102]220 """
[876]221 sd = scantable(Scantable._copy(self))
[113]222 return sd
223
[1093]224 def drop_scan(self, scanid=None):
[1846]225 """\
[1093]226 Return a new scantable where the specified scan number(s) has(have)
227 been dropped.
[1846]228
[1093]229 Parameters:
[1846]230
[1093]231 scanid: a (list of) scan number(s)
[1846]232
[1093]233 """
234 from asap import _is_sequence_or_number as _is_valid
235 from asap import _to_list
236 from asap import unique
237 if not _is_valid(scanid):
238 if rcParams['verbose']:
[1819]239 #print "Please specify a scanno to drop from the scantable"
240 asaplog.push( 'Please specify a scanno to drop from the scantable' )
241 print_log( 'ERROR' )
[1093]242 return
243 else:
244 raise RuntimeError("No scan given")
245 try:
246 scanid = _to_list(scanid)
247 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
248 for sid in scanid: allscans.remove(sid)
[1118]249 if len(allscans) == 0:
250 raise ValueError("Can't remove all scans")
[1093]251 except ValueError:
252 if rcParams['verbose']:
[1819]253 #print "Couldn't find any match."
254 print_log()
255 asaplog.push( "Couldn't find any match." )
256 print_log( 'ERROR' )
[1093]257 return
258 else: raise
259 try:
[1593]260 sel = selector(scans=allscans)
[1594]261 return self._select_copy(sel)
[1093]262 except RuntimeError:
[1118]263 if rcParams['verbose']:
[1819]264 #print "Couldn't find any match."
265 print_log()
266 asaplog.push( "Couldn't find any match." )
267 print_log( 'ERROR' )
[1118]268 else:
269 raise
[1093]270
[1594]271 def _select_copy(self, selection):
272 orig = self.get_selection()
273 self.set_selection(orig+selection)
274 cp = self.copy()
275 self.set_selection(orig)
276 return cp
277
[102]278 def get_scan(self, scanid=None):
[1855]279 """\
[102]280 Return a specific scan (by scanno) or collection of scans (by
281 source name) in a new scantable.
[1846]282
283 *Note*:
284
[1348]285 See scantable.drop_scan() for the inverse operation.
[1846]286
[102]287 Parameters:
[1846]288
[513]289 scanid: a (list of) scanno or a source name, unix-style
290 patterns are accepted for source name matching, e.g.
291 '*_R' gets all 'ref scans
[1846]292
293 Example::
294
[513]295 # get all scans containing the source '323p459'
296 newscan = scan.get_scan('323p459')
297 # get all 'off' scans
298 refscans = scan.get_scan('*_R')
299 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]300 newscan = scan.get_scan([0, 2, 7, 10])
[1846]301
[102]302 """
303 if scanid is None:
[718]304 if rcParams['verbose']:
[1819]305 #print "Please specify a scan no or name to " \
306 # "retrieve from the scantable"
[1846]307 asaplog.push( 'Please specify a scan no or name to retrieve'
308 ' from the scantable' )
[1819]309 print_log( 'ERROR' )
[718]310 return
311 else:
312 raise RuntimeError("No scan given")
313
[102]314 try:
[946]315 bsel = self.get_selection()
316 sel = selector()
[102]317 if type(scanid) is str:
[946]318 sel.set_name(scanid)
[1594]319 return self._select_copy(sel)
[102]320 elif type(scanid) is int:
[946]321 sel.set_scans([scanid])
[1594]322 return self._select_copy(sel)
[381]323 elif type(scanid) is list:
[946]324 sel.set_scans(scanid)
[1594]325 return self._select_copy(sel)
[381]326 else:
[718]327 msg = "Illegal scanid type, use 'int' or 'list' if ints."
328 if rcParams['verbose']:
[1819]329 #print msg
330 asaplog.push( msg )
331 print_log( 'ERROR' )
[718]332 else:
333 raise TypeError(msg)
[102]334 except RuntimeError:
[1819]335 if rcParams['verbose']:
336 #print "Couldn't find any match."
337 print_log()
338 asaplog.push( "Couldn't find any match." )
339 print_log( 'ERROR' )
[718]340 else: raise
[102]341
    def __str__(self):
        # Verbose summary string produced by the C++ implementation.
        return Scantable._summary(self, True)
[102]344
[976]345 def summary(self, filename=None):
[1846]346 """\
[102]347 Print a summary of the contents of this scantable.
[1846]348
[102]349 Parameters:
[1846]350
[102]351 filename: the name of a file to write the putput to
352 Default - no file output
[1846]353
[102]354 """
[976]355 info = Scantable._summary(self, True)
[102]356 if filename is not None:
[256]357 if filename is "":
358 filename = 'scantable_summary.txt'
[415]359 from os.path import expandvars, isdir
[411]360 filename = expandvars(filename)
[415]361 if not isdir(filename):
[413]362 data = open(filename, 'w')
363 data.write(info)
364 data.close()
365 else:
[718]366 msg = "Illegal file name '%s'." % (filename)
367 if rcParams['verbose']:
[1819]368 #print msg
369 asaplog.push( msg )
370 print_log( 'ERROR' )
[718]371 else:
372 raise IOError(msg)
373 if rcParams['verbose']:
[794]374 try:
375 from IPython.genutils import page as pager
376 except ImportError:
377 from pydoc import pager
378 pager(info)
[718]379 else:
380 return info
[710]381
    def get_spectrum(self, rowno):
        """Return the spectrum for the given row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]391
    def get_mask(self, rowno):
        """Return the mask for the given row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the mask from

        """
        return self._getmask(rowno)
401
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the given row in the scantable.

        (Docstring fixed: it previously claimed to *return* the spectrum,
        a copy-paste left-over from get_spectrum.)

        Parameters:

            spec:    the new spectrum

            rowno:   the row number to set the spectrum for

        """
        # Guard: the replacement spectrum must match the channel count.
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
414
[1600]415 def get_coordinate(self, rowno):
416 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]417
418 *Note*:
419
[1600]420 * This coordinate is only valid until a scantable method modifies
421 the frequency axis.
422 * This coordinate does contain the original frequency set-up
423 NOT the new frame. The conversions however are done using the user
424 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
425 use scantable.freq_align first. Without it there is no closure,
[1846]426 i.e.::
[1600]427
[1846]428 c = myscan.get_coordinate(0)
429 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
430
[1600]431 Parameters:
[1846]432
[1600]433 rowno: the row number for the spectral coordinate
434
435 """
436 return coordinate(Scantable.get_coordinate(self, rowno))
437
[946]438 def get_selection(self):
[1846]439 """\
[1005]440 Get the selection object currently set on this scantable.
[1846]441
442 Example::
443
[1005]444 sel = scan.get_selection()
445 sel.set_ifs(0) # select IF 0
446 scan.set_selection(sel) # apply modified selection
[1846]447
[946]448 """
449 return selector(self._getselection())
450
[1576]451 def set_selection(self, selection=None, **kw):
[1846]452 """\
[1005]453 Select a subset of the data. All following operations on this scantable
454 are only applied to thi selection.
[1846]455
[1005]456 Parameters:
[1697]457
[1846]458 selection: a selector object (default unset the selection), or
459 any combination of "pols", "ifs", "beams", "scans",
460 "cycles", "name", "query"
[1697]461
[1846]462 Examples::
[1697]463
[1005]464 sel = selector() # create a selection object
[1118]465 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]466 scan.set_selection(sel) # set the selection
467 scan.summary() # will only print summary of scanno 0 an 3
468 scan.set_selection() # unset the selection
[1697]469 # or the equivalent
470 scan.set_selection(scans=[0,3])
471 scan.summary() # will only print summary of scanno 0 an 3
472 scan.set_selection() # unset the selection
[1846]473
[946]474 """
[1576]475 if selection is None:
476 # reset
477 if len(kw) == 0:
478 selection = selector()
479 else:
480 # try keywords
481 for k in kw:
482 if k not in selector.fields:
483 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
484 selection = selector(**kw)
[946]485 self._setselection(selection)
486
[1819]487 def get_row(self, row=0, insitu=None):
[1846]488 """\
[1819]489 Select a row in the scantable.
490 Return a scantable with single row.
[1846]491
[1819]492 Parameters:
[1846]493
494 row: row no of integration, default is 0.
495 insitu: if False a new scantable is returned. Otherwise, the
496 scaling is done in-situ. The default is taken from .asaprc
497 (False)
498
[1819]499 """
500 if insitu is None: insitu = rcParams['insitu']
501 if not insitu:
502 workscan = self.copy()
503 else:
504 workscan = self
505 # Select a row
506 sel=selector()
507 sel.set_scans([workscan.getscan(row)])
508 sel.set_cycles([workscan.getcycle(row)])
509 sel.set_beams([workscan.getbeam(row)])
510 sel.set_ifs([workscan.getif(row)])
511 sel.set_polarisations([workscan.getpol(row)])
512 sel.set_name(workscan._getsourcename(row))
513 workscan.set_selection(sel)
514 if not workscan.nrow() == 1:
515 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
516 raise RuntimeError(msg)
517 del sel
518 if insitu:
519 self._assign(workscan)
520 else:
521 return workscan
522
    def stats(self, stat='stddev', mask=None, form='3.3f'):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

        Example::

            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa value at the extremum
        # rather than the statistic itself.
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max statistics also need the channel of the extremum.
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc: statvals = self._math._stats(self, mask, stat)

        label = stat
        out = ""
        sep = '-'*50
        for i in range(self.nrow()):
            refstr = ''
            statunit = ''
            if getchan:
                # translate the extremum channel into abscissa/value pairs
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # the abscissa value *is* the result for _abc stats
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit = '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            out += sep+"\n"

        if rcParams['verbose']:
            # Write the report through a temp file so the logger receives
            # it as one block.
            # NOTE(review): ``statunit`` holds the value from the *last*
            # loop iteration here, and is undefined when nrow() == 0 -
            # confirm this is intended.
            import os
            if os.environ.has_key( 'USER' ):
                usr = os.environ['USER']
            else:
                import commands
                usr = commands.getoutput( 'whoami' )
            tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
            f = open(tmpfile,'w')
            print >> f, sep
            print >> f, ' %s %s' % (label, statunit)
            print >> f, sep
            print >> f, out
            f.close()
            f = open(tmpfile,'r')
            x = f.readlines()
            f.close()
            blanc = ''
            asaplog.push(blanc.join(x), False)
            print_log()
        return statvals
619
620 def chan2data(self, rowno=0, chan=0):
[1846]621 """\
[1819]622 Returns channel/frequency/velocity and spectral value
623 at an arbitrary row and channel in the scantable.
[1846]624
[1819]625 Parameters:
[1846]626
[1819]627 rowno: a row number in the scantable. Default is the
628 first row, i.e. rowno=0
[1855]629
[1819]630 chan: a channel in the scantable. Default is the first
631 channel, i.e. pos=0
[1846]632
[1819]633 """
634 if isinstance(rowno, int) and isinstance(chan, int):
635 qx = {'unit': self.get_unit(),
636 'value': self._getabcissa(rowno)[chan]}
637 qy = {'unit': self.get_fluxunit(),
638 'value': self._getspectrum(rowno)[chan]}
639 return qx, qy
640
[1118]641 def stddev(self, mask=None):
[1846]642 """\
[135]643 Determine the standard deviation of the current beam/if/pol
644 Takes a 'mask' as an optional parameter to specify which
645 channels should be excluded.
[1846]646
[135]647 Parameters:
[1846]648
[135]649 mask: an optional mask specifying where the standard
650 deviation should be determined.
651
[1846]652 Example::
653
[135]654 scan.set_unit('channel')
[1118]655 msk = scan.create_mask([100, 200], [500, 600])
[135]656 scan.stddev(mask=m)
[1846]657
[135]658 """
[1118]659 return self.stats(stat='stddev', mask=mask);
[135]660
[1003]661
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        return list(Scantable.get_column_names(self))
[1003]667
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            # single-row lookup
            return self._get_column(self._gettsys, row)
        # all rows: _row_callback also pretty-prints per-row info
        return self._row_callback(self._gettsys, "Tsys")
[256]684
[1730]685
686 def get_weather(self, row=-1):
[1846]687 """\
688 Return the weather informations.
689
690 Parameters:
691
692 row: the rowno to get the information for. (default all rows)
693
694 Returns:
695
696 a dict or list of of dicts of values for the current selection
697
698 """
699
[1730]700 values = self._get_column(self._get_weather, row)
701 if row > -1:
702 return {'temperature': values[0],
703 'pressure': values[1], 'humidity' : values[2],
704 'windspeed' : values[3], 'windaz' : values[4]
705 }
706 else:
707 out = []
708 for r in values:
709
710 out.append({'temperature': r[0],
711 'pressure': r[1], 'humidity' : r[2],
712 'windspeed' : r[3], 'windaz' : r[4]
713 })
714 return out
715
[876]716 def _row_callback(self, callback, label):
717 out = ""
[1118]718 outvec = []
[1590]719 sep = '-'*50
[876]720 for i in range(self.nrow()):
721 tm = self._gettime(i)
722 src = self._getsourcename(i)
[1590]723 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]724 out += 'Time[%s]:\n' % (tm)
[1590]725 if self.nbeam(-1) > 1:
726 out += ' Beam[%d] ' % (self.getbeam(i))
727 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
728 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]729 outvec.append(callback(i))
730 out += '= %3.3f\n' % (outvec[i])
[1590]731 out += sep+'\n'
[876]732 if rcParams['verbose']:
[1819]733 asaplog.push(sep)
734 asaplog.push(" %s" % (label))
735 asaplog.push(sep)
736 asaplog.push(out)
737 print_log()
[1175]738 return outvec
[256]739
[1070]740 def _get_column(self, callback, row=-1):
741 """
742 """
743 if row == -1:
744 return [callback(i) for i in range(self.nrow())]
745 else:
[1819]746 if 0 <= row < self.nrow():
[1070]747 return callback(row)
[256]748
[1070]749
[1348]750 def get_time(self, row=-1, asdatetime=False):
[1846]751 """\
[113]752 Get a list of time stamps for the observations.
[1348]753 Return a datetime object for each integration time stamp in the scantable.
[1846]754
[113]755 Parameters:
[1846]756
[1348]757 row: row no of integration. Default -1 return all rows
[1855]758
[1348]759 asdatetime: return values as datetime objects rather than strings
[1846]760
[113]761 """
[1175]762 from time import strptime
763 from datetime import datetime
[1392]764 times = self._get_column(self._gettime, row)
[1348]765 if not asdatetime:
[1392]766 return times
[1175]767 format = "%Y/%m/%d/%H:%M:%S"
768 if isinstance(times, list):
769 return [datetime(*strptime(i, format)[:6]) for i in times]
770 else:
771 return datetime(*strptime(times, format)[:6])
[102]772
[1348]773
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]785
[1573]786
    def get_sourcename(self, row=-1):
        """\
        Get a list of source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]797
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]809
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]820
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]832
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        # NOTE(review): unlike get_directionval this returns the formatted
        # (string) representation - per the docstring above; confirm against
        # the C++ _getdirection implementation.
        return self._get_column(self._getdirection, row)
844
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
856
[1730]857 @print_log_dec
[102]858 def set_unit(self, unit='channel'):
[1846]859 """\
[102]860 Set the unit for all following operations on this scantable
[1846]861
[102]862 Parameters:
[1846]863
864 unit: optional unit, default is 'channel'. Use one of '*Hz',
865 'km/s', 'channel' or equivalent ''
866
[102]867 """
[484]868 varlist = vars()
[1118]869 if unit in ['', 'pixel', 'channel']:
[113]870 unit = ''
871 inf = list(self._getcoordinfo())
872 inf[0] = unit
873 self._setcoordinfo(inf)
[1118]874 self._add_history("set_unit", varlist)
[113]875
    @print_log_dec
    def set_instrument(self, instr):
        """\
        Set the instrument for subsequent processing.

        Parameters:

            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'

        """
        self._setInstrument(instr)
        # NOTE(review): the history label is misspelled ("set_instument");
        # kept byte-identical in case external tools match on it.
        self._add_history("set_instument", vars())
        print_log()
[358]890
    @print_log_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:    'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
        print_log()
[1190]904
    @print_log_dec
    def set_doppler(self, doppler='RADIO'):
        """\
        Set the doppler for all following operations on this scantable.

        Parameters:

            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'

        """
        varlist = vars()
        inf = list(self._getcoordinfo())
        # index 2 of the coordinate info tuple is the doppler setting
        inf[2] = doppler
        self._setcoordinfo(inf)
        # NOTE(review): history records vars() at this point (which also
        # contains ``inf`` and ``varlist``), not the earlier ``varlist``
        # snapshot - confirm this is intended.
        self._add_history("set_doppler", vars())
        print_log()
[710]921
[1589]922 @print_log_dec
[226]923 def set_freqframe(self, frame=None):
[1846]924 """\
[113]925 Set the frame type of the Spectral Axis.
[1846]926
[113]927 Parameters:
[1846]928
[591]929 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]930 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]931 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]932
933 Example::
934
[113]935 scan.set_freqframe('BARY')
[1846]936
[113]937 """
[1593]938 frame = frame or rcParams['scantable.freqframe']
[484]939 varlist = vars()
[1819]940 # "REST" is not implemented in casacore
941 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
942 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
943 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]944 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]945
[989]946 if frame in valid:
[113]947 inf = list(self._getcoordinfo())
948 inf[1] = frame
949 self._setcoordinfo(inf)
[1118]950 self._add_history("set_freqframe", varlist)
[102]951 else:
[1118]952 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]953 if rcParams['verbose']:
[1819]954 #print msg
955 asaplog.push( msg )
956 print_log( 'ERROR' )
[718]957 else:
958 raise TypeError(msg)
[1819]959 print_log()
[710]960
[989]961 def set_dirframe(self, frame=""):
[1846]962 """\
[989]963 Set the frame type of the Direction on the sky.
[1846]964
[989]965 Parameters:
[1846]966
[989]967 frame: an optional frame type, default ''. Valid frames are:
968 'J2000', 'B1950', 'GALACTIC'
[1846]969
970 Example:
971
[989]972 scan.set_dirframe('GALACTIC')
[1846]973
[989]974 """
975 varlist = vars()
976 try:
977 Scantable.set_dirframe(self, frame)
[1118]978 except RuntimeError, msg:
[989]979 if rcParams['verbose']:
[1819]980 #print msg
981 print_log()
982 asaplog.push( str(msg) )
983 print_log( 'ERROR' )
[989]984 else:
985 raise
[1118]986 self._add_history("set_dirframe", varlist)
[989]987
[113]988 def get_unit(self):
[1846]989 """\
[113]990 Get the default unit set in this scantable
[1846]991
[113]992 Returns:
[1846]993
[113]994 A unit string
[1846]995
[113]996 """
997 inf = self._getcoordinfo()
998 unit = inf[0]
999 if unit == '': unit = 'channel'
1000 return unit
[102]1001
[158]1002 def get_abcissa(self, rowno=0):
[1846]1003 """\
[158]1004 Get the abcissa in the current coordinate setup for the currently
[113]1005 selected Beam/IF/Pol
[1846]1006
[113]1007 Parameters:
[1846]1008
[226]1009 rowno: an optional row number in the scantable. Default is the
1010 first row, i.e. rowno=0
[1846]1011
[113]1012 Returns:
[1846]1013
[1348]1014 The abcissa values and the format string (as a dictionary)
[1846]1015
[113]1016 """
[256]1017 abc = self._getabcissa(rowno)
[710]1018 lbl = self._getabcissalabel(rowno)
[1819]1019 print_log()
[158]1020 return abc, lbl
[113]1021
[1819]1022 def flag(self, mask=None, unflag=False):
[1846]1023 """\
[1001]1024 Flag the selected data using an optional channel mask.
[1846]1025
[1001]1026 Parameters:
[1846]1027
[1001]1028 mask: an optional channel mask, created with create_mask. Default
1029 (no mask) is all channels.
[1855]1030
[1819]1031 unflag: if True, unflag the data
[1846]1032
[1001]1033 """
1034 varlist = vars()
[1593]1035 mask = mask or []
[1001]1036 try:
[1819]1037 self._flag(mask, unflag)
[1118]1038 except RuntimeError, msg:
[1001]1039 if rcParams['verbose']:
[1819]1040 #print msg
1041 print_log()
1042 asaplog.push( str(msg) )
1043 print_log( 'ERROR' )
[1001]1044 return
1045 else: raise
1046 self._add_history("flag", varlist)
1047
[1819]1048 def flag_row(self, rows=[], unflag=False):
[1846]1049 """\
[1819]1050 Flag the selected data in row-based manner.
[1846]1051
[1819]1052 Parameters:
[1846]1053
[1843]1054 rows: list of row numbers to be flagged. Default is no row
1055 (must be explicitly specified to execute row-based flagging).
[1855]1056
[1819]1057 unflag: if True, unflag the data.
[1846]1058
[1819]1059 """
1060 varlist = vars()
1061 try:
1062 self._flag_row(rows, unflag)
1063 except RuntimeError, msg:
1064 if rcParams['verbose']:
1065 print_log()
1066 asaplog.push( str(msg) )
1067 print_log('ERROR')
1068 return
1069 else: raise
1070 self._add_history("flag_row", varlist)
1071
1072 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1073 """\
[1819]1074 Flag the selected data outside a specified range (in channel-base)
[1846]1075
[1819]1076 Parameters:
[1846]1077
[1819]1078 uthres: upper threshold.
[1855]1079
[1819]1080 dthres: lower threshold
[1846]1081
[1819]1082 clipoutside: True for flagging data outside the range [dthres:uthres].
1083 False for glagging data inside the range.
[1855]1084
[1846]1085 unflag: if True, unflag the data.
1086
[1819]1087 """
1088 varlist = vars()
1089 try:
1090 self._clip(uthres, dthres, clipoutside, unflag)
1091 except RuntimeError, msg:
1092 if rcParams['verbose']:
1093 print_log()
1094 asaplog.push(str(msg))
1095 print_log('ERROR')
1096 return
1097 else: raise
1098 self._add_history("clip", varlist)
1099
[1589]1100 @print_log_dec
[1584]1101 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1102 """\
[1192]1103 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1104 Flagged data in the scantable gets interpolated over the region.
[1192]1105 No taper is applied.
[1846]1106
[1192]1107 Parameters:
[1846]1108
[1579]1109 start: the start frequency (really a period within the
1110 bandwidth) or period to remove
[1855]1111
[1579]1112 end: the end frequency or period to remove
[1855]1113
[1584]1114 unit: the frequency unit (default "MHz") or "" for
[1579]1115 explicit lag channels
[1846]1116
1117 *Notes*:
1118
[1579]1119 It is recommended to flag edges of the band or strong
[1348]1120 signals beforehand.
[1846]1121
[1192]1122 """
1123 if insitu is None: insitu = rcParams['insitu']
1124 self._math._setinsitu(insitu)
1125 varlist = vars()
[1579]1126 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1127 if not (unit == "" or base.has_key(unit)):
[1192]1128 raise ValueError("%s is not a valid unit." % unit)
1129 try:
[1579]1130 if unit == "":
1131 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1132 else:
1133 s = scantable(self._math._lag_flag(self, start*base[unit],
1134 end*base[unit], "frequency"))
[1192]1135 except RuntimeError, msg:
1136 if rcParams['verbose']:
[1819]1137 #print msg
1138 print_log()
1139 asaplog.push( str(msg) )
1140 print_log( 'ERROR' )
[1192]1141 return
1142 else: raise
1143 s._add_history("lag_flag", varlist)
[1819]1144 print_log()
[1192]1145 if insitu:
1146 self._assign(s)
1147 else:
1148 return s
[1001]1149
[1589]1150 @print_log_dec
[113]1151 def create_mask(self, *args, **kwargs):
[1846]1152 """\
[1118]1153 Compute and return a mask based on [min, max] windows.
[189]1154 The specified windows are to be INCLUDED, when the mask is
[113]1155 applied.
[1846]1156
[102]1157 Parameters:
[1846]1158
[1118]1159 [min, max], [min2, max2], ...
[1024]1160 Pairs of start/end points (inclusive)specifying the regions
[102]1161 to be masked
[1855]1162
[189]1163 invert: optional argument. If specified as True,
1164 return an inverted mask, i.e. the regions
1165 specified are EXCLUDED
[1855]1166
[513]1167 row: create the mask using the specified row for
1168 unit conversions, default is row=0
1169 only necessary if frequency varies over rows.
[1846]1170
1171 Examples::
1172
[113]1173 scan.set_unit('channel')
[1846]1174 # a)
[1118]1175 msk = scan.create_mask([400, 500], [800, 900])
[189]1176 # masks everything outside 400 and 500
[113]1177 # and 800 and 900 in the unit 'channel'
1178
[1846]1179 # b)
[1118]1180 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1181 # masks the regions between 400 and 500
[113]1182 # and 800 and 900 in the unit 'channel'
[1846]1183
1184 # c)
1185 #mask only channel 400
[1554]1186 msk = scan.create_mask([400])
[1846]1187
[102]1188 """
[1554]1189 row = kwargs.get("row", 0)
[513]1190 data = self._getabcissa(row)
[113]1191 u = self._getcoordinfo()[0]
[718]1192 if rcParams['verbose']:
[113]1193 if u == "": u = "channel"
[718]1194 msg = "The current mask window unit is %s" % u
[1118]1195 i = self._check_ifs()
1196 if not i:
[876]1197 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]1198 asaplog.push(msg)
[102]1199 n = self.nchan()
[1295]1200 msk = _n_bools(n, False)
[710]1201 # test if args is a 'list' or a 'normal *args - UGLY!!!
1202
[1118]1203 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1204 and args or args[0]
[710]1205 for window in ws:
[1554]1206 if len(window) == 1:
1207 window = [window[0], window[0]]
1208 if len(window) == 0 or len(window) > 2:
1209 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1210 if window[0] > window[1]:
1211 tmp = window[0]
1212 window[0] = window[1]
1213 window[1] = tmp
[102]1214 for i in range(n):
[1024]1215 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1216 msk[i] = True
[113]1217 if kwargs.has_key('invert'):
1218 if kwargs.get('invert'):
[1295]1219 msk = mask_not(msk)
[1819]1220 print_log()
[102]1221 return msk
[710]1222
[1819]1223 def get_masklist(self, mask=None, row=0):
[1846]1224 """\
[1819]1225 Compute and return a list of mask windows, [min, max].
[1846]1226
[1819]1227 Parameters:
[1846]1228
[1819]1229 mask: channel mask, created with create_mask.
[1855]1230
[1819]1231 row: calcutate the masklist using the specified row
1232 for unit conversions, default is row=0
1233 only necessary if frequency varies over rows.
[1846]1234
[1819]1235 Returns:
[1846]1236
[1819]1237 [min, max], [min2, max2], ...
1238 Pairs of start/end points (inclusive)specifying
1239 the masked regions
[1846]1240
[1819]1241 """
1242 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1243 raise TypeError("The mask should be list or tuple.")
1244 if len(mask) < 2:
1245 raise TypeError("The mask elements should be > 1")
1246 if self.nchan() != len(mask):
1247 msg = "Number of channels in scantable != number of mask elements"
1248 raise TypeError(msg)
1249 data = self._getabcissa(row)
1250 u = self._getcoordinfo()[0]
1251 if rcParams['verbose']:
1252 if u == "": u = "channel"
1253 msg = "The current mask window unit is %s" % u
1254 i = self._check_ifs()
1255 if not i:
1256 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1257 asaplog.push(msg)
1258 masklist=[]
1259 ist, ien = None, None
1260 ist, ien=self.get_mask_indices(mask)
1261 if ist is not None and ien is not None:
1262 for i in xrange(len(ist)):
1263 range=[data[ist[i]],data[ien[i]]]
1264 range.sort()
1265 masklist.append([range[0],range[1]])
1266 return masklist
1267
1268 def get_mask_indices(self, mask=None):
[1846]1269 """\
[1819]1270 Compute and Return lists of mask start indices and mask end indices.
[1855]1271
1272 Parameters:
1273
[1819]1274 mask: channel mask, created with create_mask.
[1846]1275
[1819]1276 Returns:
[1846]1277
[1819]1278 List of mask start indices and that of mask end indices,
1279 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1280
[1819]1281 """
1282 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1283 raise TypeError("The mask should be list or tuple.")
1284 if len(mask) < 2:
1285 raise TypeError("The mask elements should be > 1")
1286 istart=[]
1287 iend=[]
1288 if mask[0]: istart.append(0)
1289 for i in range(len(mask)-1):
1290 if not mask[i] and mask[i+1]:
1291 istart.append(i+1)
1292 elif mask[i] and not mask[i+1]:
1293 iend.append(i)
1294 if mask[len(mask)-1]: iend.append(len(mask)-1)
1295 if len(istart) != len(iend):
1296 raise RuntimeError("Numbers of mask start != mask end.")
1297 for i in range(len(istart)):
1298 if istart[i] > iend[i]:
1299 raise RuntimeError("Mask start index > mask end index")
1300 break
1301 return istart,iend
1302
1303# def get_restfreqs(self):
1304# """
1305# Get the restfrequency(s) stored in this scantable.
1306# The return value(s) are always of unit 'Hz'
1307# Parameters:
1308# none
1309# Returns:
1310# a list of doubles
1311# """
1312# return list(self._getrestfreqs())
1313
1314 def get_restfreqs(self, ids=None):
[1846]1315 """\
[256]1316 Get the restfrequency(s) stored in this scantable.
1317 The return value(s) are always of unit 'Hz'
[1846]1318
[256]1319 Parameters:
[1846]1320
[1819]1321 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1322 be retrieved
[1846]1323
[256]1324 Returns:
[1846]1325
[1819]1326 dictionary containing ids and a list of doubles for each id
[1846]1327
[256]1328 """
[1819]1329 if ids is None:
1330 rfreqs={}
1331 idlist = self.getmolnos()
1332 for i in idlist:
1333 rfreqs[i]=list(self._getrestfreqs(i))
1334 return rfreqs
1335 else:
1336 if type(ids)==list or type(ids)==tuple:
1337 rfreqs={}
1338 for i in ids:
1339 rfreqs[i]=list(self._getrestfreqs(i))
1340 return rfreqs
1341 else:
1342 return list(self._getrestfreqs(ids))
1343 #return list(self._getrestfreqs(ids))
[102]1344
[931]1345 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1346 """\
[931]1347 Set or replace the restfrequency specified and
1348 If the 'freqs' argument holds a scalar,
1349 then that rest frequency will be applied to all the selected
1350 data. If the 'freqs' argument holds
1351 a vector, then it MUST be of equal or smaller length than
1352 the number of IFs (and the available restfrequencies will be
1353 replaced by this vector). In this case, *all* data have
1354 the restfrequency set per IF according
1355 to the corresponding value you give in the 'freqs' vector.
[1118]1356 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1357 IF 1 gets restfreq 2e9.
[1846]1358
[1395]1359 You can also specify the frequencies via a linecatalog.
[1153]1360
[931]1361 Parameters:
[1846]1362
[931]1363 freqs: list of rest frequency values or string idenitfiers
[1855]1364
[931]1365 unit: unit for rest frequency (default 'Hz')
[402]1366
[1846]1367
1368 Example::
1369
[1819]1370 # set the given restfrequency for the all currently selected IFs
[931]1371 scan.set_restfreqs(freqs=1.4e9)
[1845]1372 # set restfrequencies for the n IFs (n > 1) in the order of the
1373 # list, i.e
1374 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1375 # len(list_of_restfreqs) == nIF
1376 # for nIF == 1 the following will set multiple restfrequency for
1377 # that IF
[1819]1378 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1379 # set multiple restfrequencies per IF. as a list of lists where
1380 # the outer list has nIF elements, the inner s arbitrary
1381 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1382
[1846]1383 *Note*:
[1845]1384
[931]1385 To do more sophisticate Restfrequency setting, e.g. on a
1386 source and IF basis, use scantable.set_selection() before using
[1846]1387 this function::
[931]1388
[1846]1389 # provided your scantable is called scan
1390 selection = selector()
1391 selection.set_name("ORION*")
1392 selection.set_ifs([1])
1393 scan.set_selection(selection)
1394 scan.set_restfreqs(freqs=86.6e9)
1395
[931]1396 """
1397 varlist = vars()
[1157]1398 from asap import linecatalog
1399 # simple value
[1118]1400 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1401 self._setrestfreqs([freqs], [""], unit)
[1157]1402 # list of values
[1118]1403 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1404 # list values are scalars
[1118]1405 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1406 if len(freqs) == 1:
1407 self._setrestfreqs(freqs, [""], unit)
1408 else:
1409 # allow the 'old' mode of setting mulitple IFs
1410 sel = selector()
1411 savesel = self._getselection()
1412 iflist = self.getifnos()
1413 if len(freqs)>len(iflist):
1414 raise ValueError("number of elements in list of list "
1415 "exeeds the current IF selections")
1416 iflist = self.getifnos()
1417 for i, fval in enumerate(freqs):
1418 sel.set_ifs(iflist[i])
1419 self._setselection(sel)
1420 self._setrestfreqs([fval], [""], unit)
1421 self._setselection(savesel)
1422
1423 # list values are dict, {'value'=, 'name'=)
[1157]1424 elif isinstance(freqs[-1], dict):
[1845]1425 values = []
1426 names = []
1427 for d in freqs:
1428 values.append(d["value"])
1429 names.append(d["name"])
1430 self._setrestfreqs(values, names, unit)
[1819]1431 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1432 sel = selector()
1433 savesel = self._getselection()
[1322]1434 iflist = self.getifnos()
[1819]1435 if len(freqs)>len(iflist):
[1845]1436 raise ValueError("number of elements in list of list exeeds"
1437 " the current IF selections")
1438 for i, fval in enumerate(freqs):
[1322]1439 sel.set_ifs(iflist[i])
[1259]1440 self._setselection(sel)
[1845]1441 self._setrestfreqs(fval, [""], unit)
[1157]1442 self._setselection(savesel)
1443 # freqs are to be taken from a linecatalog
[1153]1444 elif isinstance(freqs, linecatalog):
1445 sel = selector()
1446 savesel = self._getselection()
1447 for i in xrange(freqs.nrow()):
[1322]1448 sel.set_ifs(iflist[i])
[1153]1449 self._setselection(sel)
[1845]1450 self._setrestfreqs([freqs.get_frequency(i)],
1451 [freqs.get_name(i)], "MHz")
[1153]1452 # ensure that we are not iterating past nIF
1453 if i == self.nif()-1: break
1454 self._setselection(savesel)
[931]1455 else:
1456 return
1457 self._add_history("set_restfreqs", varlist)
1458
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta: the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate directly to the base class (C++ binding) implementation
        Scantable.shift_refpix(self, delta)
[931]1474
[1259]1475 def history(self, filename=None):
[1846]1476 """\
[1259]1477 Print the history. Optionally to a file.
[1846]1478
[1348]1479 Parameters:
[1846]1480
[1348]1481 filename: The name of the file to save the history to.
[1846]1482
[1259]1483 """
[484]1484 hist = list(self._gethistory())
[794]1485 out = "-"*80
[484]1486 for h in hist:
[489]1487 if h.startswith("---"):
[794]1488 out += "\n"+h
[489]1489 else:
1490 items = h.split("##")
1491 date = items[0]
1492 func = items[1]
1493 items = items[2:]
[794]1494 out += "\n"+date+"\n"
1495 out += "Function: %s\n Parameters:" % (func)
[489]1496 for i in items:
1497 s = i.split("=")
[1118]1498 out += "\n %s = %s" % (s[0], s[1])
[794]1499 out += "\n"+"-"*80
[1259]1500 if filename is not None:
1501 if filename is "":
1502 filename = 'scantable_history.txt'
1503 import os
1504 filename = os.path.expandvars(os.path.expanduser(filename))
1505 if not os.path.isdir(filename):
1506 data = open(filename, 'w')
1507 data.write(out)
1508 data.close()
1509 else:
1510 msg = "Illegal file name '%s'." % (filename)
1511 if rcParams['verbose']:
[1819]1512 #print msg
1513 asaplog.push( msg )
1514 print_log( 'ERROR' )
[1259]1515 else:
1516 raise IOError(msg)
1517 if rcParams['verbose']:
1518 try:
1519 from IPython.genutils import page as pager
1520 except ImportError:
1521 from pydoc import pager
1522 pager(out)
1523 else:
1524 return out
[484]1525 return
[513]1526 #
1527 # Maths business
1528 #
[1589]1529 @print_log_dec
[931]1530 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1531 """\
[1070]1532 Return the (time) weighted average of a scan.
[1846]1533
1534 *Note*:
1535
[1070]1536 in channels only - align if necessary
[1846]1537
[513]1538 Parameters:
[1846]1539
[513]1540 mask: an optional mask (only used for 'var' and 'tsys'
1541 weighting)
[1855]1542
[558]1543 scanav: True averages each scan separately
1544 False (default) averages all scans together,
[1855]1545
[1099]1546 weight: Weighting scheme.
1547 'none' (mean no weight)
1548 'var' (1/var(spec) weighted)
1549 'tsys' (1/Tsys**2 weighted)
1550 'tint' (integration time weighted)
1551 'tintsys' (Tint/Tsys**2)
1552 'median' ( median averaging)
[535]1553 The default is 'tint'
[1855]1554
[931]1555 align: align the spectra in velocity before averaging. It takes
1556 the time of the first spectrum as reference time.
[1846]1557
1558 Example::
1559
[513]1560 # time average the scantable without using a mask
[710]1561 newscan = scan.average_time()
[1846]1562
[513]1563 """
1564 varlist = vars()
[1593]1565 weight = weight or 'TINT'
1566 mask = mask or ()
1567 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1568 scan = (self, )
[989]1569 try:
[1118]1570 if align:
1571 scan = (self.freq_align(insitu=False), )
1572 s = None
1573 if weight.upper() == 'MEDIAN':
1574 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1575 scanav))
1576 else:
1577 s = scantable(self._math._average(scan, mask, weight.upper(),
1578 scanav))
1579 except RuntimeError, msg:
[989]1580 if rcParams['verbose']:
[1819]1581 #print msg
1582 print_log()
1583 asaplog.push( str(msg) )
1584 print_log( 'ERROR' )
[989]1585 return
1586 else: raise
[1099]1587 s._add_history("average_time", varlist)
[1819]1588 print_log()
[513]1589 return s
[710]1590
[1589]1591 @print_log_dec
[876]1592 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1593 """\
[513]1594 Return a scan where all spectra are converted to either
1595 Jansky or Kelvin depending upon the flux units of the scan table.
1596 By default the function tries to look the values up internally.
1597 If it can't find them (or if you want to over-ride), you must
1598 specify EITHER jyperk OR eta (and D which it will try to look up
1599 also if you don't set it). jyperk takes precedence if you set both.
[1846]1600
[513]1601 Parameters:
[1846]1602
[513]1603 jyperk: the Jy / K conversion factor
[1855]1604
[513]1605 eta: the aperture efficiency
[1855]1606
[513]1607 d: the geomtric diameter (metres)
[1855]1608
[513]1609 insitu: if False a new scantable is returned.
1610 Otherwise, the scaling is done in-situ
1611 The default is taken from .asaprc (False)
[1846]1612
[513]1613 """
1614 if insitu is None: insitu = rcParams['insitu']
[876]1615 self._math._setinsitu(insitu)
[513]1616 varlist = vars()
[1593]1617 jyperk = jyperk or -1.0
1618 d = d or -1.0
1619 eta = eta or -1.0
[876]1620 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1621 s._add_history("convert_flux", varlist)
[1819]1622 print_log()
[876]1623 if insitu: self._assign(s)
1624 else: return s
[513]1625
[1589]1626 @print_log_dec
[876]1627 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1628 """\
[513]1629 Return a scan after applying a gain-elevation correction.
1630 The correction can be made via either a polynomial or a
1631 table-based interpolation (and extrapolation if necessary).
1632 You specify polynomial coefficients, an ascii table or neither.
1633 If you specify neither, then a polynomial correction will be made
1634 with built in coefficients known for certain telescopes (an error
1635 will occur if the instrument is not known).
1636 The data and Tsys are *divided* by the scaling factors.
[1846]1637
[513]1638 Parameters:
[1846]1639
[513]1640 poly: Polynomial coefficients (default None) to compute a
1641 gain-elevation correction as a function of
1642 elevation (in degrees).
[1855]1643
[513]1644 filename: The name of an ascii file holding correction factors.
1645 The first row of the ascii file must give the column
1646 names and these MUST include columns
1647 "ELEVATION" (degrees) and "FACTOR" (multiply data
1648 by this) somewhere.
1649 The second row must give the data type of the
1650 column. Use 'R' for Real and 'I' for Integer.
1651 An example file would be
1652 (actual factors are arbitrary) :
1653
1654 TIME ELEVATION FACTOR
1655 R R R
1656 0.1 0 0.8
1657 0.2 20 0.85
1658 0.3 40 0.9
1659 0.4 60 0.85
1660 0.5 80 0.8
1661 0.6 90 0.75
[1855]1662
[513]1663 method: Interpolation method when correcting from a table.
1664 Values are "nearest", "linear" (default), "cubic"
1665 and "spline"
[1855]1666
[513]1667 insitu: if False a new scantable is returned.
1668 Otherwise, the scaling is done in-situ
1669 The default is taken from .asaprc (False)
[1846]1670
[513]1671 """
1672
1673 if insitu is None: insitu = rcParams['insitu']
[876]1674 self._math._setinsitu(insitu)
[513]1675 varlist = vars()
[1593]1676 poly = poly or ()
[513]1677 from os.path import expandvars
1678 filename = expandvars(filename)
[876]1679 s = scantable(self._math._gainel(self, poly, filename, method))
1680 s._add_history("gain_el", varlist)
[1819]1681 print_log()
[1593]1682 if insitu:
1683 self._assign(s)
1684 else:
1685 return s
[710]1686
[1589]1687 @print_log_dec
[931]1688 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1689 """\
[513]1690 Return a scan where all rows have been aligned in frequency/velocity.
1691 The alignment frequency frame (e.g. LSRK) is that set by function
1692 set_freqframe.
[1846]1693
[513]1694 Parameters:
[1855]1695
[513]1696 reftime: reference time to align at. By default, the time of
1697 the first row of data is used.
[1855]1698
[513]1699 method: Interpolation method for regridding the spectra.
1700 Choose from "nearest", "linear", "cubic" (default)
1701 and "spline"
[1855]1702
[513]1703 insitu: if False a new scantable is returned.
1704 Otherwise, the scaling is done in-situ
1705 The default is taken from .asaprc (False)
[1846]1706
[513]1707 """
[931]1708 if insitu is None: insitu = rcParams["insitu"]
[876]1709 self._math._setinsitu(insitu)
[513]1710 varlist = vars()
[1593]1711 reftime = reftime or ""
[931]1712 s = scantable(self._math._freq_align(self, reftime, method))
[876]1713 s._add_history("freq_align", varlist)
[1819]1714 print_log()
[876]1715 if insitu: self._assign(s)
1716 else: return s
[513]1717
[1589]1718 @print_log_dec
[1725]1719 def opacity(self, tau=None, insitu=None):
[1846]1720 """\
[513]1721 Apply an opacity correction. The data
1722 and Tsys are multiplied by the correction factor.
[1846]1723
[513]1724 Parameters:
[1855]1725
[1689]1726 tau: (list of) opacity from which the correction factor is
[513]1727 exp(tau*ZD)
[1689]1728 where ZD is the zenith-distance.
1729 If a list is provided, it has to be of length nIF,
1730 nIF*nPol or 1 and in order of IF/POL, e.g.
1731 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1732 if tau is `None` the opacities are determined from a
1733 model.
[1855]1734
[513]1735 insitu: if False a new scantable is returned.
1736 Otherwise, the scaling is done in-situ
1737 The default is taken from .asaprc (False)
[1846]1738
[513]1739 """
1740 if insitu is None: insitu = rcParams['insitu']
[876]1741 self._math._setinsitu(insitu)
[513]1742 varlist = vars()
[1689]1743 if not hasattr(tau, "__len__"):
1744 tau = [tau]
[876]1745 s = scantable(self._math._opacity(self, tau))
1746 s._add_history("opacity", varlist)
[1819]1747 print_log()
[876]1748 if insitu: self._assign(s)
1749 else: return s
[513]1750
[1589]1751 @print_log_dec
[513]1752 def bin(self, width=5, insitu=None):
[1846]1753 """\
[513]1754 Return a scan where all spectra have been binned up.
[1846]1755
[1348]1756 Parameters:
[1846]1757
[513]1758 width: The bin width (default=5) in pixels
[1855]1759
[513]1760 insitu: if False a new scantable is returned.
1761 Otherwise, the scaling is done in-situ
1762 The default is taken from .asaprc (False)
[1846]1763
[513]1764 """
1765 if insitu is None: insitu = rcParams['insitu']
[876]1766 self._math._setinsitu(insitu)
[513]1767 varlist = vars()
[876]1768 s = scantable(self._math._bin(self, width))
[1118]1769 s._add_history("bin", varlist)
[1819]1770 print_log()
[1589]1771 if insitu:
1772 self._assign(s)
1773 else:
1774 return s
[513]1775
[1589]1776 @print_log_dec
[513]1777 def resample(self, width=5, method='cubic', insitu=None):
[1846]1778 """\
[1348]1779 Return a scan where all spectra have been binned up.
[1573]1780
[1348]1781 Parameters:
[1846]1782
[513]1783 width: The bin width (default=5) in pixels
[1855]1784
[513]1785 method: Interpolation method when correcting from a table.
1786 Values are "nearest", "linear", "cubic" (default)
1787 and "spline"
[1855]1788
[513]1789 insitu: if False a new scantable is returned.
1790 Otherwise, the scaling is done in-situ
1791 The default is taken from .asaprc (False)
[1846]1792
[513]1793 """
1794 if insitu is None: insitu = rcParams['insitu']
[876]1795 self._math._setinsitu(insitu)
[513]1796 varlist = vars()
[876]1797 s = scantable(self._math._resample(self, method, width))
[1118]1798 s._add_history("resample", varlist)
[1819]1799 print_log()
[876]1800 if insitu: self._assign(s)
1801 else: return s
[513]1802
[1589]1803 @print_log_dec
[946]1804 def average_pol(self, mask=None, weight='none'):
[1846]1805 """\
[946]1806 Average the Polarisations together.
[1846]1807
[946]1808 Parameters:
[1846]1809
[946]1810 mask: An optional mask defining the region, where the
1811 averaging will be applied. The output will have all
1812 specified points masked.
[1855]1813
[946]1814 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1815 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1816
[946]1817 """
1818 varlist = vars()
[1593]1819 mask = mask or ()
[1010]1820 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1821 s._add_history("average_pol", varlist)
[1819]1822 print_log()
[992]1823 return s
[513]1824
[1589]1825 @print_log_dec
[1145]1826 def average_beam(self, mask=None, weight='none'):
[1846]1827 """\
[1145]1828 Average the Beams together.
[1846]1829
[1145]1830 Parameters:
1831 mask: An optional mask defining the region, where the
1832 averaging will be applied. The output will have all
1833 specified points masked.
[1855]1834
[1145]1835 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1836 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1837
[1145]1838 """
1839 varlist = vars()
[1593]1840 mask = mask or ()
[1145]1841 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1842 s._add_history("average_beam", varlist)
[1819]1843 print_log()
[1145]1844 return s
1845
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag: Bool indicating whether to turn this on (True) or
                   off (False)

        """
        varlist = vars()
        # delegate to the C++ binding; record the call in the history
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
1860
[1589]1861 @print_log_dec
[992]1862 def convert_pol(self, poltype=None):
[1846]1863 """\
[992]1864 Convert the data to a different polarisation type.
[1565]1865 Note that you will need cross-polarisation terms for most conversions.
[1846]1866
[992]1867 Parameters:
[1855]1868
[992]1869 poltype: The new polarisation type. Valid types are:
[1565]1870 "linear", "circular", "stokes" and "linpol"
[1846]1871
[992]1872 """
1873 varlist = vars()
1874 try:
1875 s = scantable(self._math._convertpol(self, poltype))
[1118]1876 except RuntimeError, msg:
[992]1877 if rcParams['verbose']:
[1819]1878 #print msg
1879 print_log()
1880 asaplog.push( str(msg) )
1881 print_log( 'ERROR' )
[1118]1882 return
[992]1883 else:
1884 raise
[1118]1885 s._add_history("convert_pol", varlist)
[1819]1886 print_log()
[992]1887 return s
1888
    @print_log_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defauls to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polnomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each indivual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data for plotting / reverting
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive review: show original vs smoothed, row by row
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        print_log()
        if insitu: self._assign(s)
        else: return s
[513]1967
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
                      insitu=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'

            uselin: use linear polynomial fit

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        Example::

            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # default: fit using every channel
            mask = [True for i in xrange(self.nchan(-1))]

        from asap.asapfitter import fitter
        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            rows = range(workscan.nrow())
            if len(rows) > 0:
                # per-row fit parameters collected for later inspection
                self.blpars = []

            for r in rows:
                # take into account flagtra info (CAS-1434)
                flagtra = workscan._getmask(r)
                actualmask = mask[:]
                if len(actualmask) == 0:
                    actualmask = list(flagtra[:])
                else:
                    if len(actualmask) != len(flagtra):
                        raise RuntimeError, "Mask and flagtra have different length"
                    else:
                        # AND the user mask with the channel flags
                        for i in range(0, len(actualmask)):
                            actualmask[i] = actualmask[i] and flagtra[i]
                f.set_scan(workscan, actualmask)
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                f.data = None
                f.fit()
                if plot:
                    # interactive approval of each row's fit
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected fits are recorded as None
                        self.blpars.append(None)
                        continue
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            print_log()
            if insitu: self._assign(workscan)
            else: return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError(msg)
[513]2061
[1819]2062
[1118]2063 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]2064 threshold=3, chan_avg_limit=1, plot=False,
2065 insitu=None):
[1846]2066 """\
[880]2067 Return a scan which has been baselined (all rows) by a polynomial.
2068 Spectral lines are detected first using linefinder and masked out
2069 to avoid them affecting the baseline solution.
2070
2071 Parameters:
[1846]2072
[880]2073 mask: an optional mask retreived from scantable
[1846]2074
2075 edge: an optional number of channel to drop at the edge of
2076 spectrum. If only one value is
[880]2077 specified, the same number will be dropped from
2078 both sides of the spectrum. Default is to keep
[907]2079 all channels. Nested tuples represent individual
[976]2080 edge selection for different IFs (a number of spectral
2081 channels can be different)
[1846]2082
[880]2083 order: the order of the polynomial (default is 0)
[1846]2084
[880]2085 threshold: the threshold used by line finder. It is better to
2086 keep it large as only strong lines affect the
2087 baseline solution.
[1846]2088
[1280]2089 chan_avg_limit:
2090 a maximum number of consequtive spectral channels to
2091 average during the search of weak and broad lines.
2092 The default is no averaging (and no search for weak
2093 lines). If such lines can affect the fitted baseline
2094 (e.g. a high order polynomial is fitted), increase this
2095 parameter (usually values up to 8 are reasonable). Most
2096 users of this method should find the default value
2097 sufficient.
[1846]2098
[1061]2099 plot: plot the fit and the residual. In this each
2100 indivual fit has to be approved, by typing 'y'
2101 or 'n'
[1846]2102
[880]2103 insitu: if False a new scantable is returned.
2104 Otherwise, the scaling is done in-situ
2105 The default is taken from .asaprc (False)
2106
[1846]2107
2108 Example::
2109
2110 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2111
[880]2112 """
2113 if insitu is None: insitu = rcParams['insitu']
2114 varlist = vars()
2115 from asap.asapfitter import fitter
2116 from asap.asaplinefind import linefinder
2117 from asap import _is_sequence_or_number as _is_valid
2118
[976]2119 # check whether edge is set up for each IF individually
[1118]2120 individualedge = False;
2121 if len(edge) > 1:
2122 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2123 individualedge = True;
[907]2124
[1118]2125 if not _is_valid(edge, int) and not individualedge:
[909]2126 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2127 pair of integers specified as a tuple. Nested tuples are allowed \
2128 to make individual selection for different IFs."
[919]2129
[1118]2130 curedge = (0, 0)
2131 if individualedge:
2132 for edgepar in edge:
2133 if not _is_valid(edgepar, int):
2134 raise ValueError, "Each element of the 'edge' tuple has \
2135 to be a pair of integers or an integer."
[907]2136 else:
[1118]2137 curedge = edge;
[880]2138
2139 # setup fitter
2140 f = fitter()
2141 f.set_function(poly=order)
2142
2143 # setup line finder
[1118]2144 fl = linefinder()
[1268]2145 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2146
2147 if not insitu:
[1118]2148 workscan = self.copy()
[880]2149 else:
[1118]2150 workscan = self
[880]2151
[907]2152 fl.set_scan(workscan)
2153
[1118]2154 rows = range(workscan.nrow())
[1819]2155 # Save parameters of baseline fits & masklists as a class attribute.
2156 # NOTICE: It does not reflect changes in scantable!
2157 if len(rows) > 0:
2158 self.blpars=[]
2159 self.masklists=[]
[880]2160 asaplog.push("Processing:")
2161 for r in rows:
[1118]2162 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2163 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2164 workscan.getpol(r), workscan.getcycle(r))
[880]2165 asaplog.push(msg, False)
[907]2166
[976]2167 # figure out edge parameter
[1118]2168 if individualedge:
2169 if len(edge) >= workscan.getif(r):
2170 raise RuntimeError, "Number of edge elements appear to " \
2171 "be less than the number of IFs"
2172 curedge = edge[workscan.getif(r)]
[919]2173
[1819]2174 # take into account flagtra info (CAS-1434)
2175 flagtra = workscan._getmask(r)
2176 actualmask = mask[:]
2177 if len(actualmask) == 0:
2178 actualmask = list(flagtra[:])
2179 else:
2180 if len(actualmask) != len(flagtra):
2181 raise RuntimeError, "Mask and flagtra have different length"
2182 else:
2183 for i in range(0, len(actualmask)):
2184 actualmask[i] = actualmask[i] and flagtra[i]
2185
[976]2186 # setup line finder
[1819]2187 fl.find_lines(r, actualmask, curedge)
2188 outmask=fl.get_mask()
2189 f.set_scan(workscan, fl.get_mask())
2190 f.x = workscan._getabcissa(r)
2191 f.y = workscan._getspectrum(r)
2192 f.data = None
[880]2193 f.fit()
[1819]2194
2195 # Show mask list
2196 masklist=workscan.get_masklist(fl.get_mask(),row=r)
2197 msg = "mask range: "+str(masklist)
2198 asaplog.push(msg, False)
2199
[1061]2200 if plot:
2201 f.plot(residual=True)
2202 x = raw_input("Accept fit ( [y]/n ): ")
2203 if x.upper() == 'N':
[1819]2204 self.blpars.append(None)
2205 self.masklists.append(None)
[1061]2206 continue
[1819]2207
[880]2208 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2209 self.blpars.append(f.get_parameters())
2210 self.masklists.append(masklist)
[1061]2211 if plot:
2212 f._p.unmap()
2213 f._p = None
2214 workscan._add_history("auto_poly_baseline", varlist)
[880]2215 if insitu:
2216 self._assign(workscan)
2217 else:
2218 return workscan
2219
[1589]2220 @print_log_dec
[914]2221 def rotate_linpolphase(self, angle):
[1846]2222 """\
[914]2223 Rotate the phase of the complex polarization O=Q+iU correlation.
2224 This is always done in situ in the raw data. So if you call this
2225 function more than once then each call rotates the phase further.
[1846]2226
[914]2227 Parameters:
[1846]2228
[914]2229 angle: The angle (degrees) to rotate (add) by.
[1846]2230
2231 Example::
2232
[914]2233 scan.rotate_linpolphase(2.3)
[1846]2234
[914]2235 """
2236 varlist = vars()
[936]2237 self._math._rotate_linpolphase(self, angle)
[914]2238 self._add_history("rotate_linpolphase", varlist)
[1819]2239 print_log()
[914]2240 return
[710]2241
[1589]2242 @print_log_dec
[914]2243 def rotate_xyphase(self, angle):
[1846]2244 """\
[914]2245 Rotate the phase of the XY correlation. This is always done in situ
2246 in the data. So if you call this function more than once
2247 then each call rotates the phase further.
[1846]2248
[914]2249 Parameters:
[1846]2250
[914]2251 angle: The angle (degrees) to rotate (add) by.
[1846]2252
2253 Example::
2254
[914]2255 scan.rotate_xyphase(2.3)
[1846]2256
[914]2257 """
2258 varlist = vars()
[936]2259 self._math._rotate_xyphase(self, angle)
[914]2260 self._add_history("rotate_xyphase", varlist)
[1819]2261 print_log()
[914]2262 return
2263
[1589]2264 @print_log_dec
[914]2265 def swap_linears(self):
[1846]2266 """\
[1573]2267 Swap the linear polarisations XX and YY, or better the first two
[1348]2268 polarisations as this also works for ciculars.
[914]2269 """
2270 varlist = vars()
[936]2271 self._math._swap_linears(self)
[914]2272 self._add_history("swap_linears", varlist)
[1819]2273 print_log()
[914]2274 return
2275
[1589]2276 @print_log_dec
[914]2277 def invert_phase(self):
[1846]2278 """\
[914]2279 Invert the phase of the complex polarisation
2280 """
2281 varlist = vars()
[936]2282 self._math._invert_phase(self)
[914]2283 self._add_history("invert_phase", varlist)
2284 return
2285
[1589]2286 @print_log_dec
[876]2287 def add(self, offset, insitu=None):
[1846]2288 """\
[513]2289 Return a scan where all spectra have the offset added
[1846]2290
[513]2291 Parameters:
[1846]2292
[513]2293 offset: the offset
[1855]2294
[513]2295 insitu: if False a new scantable is returned.
2296 Otherwise, the scaling is done in-situ
2297 The default is taken from .asaprc (False)
[1846]2298
[513]2299 """
2300 if insitu is None: insitu = rcParams['insitu']
[876]2301 self._math._setinsitu(insitu)
[513]2302 varlist = vars()
[876]2303 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2304 s._add_history("add", varlist)
[876]2305 if insitu:
2306 self._assign(s)
2307 else:
[513]2308 return s
2309
[1589]2310 @print_log_dec
[1308]2311 def scale(self, factor, tsys=True, insitu=None):
[1846]2312 """\
2313
[513]2314 Return a scan where all spectra are scaled by the give 'factor'
[1846]2315
[513]2316 Parameters:
[1846]2317
[1819]2318 factor: the scaling factor (float or 1D float list)
[1855]2319
[513]2320 insitu: if False a new scantable is returned.
2321 Otherwise, the scaling is done in-situ
2322 The default is taken from .asaprc (False)
[1855]2323
[513]2324 tsys: if True (default) then apply the operation to Tsys
2325 as well as the data
[1846]2326
[513]2327 """
2328 if insitu is None: insitu = rcParams['insitu']
[876]2329 self._math._setinsitu(insitu)
[513]2330 varlist = vars()
[1819]2331 s = None
2332 import numpy
2333 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2334 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2335 from asapmath import _array2dOp
2336 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2337 else:
2338 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2339 else:
2340 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2341 s._add_history("scale", varlist)
[1819]2342 print_log()
[876]2343 if insitu:
2344 self._assign(s)
2345 else:
[513]2346 return s
2347
[1504]2348 def set_sourcetype(self, match, matchtype="pattern",
2349 sourcetype="reference"):
[1846]2350 """\
[1502]2351 Set the type of the source to be an source or reference scan
[1846]2352 using the provided pattern.
2353
[1502]2354 Parameters:
[1846]2355
[1504]2356 match: a Unix style pattern, regular expression or selector
[1855]2357
[1504]2358 matchtype: 'pattern' (default) UNIX style pattern or
2359 'regex' regular expression
[1855]2360
[1502]2361 sourcetype: the type of the source to use (source/reference)
[1846]2362
[1502]2363 """
2364 varlist = vars()
2365 basesel = self.get_selection()
2366 stype = -1
2367 if sourcetype.lower().startswith("r"):
2368 stype = 1
2369 elif sourcetype.lower().startswith("s"):
2370 stype = 0
[1504]2371 else:
[1502]2372 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2373 if matchtype.lower().startswith("p"):
2374 matchtype = "pattern"
2375 elif matchtype.lower().startswith("r"):
2376 matchtype = "regex"
2377 else:
2378 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2379 sel = selector()
2380 if isinstance(match, selector):
2381 sel = match
2382 else:
[1504]2383 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2384 self.set_selection(basesel+sel)
2385 self._setsourcetype(stype)
2386 self.set_selection(basesel)
[1573]2387 self._add_history("set_sourcetype", varlist)
[1502]2388
[1589]2389 @print_log_dec
[1819]2390 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2391 """\
[670]2392 This function allows to build quotients automatically.
[1819]2393 It assumes the observation to have the same number of
[670]2394 "ons" and "offs"
[1846]2395
[670]2396 Parameters:
[1846]2397
[710]2398 preserve: you can preserve (default) the continuum or
2399 remove it. The equations used are
[670]2400 preserve: Output = Toff * (on/off) - Toff
[1070]2401 remove: Output = Toff * (on/off) - Ton
[1855]2402
[1573]2403 mode: the on/off detection mode
[1348]2404 'paired' (default)
2405 identifies 'off' scans by the
2406 trailing '_R' (Mopra/Parkes) or
2407 '_e'/'_w' (Tid) and matches
2408 on/off pairs from the observing pattern
[1502]2409 'time'
2410 finds the closest off in time
[1348]2411
[670]2412 """
[1348]2413 modes = ["time", "paired"]
[670]2414 if not mode in modes:
[876]2415 msg = "please provide valid mode. Valid modes are %s" % (modes)
2416 raise ValueError(msg)
2417 varlist = vars()
[1348]2418 s = None
2419 if mode.lower() == "paired":
2420 basesel = self.get_selection()
[1356]2421 sel = selector()+basesel
2422 sel.set_query("SRCTYPE==1")
2423 self.set_selection(sel)
[1348]2424 offs = self.copy()
2425 sel.set_query("SRCTYPE==0")
[1356]2426 self.set_selection(sel)
[1348]2427 ons = self.copy()
2428 s = scantable(self._math._quotient(ons, offs, preserve))
2429 self.set_selection(basesel)
2430 elif mode.lower() == "time":
2431 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2432 s._add_history("auto_quotient", varlist)
[1819]2433 print_log()
[876]2434 return s
[710]2435
[1589]2436 @print_log_dec
[1145]2437 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2438 """\
[1143]2439 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2440
[1143]2441 Parameters:
[1846]2442
[1145]2443 mask: an optional mask to be used when weight == 'stddev'
[1855]2444
[1143]2445 weight: How to average the off beams. Default is 'median'.
[1855]2446
[1145]2447 preserve: you can preserve (default) the continuum or
[1855]2448 remove it. The equations used are:
[1846]2449
[1855]2450 preserve: Output = Toff * (on/off) - Toff
2451
2452 remove: Output = Toff * (on/off) - Ton
2453
[1217]2454 """
[1593]2455 mask = mask or ()
[1141]2456 varlist = vars()
2457 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2458 preoff = scantable(self._math._mx_extract(self, 'off'))
2459 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2460 from asapmath import quotient
[1145]2461 q = quotient(on, off, preserve)
[1143]2462 q._add_history("mx_quotient", varlist)
[1819]2463 print_log()
[1217]2464 return q
[513]2465
[1589]2466 @print_log_dec
[718]2467 def freq_switch(self, insitu=None):
[1846]2468 """\
[718]2469 Apply frequency switching to the data.
[1846]2470
[718]2471 Parameters:
[1846]2472
[718]2473 insitu: if False a new scantable is returned.
2474 Otherwise, the swictching is done in-situ
2475 The default is taken from .asaprc (False)
[1846]2476
[718]2477 """
2478 if insitu is None: insitu = rcParams['insitu']
[876]2479 self._math._setinsitu(insitu)
[718]2480 varlist = vars()
[876]2481 s = scantable(self._math._freqswitch(self))
[1118]2482 s._add_history("freq_switch", varlist)
[1819]2483 print_log()
[876]2484 if insitu: self._assign(s)
2485 else: return s
[718]2486
[1589]2487 @print_log_dec
[780]2488 def recalc_azel(self):
[1846]2489 """Recalculate the azimuth and elevation for each position."""
[780]2490 varlist = vars()
[876]2491 self._recalcazel()
[780]2492 self._add_history("recalc_azel", varlist)
[1819]2493 print_log()
[780]2494 return
2495
[1589]2496 @print_log_dec
[513]2497 def __add__(self, other):
2498 varlist = vars()
2499 s = None
2500 if isinstance(other, scantable):
[1573]2501 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2502 elif isinstance(other, float):
[876]2503 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2504 else:
[718]2505 raise TypeError("Other input is not a scantable or float value")
[513]2506 s._add_history("operator +", varlist)
2507 return s
2508
[1589]2509 @print_log_dec
[513]2510 def __sub__(self, other):
2511 """
2512 implicit on all axes and on Tsys
2513 """
2514 varlist = vars()
2515 s = None
2516 if isinstance(other, scantable):
[1588]2517 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2518 elif isinstance(other, float):
[876]2519 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2520 else:
[718]2521 raise TypeError("Other input is not a scantable or float value")
[513]2522 s._add_history("operator -", varlist)
2523 return s
[710]2524
[1589]2525 @print_log_dec
[513]2526 def __mul__(self, other):
2527 """
2528 implicit on all axes and on Tsys
2529 """
2530 varlist = vars()
2531 s = None
2532 if isinstance(other, scantable):
[1588]2533 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2534 elif isinstance(other, float):
[876]2535 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2536 else:
[718]2537 raise TypeError("Other input is not a scantable or float value")
[513]2538 s._add_history("operator *", varlist)
2539 return s
2540
[710]2541
[1589]2542 @print_log_dec
[513]2543 def __div__(self, other):
2544 """
2545 implicit on all axes and on Tsys
2546 """
2547 varlist = vars()
2548 s = None
2549 if isinstance(other, scantable):
[1589]2550 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2551 elif isinstance(other, float):
2552 if other == 0.0:
[718]2553 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2554 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2555 else:
[718]2556 raise TypeError("Other input is not a scantable or float value")
[513]2557 s._add_history("operator /", varlist)
2558 return s
2559
[530]2560 def get_fit(self, row=0):
[1846]2561 """\
[530]2562 Print or return the stored fits for a row in the scantable
[1846]2563
[530]2564 Parameters:
[1846]2565
[530]2566 row: the row which the fit has been applied to.
[1846]2567
[530]2568 """
2569 if row > self.nrow():
2570 return
[976]2571 from asap.asapfit import asapfit
[530]2572 fit = asapfit(self._getfit(row))
[718]2573 if rcParams['verbose']:
[1819]2574 #print fit
2575 asaplog.push( '%s' %(fit) )
2576 print_log()
[530]2577 return
2578 else:
2579 return fit.as_dict()
2580
[1483]2581 def flag_nans(self):
[1846]2582 """\
[1483]2583 Utility function to flag NaN values in the scantable.
2584 """
2585 import numpy
2586 basesel = self.get_selection()
2587 for i in range(self.nrow()):
[1589]2588 sel = self.get_row_selector(i)
2589 self.set_selection(basesel+sel)
[1483]2590 nans = numpy.isnan(self._getspectrum(0))
2591 if numpy.any(nans):
2592 bnans = [ bool(v) for v in nans]
2593 self.flag(bnans)
2594 self.set_selection(basesel)
2595
[1588]2596 def get_row_selector(self, rowno):
2597 return selector(beams=self.getbeam(rowno),
2598 ifs=self.getif(rowno),
2599 pols=self.getpol(rowno),
2600 scans=self.getscan(rowno),
2601 cycles=self.getcycle(rowno))
[1573]2602
[484]2603 def _add_history(self, funcname, parameters):
[1435]2604 if not rcParams['scantable.history']:
2605 return
[484]2606 # create date
2607 sep = "##"
2608 from datetime import datetime
2609 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2610 hist = dstr+sep
2611 hist += funcname+sep#cdate+sep
2612 if parameters.has_key('self'): del parameters['self']
[1118]2613 for k, v in parameters.iteritems():
[484]2614 if type(v) is dict:
[1118]2615 for k2, v2 in v.iteritems():
[484]2616 hist += k2
2617 hist += "="
[1118]2618 if isinstance(v2, scantable):
[484]2619 hist += 'scantable'
2620 elif k2 == 'mask':
[1118]2621 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2622 hist += str(self._zip_mask(v2))
2623 else:
2624 hist += str(v2)
[484]2625 else:
[513]2626 hist += str(v2)
[484]2627 else:
2628 hist += k
2629 hist += "="
[1118]2630 if isinstance(v, scantable):
[484]2631 hist += 'scantable'
2632 elif k == 'mask':
[1118]2633 if isinstance(v, list) or isinstance(v, tuple):
[513]2634 hist += str(self._zip_mask(v))
2635 else:
2636 hist += str(v)
[484]2637 else:
2638 hist += str(v)
2639 hist += sep
2640 hist = hist[:-2] # remove trailing '##'
2641 self._addhistory(hist)
2642
[710]2643
[484]2644 def _zip_mask(self, mask):
2645 mask = list(mask)
2646 i = 0
2647 segments = []
2648 while mask[i:].count(1):
2649 i += mask[i:].index(1)
2650 if mask[i:].count(0):
2651 j = i + mask[i:].index(0)
2652 else:
[710]2653 j = len(mask)
[1118]2654 segments.append([i, j])
[710]2655 i = j
[484]2656 return segments
[714]2657
[626]2658 def _get_ordinate_label(self):
2659 fu = "("+self.get_fluxunit()+")"
2660 import re
2661 lbl = "Intensity"
[1118]2662 if re.match(".K.", fu):
[626]2663 lbl = "Brightness Temperature "+ fu
[1118]2664 elif re.match(".Jy.", fu):
[626]2665 lbl = "Flux density "+ fu
2666 return lbl
[710]2667
[876]2668 def _check_ifs(self):
2669 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2670 nchans = filter(lambda t: t > 0, nchans)
[876]2671 return (sum(nchans)/len(nchans) == nchans[0])
[976]2672
    def _fill(self, names, unit, average, getpt, antenna):
        """\
        Import one or more data files into this scantable (constructor
        helper).

        Parameters:

            names:      list of file names; '$VAR' and '~' are expanded
                        and each file must exist

            unit:       if not None, forwarded to set_fluxunit at the end

            average:    if True, integrations are averaged per scan while
                        filling

            getpt, antenna:
                        currently unused here -- the filler call that
                        would forward them is commented out below;
                        presumably honoured in other revisions (TODO
                        confirm)
        """
        first = True
        fullnames = []
        for name in names:
            # expand environment variables and '~' before checking existence
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    # verbose mode logs the error and returns silently
                    asaplog.push(msg)
                    print_log( 'ERROR' )
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage flag: 1 when .asaprc requests disk-based tables, else 0
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r.open(name)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # append subsequent files to what has been read so far
                tbl = self._math._merge([self, tbl])
            # rebind this object to the newly filled/merged table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2712
[1402]2713 def __getitem__(self, key):
2714 if key < 0:
2715 key += self.nrow()
2716 if key >= self.nrow():
2717 raise IndexError("Row index out of range.")
2718 return self._getspectrum(key)
2719
2720 def __setitem__(self, key, value):
2721 if key < 0:
2722 key += self.nrow()
2723 if key >= self.nrow():
2724 raise IndexError("Row index out of range.")
2725 if not hasattr(value, "__len__") or \
2726 len(value) > self.nchan(self.getif(key)):
2727 raise ValueError("Spectrum length doesn't match.")
2728 return self._setspectrum(value, key)
2729
2730 def __len__(self):
2731 return self.nrow()
2732
2733 def __iter__(self):
2734 for i in range(len(self)):
2735 yield self[i]
Note: See TracBrowser for help on using the repository browser.