source: trunk/python/scantable.py@ 1919

Last change on this file since 1919 was 1919, checked in by Takeshi Nakazato, 16 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: ori_sio_task_regression

Put in Release Notes: No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Changed variables for memory address in C++ int to long.
I don't believe that this change essentially fixes the problem.
However, it may improve the situation since 'long' is able to
represent larger integer value than 'int'.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.8 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]18from asap.asapfitter import fitter
[102]19
[1689]20
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        # Remember the active selection, run the wrapped callable and put
        # the selection back even if the call raises.
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
31
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A scantable is stored as a casacore table directory; any such table
    # whose type line is NOT 'Measurement Set' is treated as a scantable.
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        header = f.readline()
    finally:
        f.close()
    return header.find('Measurement Set') == -1
[1697]56
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A MeasurementSet is a casacore table directory whose table.info
    # first line mentions 'Measurement Set'.
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        header = f.readline()
    finally:
        f.close()
    return header.find('Measurement Set') != -1
77
[876]78class scantable(Scantable):
[1846]79 """\
80 The ASAP container for scans (single-dish data).
[102]81 """
[1819]82
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename: the name of an asap table on disk
                      or
                      the name of a rpfits/sdfits/ms file
                      (integrations within scans are auto averaged
                      and the whole file is read) or
                      [advanced] a reference to an existing scantable

            average: average all integrations within a scan on read.
                     The default (True) is taken from .asaprc.

            unit: brightness unit; must be consistent with K or Jy.
                  Over-rides the default selected by the filler
                  (input rpfits/sdfits/ms) or replaces the value
                  in existing scantables

            getpt: for MeasurementSet input data only:
                   If True, all pointing data are filled.
                   The default is False, which makes time to load
                   the MS data faster in some cases.
                   (now forwarded to the MS filler via **args)

            antenna: Antenna selection. integer (id) or string (name or id).
                     (now forwarded to the MS filler via **args)

            parallactify: Indicate that the data had been parallatified. Default
                          is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the constructor arguments for the history record before
        # any further locals are created
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference to an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: forward only the recognised optional
                    # filler arguments ('getpt', 'antenna') from **args
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the filler decide
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names to fill into one scantable
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]184
[1862]185 @asaplog_post_dec
[876]186 def save(self, name=None, format=None, overwrite=False):
[1846]187 """\
[1280]188 Store the scantable on disk. This can be an asap (aips++) Table,
189 SDFITS or MS2 format.
[1846]190
[116]191 Parameters:
[1846]192
[1093]193 name: the name of the outputfile. For format "ASCII"
194 this is the root file name (data in 'name'.txt
[497]195 and header in 'name'_header.txt)
[1855]196
[116]197 format: an optional file format. Default is ASAP.
[1855]198 Allowed are:
199
200 * 'ASAP' (save as ASAP [aips++] Table),
201 * 'SDFITS' (save as SDFITS file)
202 * 'ASCII' (saves as ascii text file)
203 * 'MS2' (saves as an casacore MeasurementSet V2)
204 * 'FITS' (save as image FITS - not readable by class)
205 * 'CLASS' (save as FITS readable by CLASS)
206
[411]207 overwrite: If the file should be overwritten if it exists.
[256]208 The default False is to return with warning
[411]209 without writing the output. USE WITH CARE.
[1855]210
[1846]211 Example::
212
[116]213 scan.save('myscan.asap')
[1118]214 scan.save('myscan.sdfits', 'SDFITS')
[1846]215
[116]216 """
[411]217 from os import path
[1593]218 format = format or rcParams['scantable.save']
[256]219 suffix = '.'+format.lower()
[1118]220 if name is None or name == "":
[256]221 name = 'scantable'+suffix
[718]222 msg = "No filename given. Using default name %s..." % name
223 asaplog.push(msg)
[411]224 name = path.expandvars(name)
[256]225 if path.isfile(name) or path.isdir(name):
226 if not overwrite:
[718]227 msg = "File %s exists." % name
[1859]228 raise IOError(msg)
[451]229 format2 = format.upper()
230 if format2 == 'ASAP':
[116]231 self._save(name)
232 else:
[989]233 from asap._asap import stwriter as stw
[1118]234 writer = stw(format2)
235 writer.write(self, name)
[116]236 return
237
[102]238 def copy(self):
[1846]239 """Return a copy of this scantable.
240
241 *Note*:
242
[1348]243 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]244
245 Example::
246
[102]247 copiedscan = scan.copy()
[1846]248
[102]249 """
[876]250 sd = scantable(Scantable._copy(self))
[113]251 return sd
252
[1093]253 def drop_scan(self, scanid=None):
[1846]254 """\
[1093]255 Return a new scantable where the specified scan number(s) has(have)
256 been dropped.
[1846]257
[1093]258 Parameters:
[1846]259
[1093]260 scanid: a (list of) scan number(s)
[1846]261
[1093]262 """
263 from asap import _is_sequence_or_number as _is_valid
264 from asap import _to_list
265 from asap import unique
266 if not _is_valid(scanid):
[1859]267 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
268 scanid = _to_list(scanid)
269 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
270 for sid in scanid: allscans.remove(sid)
271 if len(allscans) == 0:
272 raise ValueError("Can't remove all scans")
273 sel = selector(scans=allscans)
274 return self._select_copy(sel)
[1093]275
[1594]276 def _select_copy(self, selection):
277 orig = self.get_selection()
278 self.set_selection(orig+selection)
279 cp = self.copy()
280 self.set_selection(orig)
281 return cp
282
[102]283 def get_scan(self, scanid=None):
[1855]284 """\
[102]285 Return a specific scan (by scanno) or collection of scans (by
286 source name) in a new scantable.
[1846]287
288 *Note*:
289
[1348]290 See scantable.drop_scan() for the inverse operation.
[1846]291
[102]292 Parameters:
[1846]293
[513]294 scanid: a (list of) scanno or a source name, unix-style
295 patterns are accepted for source name matching, e.g.
296 '*_R' gets all 'ref scans
[1846]297
298 Example::
299
[513]300 # get all scans containing the source '323p459'
301 newscan = scan.get_scan('323p459')
302 # get all 'off' scans
303 refscans = scan.get_scan('*_R')
304 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]305 newscan = scan.get_scan([0, 2, 7, 10])
[1846]306
[102]307 """
308 if scanid is None:
[1859]309 raise RuntimeError( 'Please specify a scan no or name to '
310 'retrieve from the scantable' )
[102]311 try:
[946]312 bsel = self.get_selection()
313 sel = selector()
[102]314 if type(scanid) is str:
[946]315 sel.set_name(scanid)
[1594]316 return self._select_copy(sel)
[102]317 elif type(scanid) is int:
[946]318 sel.set_scans([scanid])
[1594]319 return self._select_copy(sel)
[381]320 elif type(scanid) is list:
[946]321 sel.set_scans(scanid)
[1594]322 return self._select_copy(sel)
[381]323 else:
[718]324 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]325 raise TypeError(msg)
[102]326 except RuntimeError:
[1859]327 raise
[102]328
    def __str__(self):
        # Human-readable summary; delegates to the C++ _summary (verbose=True).
        return Scantable._summary(self, True)
[102]331
[976]332 def summary(self, filename=None):
[1846]333 """\
[102]334 Print a summary of the contents of this scantable.
[1846]335
[102]336 Parameters:
[1846]337
[102]338 filename: the name of a file to write the putput to
339 Default - no file output
[1846]340
[102]341 """
[976]342 info = Scantable._summary(self, True)
[102]343 if filename is not None:
[256]344 if filename is "":
345 filename = 'scantable_summary.txt'
[415]346 from os.path import expandvars, isdir
[411]347 filename = expandvars(filename)
[415]348 if not isdir(filename):
[413]349 data = open(filename, 'w')
350 data.write(info)
351 data.close()
352 else:
[718]353 msg = "Illegal file name '%s'." % (filename)
[1859]354 raise IOError(msg)
355 return page(info)
[710]356
    def get_spectrum(self, rowno):
        """Return the spectrum for the given row in the scantable as a list.

        Parameters:

             rowno: the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]366
    def get_mask(self, rowno):
        """Return the mask for the given row in the scantable as a list.

        Parameters:

             rowno: the row number to retrieve the mask from

        """
        return self._getmask(rowno)
376
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the given row in the scantable.

        Parameters:

             spec: the new spectrum; must contain exactly nchan() values

             rowno: the row number to set the spectrum for

        """
        # NOTE(review): assert is stripped under 'python -O', so the length
        # check silently disappears in optimised runs.
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
389
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno: the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate object in the Python convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
412
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Returns:

            a selector object wrapping the current selection

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0) # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        return selector(self._getselection())
425
[1576]426 def set_selection(self, selection=None, **kw):
[1846]427 """\
[1005]428 Select a subset of the data. All following operations on this scantable
429 are only applied to thi selection.
[1846]430
[1005]431 Parameters:
[1697]432
[1846]433 selection: a selector object (default unset the selection), or
434 any combination of "pols", "ifs", "beams", "scans",
435 "cycles", "name", "query"
[1697]436
[1846]437 Examples::
[1697]438
[1005]439 sel = selector() # create a selection object
[1118]440 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]441 scan.set_selection(sel) # set the selection
442 scan.summary() # will only print summary of scanno 0 an 3
443 scan.set_selection() # unset the selection
[1697]444 # or the equivalent
445 scan.set_selection(scans=[0,3])
446 scan.summary() # will only print summary of scanno 0 an 3
447 scan.set_selection() # unset the selection
[1846]448
[946]449 """
[1576]450 if selection is None:
451 # reset
452 if len(kw) == 0:
453 selection = selector()
454 else:
455 # try keywords
456 for k in kw:
457 if k not in selector.fields:
458 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
459 selection = selector(**kw)
[946]460 self._setselection(selection)
461
[1819]462 def get_row(self, row=0, insitu=None):
[1846]463 """\
[1819]464 Select a row in the scantable.
465 Return a scantable with single row.
[1846]466
[1819]467 Parameters:
[1846]468
469 row: row no of integration, default is 0.
470 insitu: if False a new scantable is returned. Otherwise, the
471 scaling is done in-situ. The default is taken from .asaprc
472 (False)
473
[1819]474 """
475 if insitu is None: insitu = rcParams['insitu']
476 if not insitu:
477 workscan = self.copy()
478 else:
479 workscan = self
480 # Select a row
481 sel=selector()
482 sel.set_scans([workscan.getscan(row)])
483 sel.set_cycles([workscan.getcycle(row)])
484 sel.set_beams([workscan.getbeam(row)])
485 sel.set_ifs([workscan.getif(row)])
486 sel.set_polarisations([workscan.getpol(row)])
487 sel.set_name(workscan._getsourcename(row))
488 workscan.set_selection(sel)
489 if not workscan.nrow() == 1:
490 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
491 raise RuntimeError(msg)
492 del sel
493 if insitu:
494 self._assign(workscan)
495 else:
496 return workscan
497
[1862]498 @asaplog_post_dec
[1907]499 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]500 """\
[135]501 Determine the specified statistic of the current beam/if/pol
[102]502 Takes a 'mask' as an optional parameter to specify which
503 channels should be excluded.
[1846]504
[102]505 Parameters:
[1846]506
[1819]507 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
508 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]509
[135]510 mask: an optional mask specifying where the statistic
[102]511 should be determined.
[1855]512
[1819]513 form: format string to print statistic values
[1846]514
[1907]515 row: row number of spectrum to process.
516 (default is None: for all rows)
[1846]517
[1907]518 Example:
[113]519 scan.set_unit('channel')
[1118]520 msk = scan.create_mask([100, 200], [500, 600])
[135]521 scan.stats(stat='mean', mask=m)
[1846]522
[102]523 """
[1593]524 mask = mask or []
[876]525 if not self._check_ifs():
[1118]526 raise ValueError("Cannot apply mask as the IFs have different "
527 "number of channels. Please use setselection() "
528 "to select individual IFs")
[1819]529 rtnabc = False
530 if stat.lower().endswith('_abc'): rtnabc = True
531 getchan = False
532 if stat.lower().startswith('min') or stat.lower().startswith('max'):
533 chan = self._math._minmaxchan(self, mask, stat)
534 getchan = True
535 statvals = []
[1907]536 if not rtnabc:
537 if row == None:
538 statvals = self._math._stats(self, mask, stat)
539 else:
540 statvals = self._math._statsrow(self, mask, stat, int(row))
[256]541
[1819]542 #def cb(i):
543 # return statvals[i]
[256]544
[1819]545 #return self._row_callback(cb, stat)
[102]546
[1819]547 label=stat
548 #callback=cb
549 out = ""
550 #outvec = []
551 sep = '-'*50
[1907]552
553 if row == None:
554 rows = xrange(self.nrow())
555 elif isinstance(row, int):
556 rows = [ row ]
557
558 for i in rows:
[1819]559 refstr = ''
560 statunit= ''
561 if getchan:
562 qx, qy = self.chan2data(rowno=i, chan=chan[i])
563 if rtnabc:
564 statvals.append(qx['value'])
565 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
566 statunit= '['+qx['unit']+']'
567 else:
568 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
569
570 tm = self._gettime(i)
571 src = self._getsourcename(i)
572 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
573 out += 'Time[%s]:\n' % (tm)
[1907]574 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
575 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
576 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[1819]577 #outvec.append(callback(i))
[1907]578 if len(rows) > 1:
579 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
580 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
581 else:
582 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
583 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
[1819]584 out += sep+"\n"
585
[1859]586 import os
587 if os.environ.has_key( 'USER' ):
588 usr = os.environ['USER']
589 else:
590 import commands
591 usr = commands.getoutput( 'whoami' )
592 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
593 f = open(tmpfile,'w')
594 print >> f, sep
595 print >> f, ' %s %s' % (label, statunit)
596 print >> f, sep
597 print >> f, out
598 f.close()
599 f = open(tmpfile,'r')
600 x = f.readlines()
601 f.close()
602 asaplog.push(''.join(x), False)
603
[1819]604 return statvals
605
    def chan2data(self, rowno=0, chan=0):
        """\
        Returns channel/frequency/velocity and spectral value
        at an arbitrary row and channel in the scantable.

        Parameters:

            rowno: a row number in the scantable. Default is the
                   first row, i.e. rowno=0

            chan: a channel in the scantable. Default is the first
                  channel, i.e. pos=0

        Returns:

            two dicts {'unit': ..., 'value': ...} for the abcissa and the
            spectrum value.
            NOTE(review): implicitly returns None when rowno/chan are not
            both ints — callers should not rely on that silently.

        """
        if isinstance(rowno, int) and isinstance(chan, int):
            qx = {'unit': self.get_unit(),
                  'value': self._getabcissa(rowno)[chan]}
            qy = {'unit': self.get_fluxunit(),
                  'value': self._getspectrum(rowno)[chan]}
            return qx, qy
626
[1118]627 def stddev(self, mask=None):
[1846]628 """\
[135]629 Determine the standard deviation of the current beam/if/pol
630 Takes a 'mask' as an optional parameter to specify which
631 channels should be excluded.
[1846]632
[135]633 Parameters:
[1846]634
[135]635 mask: an optional mask specifying where the standard
636 deviation should be determined.
637
[1846]638 Example::
639
[135]640 scan.set_unit('channel')
[1118]641 msk = scan.create_mask([100, 200], [500, 600])
[135]642 scan.stddev(mask=m)
[1846]643
[135]644 """
[1118]645 return self.stats(stat='stddev', mask=mask);
[135]646
[1003]647
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the wrapped C++ string sequence into a plain Python list
        return list(Scantable.get_column_names(self))
[1003]653
[1730]654 def get_tsys(self, row=-1):
[1846]655 """\
[113]656 Return the System temperatures.
[1846]657
658 Parameters:
659
660 row: the rowno to get the information for. (default all rows)
661
[113]662 Returns:
[1846]663
[876]664 a list of Tsys values for the current selection
[1846]665
[113]666 """
[1730]667 if row > -1:
668 return self._get_column(self._gettsys, row)
[876]669 return self._row_callback(self._gettsys, "Tsys")
[256]670
[1730]671
672 def get_weather(self, row=-1):
[1846]673 """\
674 Return the weather informations.
675
676 Parameters:
677
678 row: the rowno to get the information for. (default all rows)
679
680 Returns:
681
682 a dict or list of of dicts of values for the current selection
683
684 """
685
[1730]686 values = self._get_column(self._get_weather, row)
687 if row > -1:
688 return {'temperature': values[0],
689 'pressure': values[1], 'humidity' : values[2],
690 'windspeed' : values[3], 'windaz' : values[4]
691 }
692 else:
693 out = []
694 for r in values:
695
696 out.append({'temperature': r[0],
697 'pressure': r[1], 'humidity' : r[2],
698 'windspeed' : r[3], 'windaz' : r[4]
699 })
700 return out
701
[876]702 def _row_callback(self, callback, label):
703 out = ""
[1118]704 outvec = []
[1590]705 sep = '-'*50
[876]706 for i in range(self.nrow()):
707 tm = self._gettime(i)
708 src = self._getsourcename(i)
[1590]709 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]710 out += 'Time[%s]:\n' % (tm)
[1590]711 if self.nbeam(-1) > 1:
712 out += ' Beam[%d] ' % (self.getbeam(i))
713 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
714 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]715 outvec.append(callback(i))
716 out += '= %3.3f\n' % (outvec[i])
[1590]717 out += sep+'\n'
[1859]718
719 asaplog.push(sep)
720 asaplog.push(" %s" % (label))
721 asaplog.push(sep)
722 asaplog.push(out)
[1861]723 asaplog.post()
[1175]724 return outvec
[256]725
    def _get_column(self, callback, row=-1):
        """Apply *callback* to one row or to every row.

        row == -1 returns a list with one value per row; a row in
        [0, nrow()) returns the single value. Any other row implicitly
        returns None.
        """
        if row == -1:
            return [callback(i) for i in range(self.nrow())]
        else:
            if  0 <= row < self.nrow():
                return callback(row)
[256]734
[1070]735
[1348]736 def get_time(self, row=-1, asdatetime=False):
[1846]737 """\
[113]738 Get a list of time stamps for the observations.
[1348]739 Return a datetime object for each integration time stamp in the scantable.
[1846]740
[113]741 Parameters:
[1846]742
[1348]743 row: row no of integration. Default -1 return all rows
[1855]744
[1348]745 asdatetime: return values as datetime objects rather than strings
[1846]746
[113]747 """
[1175]748 from time import strptime
749 from datetime import datetime
[1392]750 times = self._get_column(self._gettime, row)
[1348]751 if not asdatetime:
[1392]752 return times
[1175]753 format = "%Y/%m/%d/%H:%M:%S"
754 if isinstance(times, list):
755 return [datetime(*strptime(i, format)[:6]) for i in times]
756 else:
757 return datetime(*strptime(times, format)[:6])
[102]758
[1348]759
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        # row=-1 yields a list, a valid row yields a scalar (see _get_column)
        return self._get_column(self._getinttime, row)
[1348]771
[1573]772
    def get_sourcename(self, row=-1):
        """\
        Get a list of source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]783
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]795
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]806
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]818
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
830
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

        """
        # numeric variant of get_direction() (vector form from the C++ layer)
        return self._get_column(self._getdirectionvec, row)
842
[1862]843 @asaplog_post_dec
[102]844 def set_unit(self, unit='channel'):
[1846]845 """\
[102]846 Set the unit for all following operations on this scantable
[1846]847
[102]848 Parameters:
[1846]849
850 unit: optional unit, default is 'channel'. Use one of '*Hz',
851 'km/s', 'channel' or equivalent ''
852
[102]853 """
[484]854 varlist = vars()
[1118]855 if unit in ['', 'pixel', 'channel']:
[113]856 unit = ''
857 inf = list(self._getcoordinfo())
858 inf[0] = unit
859 self._setcoordinfo(inf)
[1118]860 self._add_history("set_unit", varlist)
[113]861
[1862]862 @asaplog_post_dec
[484]863 def set_instrument(self, instr):
[1846]864 """\
[1348]865 Set the instrument for subsequent processing.
[1846]866
[358]867 Parameters:
[1846]868
[710]869 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]870 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]871
[358]872 """
873 self._setInstrument(instr)
[1118]874 self._add_history("set_instument", vars())
[358]875
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype: 'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
888
[1862]889 @asaplog_post_dec
[276]890 def set_doppler(self, doppler='RADIO'):
[1846]891 """\
[276]892 Set the doppler for all following operations on this scantable.
[1846]893
[276]894 Parameters:
[1846]895
[276]896 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]897
[276]898 """
[484]899 varlist = vars()
[276]900 inf = list(self._getcoordinfo())
901 inf[2] = doppler
902 self._setcoordinfo(inf)
[1118]903 self._add_history("set_doppler", vars())
[710]904
[1862]905 @asaplog_post_dec
[226]906 def set_freqframe(self, frame=None):
[1846]907 """\
[113]908 Set the frame type of the Spectral Axis.
[1846]909
[113]910 Parameters:
[1846]911
[591]912 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]913 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]914 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]915
916 Example::
917
[113]918 scan.set_freqframe('BARY')
[1846]919
[113]920 """
[1593]921 frame = frame or rcParams['scantable.freqframe']
[484]922 varlist = vars()
[1819]923 # "REST" is not implemented in casacore
924 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
925 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
926 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]927 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]928
[989]929 if frame in valid:
[113]930 inf = list(self._getcoordinfo())
931 inf[1] = frame
932 self._setcoordinfo(inf)
[1118]933 self._add_history("set_freqframe", varlist)
[102]934 else:
[1118]935 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]936 raise TypeError(msg)
[710]937
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame: an optional frame type, default ''. Valid frames are:
                   'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # NOTE(review): 'frame' validation presumably happens in the C++
        # base implementation — not verifiable from here
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]956
[113]957 def get_unit(self):
[1846]958 """\
[113]959 Get the default unit set in this scantable
[1846]960
[113]961 Returns:
[1846]962
[113]963 A unit string
[1846]964
[113]965 """
966 inf = self._getcoordinfo()
967 unit = inf[0]
968 if unit == '': unit = 'channel'
969 return unit
[102]970
    @asaplog_post_dec
    def get_abcissa(self, rowno=0):
        """\
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol

        Parameters:

            rowno: an optional row number in the scantable. Default is the
                   first row, i.e. rowno=0

        Returns:

            a tuple of (abcissa values, axis label string)

        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        return abc, lbl
[113]990
[1862]991 @asaplog_post_dec
[1819]992 def flag(self, mask=None, unflag=False):
[1846]993 """\
[1001]994 Flag the selected data using an optional channel mask.
[1846]995
[1001]996 Parameters:
[1846]997
[1001]998 mask: an optional channel mask, created with create_mask. Default
999 (no mask) is all channels.
[1855]1000
[1819]1001 unflag: if True, unflag the data
[1846]1002
[1001]1003 """
1004 varlist = vars()
[1593]1005 mask = mask or []
[1859]1006 self._flag(mask, unflag)
[1001]1007 self._add_history("flag", varlist)
1008
[1862]1009 @asaplog_post_dec
[1819]1010 def flag_row(self, rows=[], unflag=False):
[1846]1011 """\
[1819]1012 Flag the selected data in row-based manner.
[1846]1013
[1819]1014 Parameters:
[1846]1015
[1843]1016 rows: list of row numbers to be flagged. Default is no row
1017 (must be explicitly specified to execute row-based flagging).
[1855]1018
[1819]1019 unflag: if True, unflag the data.
[1846]1020
[1819]1021 """
1022 varlist = vars()
[1859]1023 self._flag_row(rows, unflag)
[1819]1024 self._add_history("flag_row", varlist)
1025
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres: upper threshold.

            dthres: lower threshold

            clipoutside: True for flagging data outside the range [dthres:uthres].
                         False for flagging data inside the range.

            unflag: if True, unflag the data.

        """
        varlist = vars()
        # delegate the actual clipping to the C++ layer
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1046
[1862]1047 @asaplog_post_dec
[1584]1048 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1049 """\
[1192]1050 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1051 Flagged data in the scantable gets interpolated over the region.
[1192]1052 No taper is applied.
[1846]1053
[1192]1054 Parameters:
[1846]1055
[1579]1056 start: the start frequency (really a period within the
1057 bandwidth) or period to remove
[1855]1058
[1579]1059 end: the end frequency or period to remove
[1855]1060
[1584]1061 unit: the frequency unit (default "MHz") or "" for
[1579]1062 explicit lag channels
[1846]1063
1064 *Notes*:
1065
[1579]1066 It is recommended to flag edges of the band or strong
[1348]1067 signals beforehand.
[1846]1068
[1192]1069 """
1070 if insitu is None: insitu = rcParams['insitu']
1071 self._math._setinsitu(insitu)
1072 varlist = vars()
[1579]1073 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1074 if not (unit == "" or base.has_key(unit)):
[1192]1075 raise ValueError("%s is not a valid unit." % unit)
[1859]1076 if unit == "":
1077 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1078 else:
1079 s = scantable(self._math._lag_flag(self, start*base[unit],
1080 end*base[unit], "frequency"))
[1192]1081 s._add_history("lag_flag", varlist)
1082 if insitu:
1083 self._assign(s)
1084 else:
1085 return s
[1001]1086
[1862]1087 @asaplog_post_dec
[113]1088 def create_mask(self, *args, **kwargs):
[1846]1089 """\
[1118]1090 Compute and return a mask based on [min, max] windows.
[189]1091 The specified windows are to be INCLUDED, when the mask is
[113]1092 applied.
[1846]1093
[102]1094 Parameters:
[1846]1095
[1118]1096 [min, max], [min2, max2], ...
[1024]1097 Pairs of start/end points (inclusive)specifying the regions
[102]1098 to be masked
[1855]1099
[189]1100 invert: optional argument. If specified as True,
1101 return an inverted mask, i.e. the regions
1102 specified are EXCLUDED
[1855]1103
[513]1104 row: create the mask using the specified row for
1105 unit conversions, default is row=0
1106 only necessary if frequency varies over rows.
[1846]1107
1108 Examples::
1109
[113]1110 scan.set_unit('channel')
[1846]1111 # a)
[1118]1112 msk = scan.create_mask([400, 500], [800, 900])
[189]1113 # masks everything outside 400 and 500
[113]1114 # and 800 and 900 in the unit 'channel'
1115
[1846]1116 # b)
[1118]1117 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1118 # masks the regions between 400 and 500
[113]1119 # and 800 and 900 in the unit 'channel'
[1846]1120
1121 # c)
1122 #mask only channel 400
[1554]1123 msk = scan.create_mask([400])
[1846]1124
[102]1125 """
[1554]1126 row = kwargs.get("row", 0)
[513]1127 data = self._getabcissa(row)
[113]1128 u = self._getcoordinfo()[0]
[1859]1129 if u == "":
1130 u = "channel"
1131 msg = "The current mask window unit is %s" % u
1132 i = self._check_ifs()
1133 if not i:
1134 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1135 asaplog.push(msg)
[102]1136 n = self.nchan()
[1295]1137 msk = _n_bools(n, False)
[710]1138 # test if args is a 'list' or a 'normal *args - UGLY!!!
1139
[1118]1140 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1141 and args or args[0]
[710]1142 for window in ws:
[1554]1143 if len(window) == 1:
1144 window = [window[0], window[0]]
1145 if len(window) == 0 or len(window) > 2:
1146 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1147 if window[0] > window[1]:
1148 tmp = window[0]
1149 window[0] = window[1]
1150 window[1] = tmp
[102]1151 for i in range(n):
[1024]1152 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1153 msk[i] = True
[113]1154 if kwargs.has_key('invert'):
1155 if kwargs.get('invert'):
[1295]1156 msk = mask_not(msk)
[102]1157 return msk
[710]1158
[1819]1159 def get_masklist(self, mask=None, row=0):
[1846]1160 """\
[1819]1161 Compute and return a list of mask windows, [min, max].
[1846]1162
[1819]1163 Parameters:
[1846]1164
[1819]1165 mask: channel mask, created with create_mask.
[1855]1166
[1819]1167 row: calcutate the masklist using the specified row
1168 for unit conversions, default is row=0
1169 only necessary if frequency varies over rows.
[1846]1170
[1819]1171 Returns:
[1846]1172
[1819]1173 [min, max], [min2, max2], ...
1174 Pairs of start/end points (inclusive)specifying
1175 the masked regions
[1846]1176
[1819]1177 """
1178 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1179 raise TypeError("The mask should be list or tuple.")
1180 if len(mask) < 2:
1181 raise TypeError("The mask elements should be > 1")
1182 if self.nchan() != len(mask):
1183 msg = "Number of channels in scantable != number of mask elements"
1184 raise TypeError(msg)
1185 data = self._getabcissa(row)
1186 u = self._getcoordinfo()[0]
[1859]1187 if u == "":
1188 u = "channel"
1189 msg = "The current mask window unit is %s" % u
1190 i = self._check_ifs()
1191 if not i:
1192 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1193 asaplog.push(msg)
[1819]1194 masklist=[]
1195 ist, ien = None, None
1196 ist, ien=self.get_mask_indices(mask)
1197 if ist is not None and ien is not None:
1198 for i in xrange(len(ist)):
1199 range=[data[ist[i]],data[ien[i]]]
1200 range.sort()
1201 masklist.append([range[0],range[1]])
1202 return masklist
1203
1204 def get_mask_indices(self, mask=None):
[1846]1205 """\
[1819]1206 Compute and Return lists of mask start indices and mask end indices.
[1855]1207
1208 Parameters:
1209
[1819]1210 mask: channel mask, created with create_mask.
[1846]1211
[1819]1212 Returns:
[1846]1213
[1819]1214 List of mask start indices and that of mask end indices,
1215 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1216
[1819]1217 """
1218 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1219 raise TypeError("The mask should be list or tuple.")
1220 if len(mask) < 2:
1221 raise TypeError("The mask elements should be > 1")
1222 istart=[]
1223 iend=[]
1224 if mask[0]: istart.append(0)
1225 for i in range(len(mask)-1):
1226 if not mask[i] and mask[i+1]:
1227 istart.append(i+1)
1228 elif mask[i] and not mask[i+1]:
1229 iend.append(i)
1230 if mask[len(mask)-1]: iend.append(len(mask)-1)
1231 if len(istart) != len(iend):
1232 raise RuntimeError("Numbers of mask start != mask end.")
1233 for i in range(len(istart)):
1234 if istart[i] > iend[i]:
1235 raise RuntimeError("Mask start index > mask end index")
1236 break
1237 return istart,iend
1238
1239# def get_restfreqs(self):
1240# """
1241# Get the restfrequency(s) stored in this scantable.
1242# The return value(s) are always of unit 'Hz'
1243# Parameters:
1244# none
1245# Returns:
1246# a list of doubles
1247# """
1248# return list(self._getrestfreqs())
1249
1250 def get_restfreqs(self, ids=None):
[1846]1251 """\
[256]1252 Get the restfrequency(s) stored in this scantable.
1253 The return value(s) are always of unit 'Hz'
[1846]1254
[256]1255 Parameters:
[1846]1256
[1819]1257 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1258 be retrieved
[1846]1259
[256]1260 Returns:
[1846]1261
[1819]1262 dictionary containing ids and a list of doubles for each id
[1846]1263
[256]1264 """
[1819]1265 if ids is None:
1266 rfreqs={}
1267 idlist = self.getmolnos()
1268 for i in idlist:
1269 rfreqs[i]=list(self._getrestfreqs(i))
1270 return rfreqs
1271 else:
1272 if type(ids)==list or type(ids)==tuple:
1273 rfreqs={}
1274 for i in ids:
1275 rfreqs[i]=list(self._getrestfreqs(i))
1276 return rfreqs
1277 else:
1278 return list(self._getrestfreqs(ids))
1279 #return list(self._getrestfreqs(ids))
[102]1280
[931]1281 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1282 """\
[931]1283 Set or replace the restfrequency specified and
1284 If the 'freqs' argument holds a scalar,
1285 then that rest frequency will be applied to all the selected
1286 data. If the 'freqs' argument holds
1287 a vector, then it MUST be of equal or smaller length than
1288 the number of IFs (and the available restfrequencies will be
1289 replaced by this vector). In this case, *all* data have
1290 the restfrequency set per IF according
1291 to the corresponding value you give in the 'freqs' vector.
[1118]1292 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1293 IF 1 gets restfreq 2e9.
[1846]1294
[1395]1295 You can also specify the frequencies via a linecatalog.
[1153]1296
[931]1297 Parameters:
[1846]1298
[931]1299 freqs: list of rest frequency values or string idenitfiers
[1855]1300
[931]1301 unit: unit for rest frequency (default 'Hz')
[402]1302
[1846]1303
1304 Example::
1305
[1819]1306 # set the given restfrequency for the all currently selected IFs
[931]1307 scan.set_restfreqs(freqs=1.4e9)
[1845]1308 # set restfrequencies for the n IFs (n > 1) in the order of the
1309 # list, i.e
1310 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1311 # len(list_of_restfreqs) == nIF
1312 # for nIF == 1 the following will set multiple restfrequency for
1313 # that IF
[1819]1314 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1315 # set multiple restfrequencies per IF. as a list of lists where
1316 # the outer list has nIF elements, the inner s arbitrary
1317 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1318
[1846]1319 *Note*:
[1845]1320
[931]1321 To do more sophisticate Restfrequency setting, e.g. on a
1322 source and IF basis, use scantable.set_selection() before using
[1846]1323 this function::
[931]1324
[1846]1325 # provided your scantable is called scan
1326 selection = selector()
1327 selection.set_name("ORION*")
1328 selection.set_ifs([1])
1329 scan.set_selection(selection)
1330 scan.set_restfreqs(freqs=86.6e9)
1331
[931]1332 """
1333 varlist = vars()
[1157]1334 from asap import linecatalog
1335 # simple value
[1118]1336 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1337 self._setrestfreqs([freqs], [""], unit)
[1157]1338 # list of values
[1118]1339 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1340 # list values are scalars
[1118]1341 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1342 if len(freqs) == 1:
1343 self._setrestfreqs(freqs, [""], unit)
1344 else:
1345 # allow the 'old' mode of setting mulitple IFs
1346 sel = selector()
1347 savesel = self._getselection()
1348 iflist = self.getifnos()
1349 if len(freqs)>len(iflist):
1350 raise ValueError("number of elements in list of list "
1351 "exeeds the current IF selections")
1352 iflist = self.getifnos()
1353 for i, fval in enumerate(freqs):
1354 sel.set_ifs(iflist[i])
1355 self._setselection(sel)
1356 self._setrestfreqs([fval], [""], unit)
1357 self._setselection(savesel)
1358
1359 # list values are dict, {'value'=, 'name'=)
[1157]1360 elif isinstance(freqs[-1], dict):
[1845]1361 values = []
1362 names = []
1363 for d in freqs:
1364 values.append(d["value"])
1365 names.append(d["name"])
1366 self._setrestfreqs(values, names, unit)
[1819]1367 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1368 sel = selector()
1369 savesel = self._getselection()
[1322]1370 iflist = self.getifnos()
[1819]1371 if len(freqs)>len(iflist):
[1845]1372 raise ValueError("number of elements in list of list exeeds"
1373 " the current IF selections")
1374 for i, fval in enumerate(freqs):
[1322]1375 sel.set_ifs(iflist[i])
[1259]1376 self._setselection(sel)
[1845]1377 self._setrestfreqs(fval, [""], unit)
[1157]1378 self._setselection(savesel)
1379 # freqs are to be taken from a linecatalog
[1153]1380 elif isinstance(freqs, linecatalog):
1381 sel = selector()
1382 savesel = self._getselection()
1383 for i in xrange(freqs.nrow()):
[1322]1384 sel.set_ifs(iflist[i])
[1153]1385 self._setselection(sel)
[1845]1386 self._setrestfreqs([freqs.get_frequency(i)],
1387 [freqs.get_name(i)], "MHz")
[1153]1388 # ensure that we are not iterating past nIF
1389 if i == self.nif()-1: break
1390 self._setselection(savesel)
[931]1391 else:
1392 return
1393 self._add_history("set_restfreqs", varlist)
1394
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta: the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegates straight to the C++ base class
        # NOTE(review): unlike most mutators in this class, no history
        # entry is recorded here - confirm whether that is intentional
        Scantable.shift_refpix(self, delta)
[931]1410
[1862]1411 @asaplog_post_dec
[1259]1412 def history(self, filename=None):
[1846]1413 """\
[1259]1414 Print the history. Optionally to a file.
[1846]1415
[1348]1416 Parameters:
[1846]1417
[1348]1418 filename: The name of the file to save the history to.
[1846]1419
[1259]1420 """
[484]1421 hist = list(self._gethistory())
[794]1422 out = "-"*80
[484]1423 for h in hist:
[489]1424 if h.startswith("---"):
[1857]1425 out = "\n".join([out, h])
[489]1426 else:
1427 items = h.split("##")
1428 date = items[0]
1429 func = items[1]
1430 items = items[2:]
[794]1431 out += "\n"+date+"\n"
1432 out += "Function: %s\n Parameters:" % (func)
[489]1433 for i in items:
1434 s = i.split("=")
[1118]1435 out += "\n %s = %s" % (s[0], s[1])
[1857]1436 out = "\n".join([out, "-"*80])
[1259]1437 if filename is not None:
1438 if filename is "":
1439 filename = 'scantable_history.txt'
1440 import os
1441 filename = os.path.expandvars(os.path.expanduser(filename))
1442 if not os.path.isdir(filename):
1443 data = open(filename, 'w')
1444 data.write(out)
1445 data.close()
1446 else:
1447 msg = "Illegal file name '%s'." % (filename)
[1859]1448 raise IOError(msg)
1449 return page(out)
[513]1450 #
1451 # Maths business
1452 #
[1862]1453 @asaplog_post_dec
[931]1454 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1455 """\
[1070]1456 Return the (time) weighted average of a scan.
[1846]1457
1458 *Note*:
1459
[1070]1460 in channels only - align if necessary
[1846]1461
[513]1462 Parameters:
[1846]1463
[513]1464 mask: an optional mask (only used for 'var' and 'tsys'
1465 weighting)
[1855]1466
[558]1467 scanav: True averages each scan separately
1468 False (default) averages all scans together,
[1855]1469
[1099]1470 weight: Weighting scheme.
1471 'none' (mean no weight)
1472 'var' (1/var(spec) weighted)
1473 'tsys' (1/Tsys**2 weighted)
1474 'tint' (integration time weighted)
1475 'tintsys' (Tint/Tsys**2)
1476 'median' ( median averaging)
[535]1477 The default is 'tint'
[1855]1478
[931]1479 align: align the spectra in velocity before averaging. It takes
1480 the time of the first spectrum as reference time.
[1846]1481
1482 Example::
1483
[513]1484 # time average the scantable without using a mask
[710]1485 newscan = scan.average_time()
[1846]1486
[513]1487 """
1488 varlist = vars()
[1593]1489 weight = weight or 'TINT'
1490 mask = mask or ()
1491 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1492 scan = (self, )
[1859]1493
1494 if align:
1495 scan = (self.freq_align(insitu=False), )
1496 s = None
1497 if weight.upper() == 'MEDIAN':
1498 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1499 scanav))
1500 else:
1501 s = scantable(self._math._average(scan, mask, weight.upper(),
1502 scanav))
[1099]1503 s._add_history("average_time", varlist)
[513]1504 return s
[710]1505
[1862]1506 @asaplog_post_dec
[876]1507 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1508 """\
[513]1509 Return a scan where all spectra are converted to either
1510 Jansky or Kelvin depending upon the flux units of the scan table.
1511 By default the function tries to look the values up internally.
1512 If it can't find them (or if you want to over-ride), you must
1513 specify EITHER jyperk OR eta (and D which it will try to look up
1514 also if you don't set it). jyperk takes precedence if you set both.
[1846]1515
[513]1516 Parameters:
[1846]1517
[513]1518 jyperk: the Jy / K conversion factor
[1855]1519
[513]1520 eta: the aperture efficiency
[1855]1521
[513]1522 d: the geomtric diameter (metres)
[1855]1523
[513]1524 insitu: if False a new scantable is returned.
1525 Otherwise, the scaling is done in-situ
1526 The default is taken from .asaprc (False)
[1846]1527
[513]1528 """
1529 if insitu is None: insitu = rcParams['insitu']
[876]1530 self._math._setinsitu(insitu)
[513]1531 varlist = vars()
[1593]1532 jyperk = jyperk or -1.0
1533 d = d or -1.0
1534 eta = eta or -1.0
[876]1535 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1536 s._add_history("convert_flux", varlist)
1537 if insitu: self._assign(s)
1538 else: return s
[513]1539
[1862]1540 @asaplog_post_dec
[876]1541 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1542 """\
[513]1543 Return a scan after applying a gain-elevation correction.
1544 The correction can be made via either a polynomial or a
1545 table-based interpolation (and extrapolation if necessary).
1546 You specify polynomial coefficients, an ascii table or neither.
1547 If you specify neither, then a polynomial correction will be made
1548 with built in coefficients known for certain telescopes (an error
1549 will occur if the instrument is not known).
1550 The data and Tsys are *divided* by the scaling factors.
[1846]1551
[513]1552 Parameters:
[1846]1553
[513]1554 poly: Polynomial coefficients (default None) to compute a
1555 gain-elevation correction as a function of
1556 elevation (in degrees).
[1855]1557
[513]1558 filename: The name of an ascii file holding correction factors.
1559 The first row of the ascii file must give the column
1560 names and these MUST include columns
1561 "ELEVATION" (degrees) and "FACTOR" (multiply data
1562 by this) somewhere.
1563 The second row must give the data type of the
1564 column. Use 'R' for Real and 'I' for Integer.
1565 An example file would be
1566 (actual factors are arbitrary) :
1567
1568 TIME ELEVATION FACTOR
1569 R R R
1570 0.1 0 0.8
1571 0.2 20 0.85
1572 0.3 40 0.9
1573 0.4 60 0.85
1574 0.5 80 0.8
1575 0.6 90 0.75
[1855]1576
[513]1577 method: Interpolation method when correcting from a table.
1578 Values are "nearest", "linear" (default), "cubic"
1579 and "spline"
[1855]1580
[513]1581 insitu: if False a new scantable is returned.
1582 Otherwise, the scaling is done in-situ
1583 The default is taken from .asaprc (False)
[1846]1584
[513]1585 """
1586
1587 if insitu is None: insitu = rcParams['insitu']
[876]1588 self._math._setinsitu(insitu)
[513]1589 varlist = vars()
[1593]1590 poly = poly or ()
[513]1591 from os.path import expandvars
1592 filename = expandvars(filename)
[876]1593 s = scantable(self._math._gainel(self, poly, filename, method))
1594 s._add_history("gain_el", varlist)
[1593]1595 if insitu:
1596 self._assign(s)
1597 else:
1598 return s
[710]1599
[1862]1600 @asaplog_post_dec
[931]1601 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1602 """\
[513]1603 Return a scan where all rows have been aligned in frequency/velocity.
1604 The alignment frequency frame (e.g. LSRK) is that set by function
1605 set_freqframe.
[1846]1606
[513]1607 Parameters:
[1855]1608
[513]1609 reftime: reference time to align at. By default, the time of
1610 the first row of data is used.
[1855]1611
[513]1612 method: Interpolation method for regridding the spectra.
1613 Choose from "nearest", "linear", "cubic" (default)
1614 and "spline"
[1855]1615
[513]1616 insitu: if False a new scantable is returned.
1617 Otherwise, the scaling is done in-situ
1618 The default is taken from .asaprc (False)
[1846]1619
[513]1620 """
[931]1621 if insitu is None: insitu = rcParams["insitu"]
[876]1622 self._math._setinsitu(insitu)
[513]1623 varlist = vars()
[1593]1624 reftime = reftime or ""
[931]1625 s = scantable(self._math._freq_align(self, reftime, method))
[876]1626 s._add_history("freq_align", varlist)
1627 if insitu: self._assign(s)
1628 else: return s
[513]1629
[1862]1630 @asaplog_post_dec
[1725]1631 def opacity(self, tau=None, insitu=None):
[1846]1632 """\
[513]1633 Apply an opacity correction. The data
1634 and Tsys are multiplied by the correction factor.
[1846]1635
[513]1636 Parameters:
[1855]1637
[1689]1638 tau: (list of) opacity from which the correction factor is
[513]1639 exp(tau*ZD)
[1689]1640 where ZD is the zenith-distance.
1641 If a list is provided, it has to be of length nIF,
1642 nIF*nPol or 1 and in order of IF/POL, e.g.
1643 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1644 if tau is `None` the opacities are determined from a
1645 model.
[1855]1646
[513]1647 insitu: if False a new scantable is returned.
1648 Otherwise, the scaling is done in-situ
1649 The default is taken from .asaprc (False)
[1846]1650
[513]1651 """
1652 if insitu is None: insitu = rcParams['insitu']
[876]1653 self._math._setinsitu(insitu)
[513]1654 varlist = vars()
[1689]1655 if not hasattr(tau, "__len__"):
1656 tau = [tau]
[876]1657 s = scantable(self._math._opacity(self, tau))
1658 s._add_history("opacity", varlist)
1659 if insitu: self._assign(s)
1660 else: return s
[513]1661
[1862]1662 @asaplog_post_dec
[513]1663 def bin(self, width=5, insitu=None):
[1846]1664 """\
[513]1665 Return a scan where all spectra have been binned up.
[1846]1666
[1348]1667 Parameters:
[1846]1668
[513]1669 width: The bin width (default=5) in pixels
[1855]1670
[513]1671 insitu: if False a new scantable is returned.
1672 Otherwise, the scaling is done in-situ
1673 The default is taken from .asaprc (False)
[1846]1674
[513]1675 """
1676 if insitu is None: insitu = rcParams['insitu']
[876]1677 self._math._setinsitu(insitu)
[513]1678 varlist = vars()
[876]1679 s = scantable(self._math._bin(self, width))
[1118]1680 s._add_history("bin", varlist)
[1589]1681 if insitu:
1682 self._assign(s)
1683 else:
1684 return s
[513]1685
[1862]1686 @asaplog_post_dec
[513]1687 def resample(self, width=5, method='cubic', insitu=None):
[1846]1688 """\
[1348]1689 Return a scan where all spectra have been binned up.
[1573]1690
[1348]1691 Parameters:
[1846]1692
[513]1693 width: The bin width (default=5) in pixels
[1855]1694
[513]1695 method: Interpolation method when correcting from a table.
1696 Values are "nearest", "linear", "cubic" (default)
1697 and "spline"
[1855]1698
[513]1699 insitu: if False a new scantable is returned.
1700 Otherwise, the scaling is done in-situ
1701 The default is taken from .asaprc (False)
[1846]1702
[513]1703 """
1704 if insitu is None: insitu = rcParams['insitu']
[876]1705 self._math._setinsitu(insitu)
[513]1706 varlist = vars()
[876]1707 s = scantable(self._math._resample(self, method, width))
[1118]1708 s._add_history("resample", varlist)
[876]1709 if insitu: self._assign(s)
1710 else: return s
[513]1711
[1862]1712 @asaplog_post_dec
[946]1713 def average_pol(self, mask=None, weight='none'):
[1846]1714 """\
[946]1715 Average the Polarisations together.
[1846]1716
[946]1717 Parameters:
[1846]1718
[946]1719 mask: An optional mask defining the region, where the
1720 averaging will be applied. The output will have all
1721 specified points masked.
[1855]1722
[946]1723 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1724 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1725
[946]1726 """
1727 varlist = vars()
[1593]1728 mask = mask or ()
[1010]1729 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1730 s._add_history("average_pol", varlist)
[992]1731 return s
[513]1732
[1862]1733 @asaplog_post_dec
[1145]1734 def average_beam(self, mask=None, weight='none'):
[1846]1735 """\
[1145]1736 Average the Beams together.
[1846]1737
[1145]1738 Parameters:
1739 mask: An optional mask defining the region, where the
1740 averaging will be applied. The output will have all
1741 specified points masked.
[1855]1742
[1145]1743 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1744 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1745
[1145]1746 """
1747 varlist = vars()
[1593]1748 mask = mask or ()
[1145]1749 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1750 s._add_history("average_beam", varlist)
1751 return s
1752
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag: Bool indicating whether to turn this on (True) or
                   off (False)

        """
        varlist = vars()
        # delegate to the C++ layer and record the call in the history
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
1767
[1862]1768 @asaplog_post_dec
[992]1769 def convert_pol(self, poltype=None):
[1846]1770 """\
[992]1771 Convert the data to a different polarisation type.
[1565]1772 Note that you will need cross-polarisation terms for most conversions.
[1846]1773
[992]1774 Parameters:
[1855]1775
[992]1776 poltype: The new polarisation type. Valid types are:
[1565]1777 "linear", "circular", "stokes" and "linpol"
[1846]1778
[992]1779 """
1780 varlist = vars()
[1859]1781 s = scantable(self._math._convertpol(self, poltype))
[1118]1782 s._add_history("convert_pol", varlist)
[992]1783 return s
1784
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defaults to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polynomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each individual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy for the interactive comparison plot
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive mode: overlay the original and smoothed spectrum
            # for each row and let the user accept or reject the result
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1862
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask: an optional mask

            order: the order of the polynomial (default is 0)

            plot: plot the fit and the residual. In this each
                  individual fit has to be approved, by typing 'y'
                  or 'n'

            uselin: use linear polynomial fit

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows: row numbers of spectra to be processed.
                  (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # no mask given: fit using all channels
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            if len(rows) > 0:
                # per-row fit results are published as attributes of self
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # combine the user mask with the row's channel mask (CAS-1434)
                f.mask = mask_and(mask, workscan._getmask(r))
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected: record placeholders, leave the row untouched
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum with the fit residual (baseline removed)
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1949
[1819]1950
    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask:   an optional mask

            order:  the order of the polynomial (default is 0)

            plot:   plot the fit and the residual. In this each
                    indivual fit has to be approved, by typing 'y'
                    or 'n'. Ignored if batch = True.

            batch:  if True a faster algorithm is used and logs
                    including the fit results are not output
                    (default is False)

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            rows:   row numbers of spectra to be processed.
                    (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams["insitu"]
        if insitu:
            workscan = self
        else:
            workscan = self.copy()

        # snapshot of the call arguments for the history entry
        varlist = vars()
        nchan = workscan.nchan()

        if mask is None:
            # no user mask: use all channels
            mask = [True for i in xrange(nchan)]

        try:
            # normalise 'rows' to an iterable of row numbers
            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            # fit results are exposed as class attributes; note they are
            # reset here and do NOT track later changes to the scantable
            if len(rows) > 0:
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            if batch:
                # fast path: fitting is done entirely on the C++ side,
                # no per-row logging or bookkeeping
                for r in rows:
                    workscan._poly_baseline_batch(mask, order, r)
            elif plot:
                # interactive path: each fit is shown and must be accepted
                f = fitter()
                f.set_function(lpoly=order)
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own flags (CAS-1434)
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: keep placeholders so list indices
                        # still line up with the processed rows
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                    workscan._setspectrum(f.fitter.getresidual(), r)
                    self.blpars.append(f.get_parameters())
                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
                    self.actualmask.append(f.mask)

                f._p.unmap()
                f._p = None
            else:
                # non-interactive path: results are returned from C++ by
                # writing through raw buffer addresses of array.array
                # objects (buffer_info() -> (address, length))
                import array
                for r in rows:
                    pars = array.array("f", [0.0 for i in range(order+1)])
                    pars_adr = pars.buffer_info()[0]
                    pars_len = pars.buffer_info()[1]

                    errs = array.array("f", [0.0 for i in range(order+1)])
                    errs_adr = errs.buffer_info()[0]
                    errs_len = errs.buffer_info()[1]

                    fmsk = array.array("i", [1 for i in range(nchan)])
                    fmsk_adr = fmsk.buffer_info()[0]
                    fmsk_len = fmsk.buffer_info()[1]

                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)

                    params = pars.tolist()
                    fmtd = ""
                    for i in xrange(len(params)): fmtd += " p%d= %3.6f," % (i, params[i])
                    fmtd = fmtd[:-1] # remove trailing ","
                    errors = errs.tolist()
                    fmask = fmsk.tolist()
                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0) # transform (1/0) -> (True/False)

                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
                    self.masklists.append(workscan.get_masklist(fmask, r))
                    self.actualmask.append(fmask)

                    asaplog.push(str(fmtd))

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams["verbose"]:
                asaplog.push(str(e))
                asaplog.push(str(msg))
                return
            else:
                raise RuntimeError(str(e)+'\n'+msg)
[1907]2069
2070
2071 def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2072 threshold=3, chan_avg_limit=1, plot=False,
[1907]2073 insitu=None, rows=None):
[1846]2074 """\
[880]2075 Return a scan which has been baselined (all rows) by a polynomial.
2076 Spectral lines are detected first using linefinder and masked out
2077 to avoid them affecting the baseline solution.
2078
2079 Parameters:
[1846]2080
[880]2081 mask: an optional mask retreived from scantable
[1846]2082
2083 edge: an optional number of channel to drop at the edge of
2084 spectrum. If only one value is
[880]2085 specified, the same number will be dropped from
2086 both sides of the spectrum. Default is to keep
[907]2087 all channels. Nested tuples represent individual
[976]2088 edge selection for different IFs (a number of spectral
2089 channels can be different)
[1846]2090
[880]2091 order: the order of the polynomial (default is 0)
[1846]2092
[880]2093 threshold: the threshold used by line finder. It is better to
2094 keep it large as only strong lines affect the
2095 baseline solution.
[1846]2096
[1280]2097 chan_avg_limit:
2098 a maximum number of consequtive spectral channels to
2099 average during the search of weak and broad lines.
2100 The default is no averaging (and no search for weak
2101 lines). If such lines can affect the fitted baseline
2102 (e.g. a high order polynomial is fitted), increase this
2103 parameter (usually values up to 8 are reasonable). Most
2104 users of this method should find the default value
2105 sufficient.
[1846]2106
[1061]2107 plot: plot the fit and the residual. In this each
2108 indivual fit has to be approved, by typing 'y'
2109 or 'n'
[1846]2110
[880]2111 insitu: if False a new scantable is returned.
2112 Otherwise, the scaling is done in-situ
2113 The default is taken from .asaprc (False)
[1907]2114 rows: row numbers of spectra to be processed.
2115 (default is None: for all rows)
[880]2116
[1846]2117
2118 Example::
2119
2120 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2121
[880]2122 """
2123 if insitu is None: insitu = rcParams['insitu']
2124 varlist = vars()
2125 from asap.asaplinefind import linefinder
2126 from asap import _is_sequence_or_number as _is_valid
2127
[976]2128 # check whether edge is set up for each IF individually
[1118]2129 individualedge = False;
2130 if len(edge) > 1:
2131 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2132 individualedge = True;
[907]2133
[1118]2134 if not _is_valid(edge, int) and not individualedge:
[909]2135 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2136 pair of integers specified as a tuple. Nested tuples are allowed \
2137 to make individual selection for different IFs."
[919]2138
[1118]2139 curedge = (0, 0)
2140 if individualedge:
2141 for edgepar in edge:
2142 if not _is_valid(edgepar, int):
2143 raise ValueError, "Each element of the 'edge' tuple has \
2144 to be a pair of integers or an integer."
[907]2145 else:
[1118]2146 curedge = edge;
[880]2147
[1907]2148 if not insitu:
2149 workscan = self.copy()
2150 else:
2151 workscan = self
2152
[880]2153 # setup fitter
2154 f = fitter()
[1907]2155 f.set_function(lpoly=order)
[880]2156
2157 # setup line finder
[1118]2158 fl = linefinder()
[1268]2159 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2160
[907]2161 fl.set_scan(workscan)
2162
[1907]2163 if mask is None:
2164 mask = _n_bools(workscan.nchan(), True)
2165
2166 if rows is None:
2167 rows = xrange(workscan.nrow())
2168 elif isinstance(rows, int):
2169 rows = [ rows ]
2170
[1819]2171 # Save parameters of baseline fits & masklists as a class attribute.
2172 # NOTICE: It does not reflect changes in scantable!
2173 if len(rows) > 0:
2174 self.blpars=[]
2175 self.masklists=[]
[1907]2176 self.actualmask=[]
[880]2177 asaplog.push("Processing:")
2178 for r in rows:
[1118]2179 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2180 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2181 workscan.getpol(r), workscan.getcycle(r))
[880]2182 asaplog.push(msg, False)
[907]2183
[976]2184 # figure out edge parameter
[1118]2185 if individualedge:
2186 if len(edge) >= workscan.getif(r):
2187 raise RuntimeError, "Number of edge elements appear to " \
2188 "be less than the number of IFs"
2189 curedge = edge[workscan.getif(r)]
[919]2190
[1907]2191 actualmask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
[1819]2192
[976]2193 # setup line finder
[1819]2194 fl.find_lines(r, actualmask, curedge)
[1907]2195
[1819]2196 f.x = workscan._getabcissa(r)
2197 f.y = workscan._getspectrum(r)
[1907]2198 f.mask = fl.get_mask()
[1819]2199 f.data = None
[880]2200 f.fit()
[1819]2201
2202 # Show mask list
[1907]2203 masklist=workscan.get_masklist(f.mask, row=r)
[1819]2204 msg = "mask range: "+str(masklist)
2205 asaplog.push(msg, False)
2206
[1061]2207 if plot:
2208 f.plot(residual=True)
2209 x = raw_input("Accept fit ( [y]/n ): ")
2210 if x.upper() == 'N':
[1819]2211 self.blpars.append(None)
2212 self.masklists.append(None)
[1907]2213 self.actualmask.append(None)
[1061]2214 continue
[1819]2215
[880]2216 workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2217 self.blpars.append(f.get_parameters())
2218 self.masklists.append(masklist)
[1907]2219 self.actualmask.append(f.mask)
[1061]2220 if plot:
2221 f._p.unmap()
2222 f._p = None
2223 workscan._add_history("auto_poly_baseline", varlist)
[880]2224 if insitu:
2225 self._assign(workscan)
2226 else:
2227 return workscan
2228
[1862]2229 @asaplog_post_dec
[914]2230 def rotate_linpolphase(self, angle):
[1846]2231 """\
[914]2232 Rotate the phase of the complex polarization O=Q+iU correlation.
2233 This is always done in situ in the raw data. So if you call this
2234 function more than once then each call rotates the phase further.
[1846]2235
[914]2236 Parameters:
[1846]2237
[914]2238 angle: The angle (degrees) to rotate (add) by.
[1846]2239
2240 Example::
2241
[914]2242 scan.rotate_linpolphase(2.3)
[1846]2243
[914]2244 """
2245 varlist = vars()
[936]2246 self._math._rotate_linpolphase(self, angle)
[914]2247 self._add_history("rotate_linpolphase", varlist)
2248 return
[710]2249
[1862]2250 @asaplog_post_dec
[914]2251 def rotate_xyphase(self, angle):
[1846]2252 """\
[914]2253 Rotate the phase of the XY correlation. This is always done in situ
2254 in the data. So if you call this function more than once
2255 then each call rotates the phase further.
[1846]2256
[914]2257 Parameters:
[1846]2258
[914]2259 angle: The angle (degrees) to rotate (add) by.
[1846]2260
2261 Example::
2262
[914]2263 scan.rotate_xyphase(2.3)
[1846]2264
[914]2265 """
2266 varlist = vars()
[936]2267 self._math._rotate_xyphase(self, angle)
[914]2268 self._add_history("rotate_xyphase", varlist)
2269 return
2270
[1862]2271 @asaplog_post_dec
[914]2272 def swap_linears(self):
[1846]2273 """\
[1573]2274 Swap the linear polarisations XX and YY, or better the first two
[1348]2275 polarisations as this also works for ciculars.
[914]2276 """
2277 varlist = vars()
[936]2278 self._math._swap_linears(self)
[914]2279 self._add_history("swap_linears", varlist)
2280 return
2281
[1862]2282 @asaplog_post_dec
[914]2283 def invert_phase(self):
[1846]2284 """\
[914]2285 Invert the phase of the complex polarisation
2286 """
2287 varlist = vars()
[936]2288 self._math._invert_phase(self)
[914]2289 self._add_history("invert_phase", varlist)
2290 return
2291
[1862]2292 @asaplog_post_dec
[876]2293 def add(self, offset, insitu=None):
[1846]2294 """\
[513]2295 Return a scan where all spectra have the offset added
[1846]2296
[513]2297 Parameters:
[1846]2298
[513]2299 offset: the offset
[1855]2300
[513]2301 insitu: if False a new scantable is returned.
2302 Otherwise, the scaling is done in-situ
2303 The default is taken from .asaprc (False)
[1846]2304
[513]2305 """
2306 if insitu is None: insitu = rcParams['insitu']
[876]2307 self._math._setinsitu(insitu)
[513]2308 varlist = vars()
[876]2309 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2310 s._add_history("add", varlist)
[876]2311 if insitu:
2312 self._assign(s)
2313 else:
[513]2314 return s
2315
[1862]2316 @asaplog_post_dec
[1308]2317 def scale(self, factor, tsys=True, insitu=None):
[1846]2318 """\
2319
[513]2320 Return a scan where all spectra are scaled by the give 'factor'
[1846]2321
[513]2322 Parameters:
[1846]2323
[1819]2324 factor: the scaling factor (float or 1D float list)
[1855]2325
[513]2326 insitu: if False a new scantable is returned.
2327 Otherwise, the scaling is done in-situ
2328 The default is taken from .asaprc (False)
[1855]2329
[513]2330 tsys: if True (default) then apply the operation to Tsys
2331 as well as the data
[1846]2332
[513]2333 """
2334 if insitu is None: insitu = rcParams['insitu']
[876]2335 self._math._setinsitu(insitu)
[513]2336 varlist = vars()
[1819]2337 s = None
2338 import numpy
2339 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2340 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2341 from asapmath import _array2dOp
2342 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2343 else:
2344 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2345 else:
2346 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2347 s._add_history("scale", varlist)
[876]2348 if insitu:
2349 self._assign(s)
2350 else:
[513]2351 return s
2352
[1504]2353 def set_sourcetype(self, match, matchtype="pattern",
2354 sourcetype="reference"):
[1846]2355 """\
[1502]2356 Set the type of the source to be an source or reference scan
[1846]2357 using the provided pattern.
2358
[1502]2359 Parameters:
[1846]2360
[1504]2361 match: a Unix style pattern, regular expression or selector
[1855]2362
[1504]2363 matchtype: 'pattern' (default) UNIX style pattern or
2364 'regex' regular expression
[1855]2365
[1502]2366 sourcetype: the type of the source to use (source/reference)
[1846]2367
[1502]2368 """
2369 varlist = vars()
2370 basesel = self.get_selection()
2371 stype = -1
2372 if sourcetype.lower().startswith("r"):
2373 stype = 1
2374 elif sourcetype.lower().startswith("s"):
2375 stype = 0
[1504]2376 else:
[1502]2377 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2378 if matchtype.lower().startswith("p"):
2379 matchtype = "pattern"
2380 elif matchtype.lower().startswith("r"):
2381 matchtype = "regex"
2382 else:
2383 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2384 sel = selector()
2385 if isinstance(match, selector):
2386 sel = match
2387 else:
[1504]2388 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2389 self.set_selection(basesel+sel)
2390 self._setsourcetype(stype)
2391 self.set_selection(basesel)
[1573]2392 self._add_history("set_sourcetype", varlist)
[1502]2393
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode: the on/off detection mode
                  'paired' (default)
                  identifies 'off' scans by the
                  trailing '_R' (Mopra/Parkes) or
                  '_e'/'_w' (Tid) and matches
                  on/off pairs from the observing pattern
                  'time'
                  finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # split the data into off and on scans via SRCTYPE queries;
            # the original selection is restored by @preserve_selection
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # on/off matching is done on the C++ side by time proximity
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]2442
[1862]2443 @asaplog_post_dec
[1145]2444 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2445 """\
[1143]2446 Form a quotient using "off" beams when observing in "MX" mode.
[1846]2447
[1143]2448 Parameters:
[1846]2449
[1145]2450 mask: an optional mask to be used when weight == 'stddev'
[1855]2451
[1143]2452 weight: How to average the off beams. Default is 'median'.
[1855]2453
[1145]2454 preserve: you can preserve (default) the continuum or
[1855]2455 remove it. The equations used are:
[1846]2456
[1855]2457 preserve: Output = Toff * (on/off) - Toff
2458
2459 remove: Output = Toff * (on/off) - Ton
2460
[1217]2461 """
[1593]2462 mask = mask or ()
[1141]2463 varlist = vars()
2464 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2465 preoff = scantable(self._math._mx_extract(self, 'off'))
2466 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2467 from asapmath import quotient
[1145]2468 q = quotient(on, off, preserve)
[1143]2469 q._add_history("mx_quotient", varlist)
[1217]2470 return q
[513]2471
[1862]2472 @asaplog_post_dec
[718]2473 def freq_switch(self, insitu=None):
[1846]2474 """\
[718]2475 Apply frequency switching to the data.
[1846]2476
[718]2477 Parameters:
[1846]2478
[718]2479 insitu: if False a new scantable is returned.
2480 Otherwise, the swictching is done in-situ
2481 The default is taken from .asaprc (False)
[1846]2482
[718]2483 """
2484 if insitu is None: insitu = rcParams['insitu']
[876]2485 self._math._setinsitu(insitu)
[718]2486 varlist = vars()
[876]2487 s = scantable(self._math._freqswitch(self))
[1118]2488 s._add_history("freq_switch", varlist)
[1856]2489 if insitu:
2490 self._assign(s)
2491 else:
2492 return s
[718]2493
[1862]2494 @asaplog_post_dec
[780]2495 def recalc_azel(self):
[1846]2496 """Recalculate the azimuth and elevation for each position."""
[780]2497 varlist = vars()
[876]2498 self._recalcazel()
[780]2499 self._add_history("recalc_azel", varlist)
2500 return
2501
[1862]2502 @asaplog_post_dec
[513]2503 def __add__(self, other):
2504 varlist = vars()
2505 s = None
2506 if isinstance(other, scantable):
[1573]2507 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2508 elif isinstance(other, float):
[876]2509 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2510 else:
[718]2511 raise TypeError("Other input is not a scantable or float value")
[513]2512 s._add_history("operator +", varlist)
2513 return s
2514
[1862]2515 @asaplog_post_dec
[513]2516 def __sub__(self, other):
2517 """
2518 implicit on all axes and on Tsys
2519 """
2520 varlist = vars()
2521 s = None
2522 if isinstance(other, scantable):
[1588]2523 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2524 elif isinstance(other, float):
[876]2525 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2526 else:
[718]2527 raise TypeError("Other input is not a scantable or float value")
[513]2528 s._add_history("operator -", varlist)
2529 return s
[710]2530
[1862]2531 @asaplog_post_dec
[513]2532 def __mul__(self, other):
2533 """
2534 implicit on all axes and on Tsys
2535 """
2536 varlist = vars()
2537 s = None
2538 if isinstance(other, scantable):
[1588]2539 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2540 elif isinstance(other, float):
[876]2541 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2542 else:
[718]2543 raise TypeError("Other input is not a scantable or float value")
[513]2544 s._add_history("operator *", varlist)
2545 return s
2546
[710]2547
[1862]2548 @asaplog_post_dec
[513]2549 def __div__(self, other):
2550 """
2551 implicit on all axes and on Tsys
2552 """
2553 varlist = vars()
2554 s = None
2555 if isinstance(other, scantable):
[1589]2556 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2557 elif isinstance(other, float):
2558 if other == 0.0:
[718]2559 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2560 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2561 else:
[718]2562 raise TypeError("Other input is not a scantable or float value")
[513]2563 s._add_history("operator /", varlist)
2564 return s
2565
[1862]2566 @asaplog_post_dec
[530]2567 def get_fit(self, row=0):
[1846]2568 """\
[530]2569 Print or return the stored fits for a row in the scantable
[1846]2570
[530]2571 Parameters:
[1846]2572
[530]2573 row: the row which the fit has been applied to.
[1846]2574
[530]2575 """
2576 if row > self.nrow():
2577 return
[976]2578 from asap.asapfit import asapfit
[530]2579 fit = asapfit(self._getfit(row))
[1859]2580 asaplog.push( '%s' %(fit) )
2581 return fit.as_dict()
[530]2582
[1483]2583 def flag_nans(self):
[1846]2584 """\
[1483]2585 Utility function to flag NaN values in the scantable.
2586 """
2587 import numpy
2588 basesel = self.get_selection()
2589 for i in range(self.nrow()):
[1589]2590 sel = self.get_row_selector(i)
2591 self.set_selection(basesel+sel)
[1483]2592 nans = numpy.isnan(self._getspectrum(0))
2593 if numpy.any(nans):
2594 bnans = [ bool(v) for v in nans]
2595 self.flag(bnans)
2596 self.set_selection(basesel)
2597
[1588]2598 def get_row_selector(self, rowno):
2599 return selector(beams=self.getbeam(rowno),
2600 ifs=self.getif(rowno),
2601 pols=self.getpol(rowno),
2602 scans=self.getscan(rowno),
2603 cycles=self.getcycle(rowno))
[1573]2604
    def _add_history(self, funcname, parameters):
        """\
        Append a '##'-separated history entry (timestamp, function name
        and call arguments) to the scantable.

        Parameters:

            funcname:   name of the operation being recorded

            parameters: dict of the call's arguments, typically vars();
                        NOTE: the 'self' key is removed from this dict
                        in place
        """
        # history recording can be disabled globally via .asaprc
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nesting: record the dict's own items
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # compress boolean masks into [start, end) pairs
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # compress boolean masks into [start, end) pairs
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
2644
[710]2645
[484]2646 def _zip_mask(self, mask):
2647 mask = list(mask)
2648 i = 0
2649 segments = []
2650 while mask[i:].count(1):
2651 i += mask[i:].index(1)
2652 if mask[i:].count(0):
2653 j = i + mask[i:].index(0)
2654 else:
[710]2655 j = len(mask)
[1118]2656 segments.append([i, j])
[710]2657 i = j
[484]2658 return segments
[714]2659
[626]2660 def _get_ordinate_label(self):
2661 fu = "("+self.get_fluxunit()+")"
2662 import re
2663 lbl = "Intensity"
[1118]2664 if re.match(".K.", fu):
[626]2665 lbl = "Brightness Temperature "+ fu
[1118]2666 elif re.match(".Jy.", fu):
[626]2667 lbl = "Flux density "+ fu
2668 return lbl
[710]2669
[876]2670 def _check_ifs(self):
2671 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2672 nchans = filter(lambda t: t > 0, nchans)
[876]2673 return (sum(nchans)/len(nchans) == nchans[0])
[976]2674
    @asaplog_post_dec
    #def _fill(self, names, unit, average, getpt, antenna):
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable via the filler,
        merging them into a single table.

        Parameters:

            names:   list of file names to import

            unit:    flux unit to set on the result, or None to leave as-is

            average: if True, time-average the integrations of each file
                     on import

            opts:    dict of extra options forwarded to filler.open()
        """
        first = True
        fullnames = []
        # expand environment variables / '~' and verify every input exists
        # before any work is done
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage backend: 1 for disk-based tables, 0 for memory tables
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
            r.open(name, opts)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge every file after the first into the running result
                tbl = self._math._merge([self, tbl])
            # rebind this object to the freshly filled/merged table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2714
[1402]2715 def __getitem__(self, key):
2716 if key < 0:
2717 key += self.nrow()
2718 if key >= self.nrow():
2719 raise IndexError("Row index out of range.")
2720 return self._getspectrum(key)
2721
2722 def __setitem__(self, key, value):
2723 if key < 0:
2724 key += self.nrow()
2725 if key >= self.nrow():
2726 raise IndexError("Row index out of range.")
2727 if not hasattr(value, "__len__") or \
2728 len(value) > self.nchan(self.getif(key)):
2729 raise ValueError("Spectrum length doesn't match.")
2730 return self._setspectrum(value, key)
2731
2732 def __len__(self):
2733 return self.nrow()
2734
2735 def __iter__(self):
2736 for i in range(len(self)):
2737 yield self[i]
Note: See TracBrowser for help on using the repository browser.