source: trunk/python/scantable.py@ 2166

Last change on this file since 2166 was 2150, checked in by Kana Sugimoto, 14 years ago

New Development: No

JIRA Issue: No (a fix)

Ready for Test: Yes

Interface Changes: added a new method new_asaplot() to module asapplotter which returns asaplot instance based on your backend selection.

What Interface Changed:

Test Programs: sdaverage, sdsmooth with verify=True and plotlevel > 0

Put in Release Notes: No

Module(s):

Description: proper handling of plotting in non-TkAgg backend.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 125.9 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[2004]12from asap._asap import filler, msfiller
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
def preserve_selection(func):
    """Decorator that restores a scantable's selection after *func* runs.

    The wrapped callable's first argument must provide get_selection()
    and set_selection(); whatever selection is active before the call is
    reinstated afterwards, even if the call raises.
    """
    @wraps_dec(func)
    def wrapper(scan, *args, **kwargs):
        # Remember the selection active before the call ...
        saved = scan.get_selection()
        try:
            result = func(scan, *args, **kwargs)
        finally:
            # ... and reinstate it even when func raised.
            scan.set_selection(saved)
        return result
    return wrapper
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for a casacore table directory that is not labelled a
    Measurement Set; False otherwise.
    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # close even if readline raises (original leaked the handle)
            f.close()
        # Scantables and MeasurementSets share the casacore table layout;
        # anything whose type line is not 'Measurement Set' is treated as
        # a scantable, matching the original heuristic.
        return l.find('Measurement Set') == -1
    return False
54## return (os.path.isdir(filename)
55## and not os.path.exists(filename+'/table.f1')
56## and os.path.exists(filename+'/table.info'))
[1697]57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for a casacore table directory whose type line contains
    'Measurement Set'; False otherwise.
    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # close even if readline raises (original leaked the handle)
            f.close()
        return l.find('Measurement Set') != -1
    return False
78
[876]79class scantable(Scantable):
[1846]80 """\
81 The ASAP container for scans (single-dish data).
[102]82 """
[1819]83
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the constructor arguments for the processing history
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy construction from an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table: open directly, honouring the
                    # memory/disk storage preference from .asaprc
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set
                    # forward only the MS-specific keyword args (getpt,
                    # antenna) to the filler as an options dict
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the filler
                    # work out the format
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                 and isinstance(filename[-1], str):
                # a list/tuple of filenames is filled as one scantable
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]188
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the .asaprc default format when none is given
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated mswriter; overwrite
            # handling is delegated to the writer via the options dict
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats are handled by the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
246
[102]247 def copy(self):
[1846]248 """Return a copy of this scantable.
249
250 *Note*:
251
[1348]252 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]253
254 Example::
255
[102]256 copiedscan = scan.copy()
[1846]257
[102]258 """
[876]259 sd = scantable(Scantable._copy(self))
[113]260 return sd
261
[1093]262 def drop_scan(self, scanid=None):
[1846]263 """\
[1093]264 Return a new scantable where the specified scan number(s) has(have)
265 been dropped.
[1846]266
[1093]267 Parameters:
[1846]268
[1093]269 scanid: a (list of) scan number(s)
[1846]270
[1093]271 """
272 from asap import _is_sequence_or_number as _is_valid
273 from asap import _to_list
274 from asap import unique
275 if not _is_valid(scanid):
[1859]276 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
277 scanid = _to_list(scanid)
278 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
279 for sid in scanid: allscans.remove(sid)
280 if len(allscans) == 0:
281 raise ValueError("Can't remove all scans")
282 sel = selector(scans=allscans)
283 return self._select_copy(sel)
[1093]284
[1594]285 def _select_copy(self, selection):
286 orig = self.get_selection()
287 self.set_selection(orig+selection)
288 cp = self.copy()
289 self.set_selection(orig)
290 return cp
291
    def get_scan(self, scanid=None):
        """\
        Return a specific scan (by scanno) or collection of scans (by
        source name) in a new scantable.

        *Note*:

            See scantable.drop_scan() for the inverse operation.

        Parameters:

            scanid:    a (list of) scanno or a source name, unix-style
                       patterns are accepted for source name matching, e.g.
                       '*_R' gets all 'ref scans

        Example::

            # get all scans containing the source '323p459'
            newscan = scan.get_scan('323p459')
            # get all 'off' scans
            refscans = scan.get_scan('*_R')
            # get a susbset of scans by scanno (as listed in scan.summary())
            newscan = scan.get_scan([0, 2, 7, 10])

        """
        if scanid is None:
            raise RuntimeError( 'Please specify a scan no or name to '
                                'retrieve from the scantable' )
        try:
            # NOTE(review): bsel is fetched but never used; selection
            # restoration is handled inside _select_copy.
            bsel = self.get_selection()
            sel = selector()
            if type(scanid) is str:
                # source-name selection (may contain unix-style patterns)
                sel.set_name(scanid)
                return self._select_copy(sel)
            elif type(scanid) is int:
                sel.set_scans([scanid])
                return self._select_copy(sel)
            elif type(scanid) is list:
                sel.set_scans(scanid)
                return self._select_copy(sel)
            else:
                msg = "Illegal scanid type, use 'int' or 'list' if ints."
                raise TypeError(msg)
        except RuntimeError:
            # re-raise so the @asaplog machinery at the call site sees it
            raise
[102]337
    def __str__(self):
        # Verbose (True) summary of the scantable contents.
        return Scantable._summary(self, True)
[102]340
[976]341 def summary(self, filename=None):
[1846]342 """\
[102]343 Print a summary of the contents of this scantable.
[1846]344
[102]345 Parameters:
[1846]346
[1931]347 filename: the name of a file to write the putput to
[102]348 Default - no file output
[1846]349
[102]350 """
[976]351 info = Scantable._summary(self, True)
[102]352 if filename is not None:
[256]353 if filename is "":
354 filename = 'scantable_summary.txt'
[415]355 from os.path import expandvars, isdir
[411]356 filename = expandvars(filename)
[415]357 if not isdir(filename):
[413]358 data = open(filename, 'w')
359 data.write(info)
360 data.close()
361 else:
[718]362 msg = "Illegal file name '%s'." % (filename)
[1859]363 raise IOError(msg)
364 return page(info)
[710]365
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno:   the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]375
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno:   the row number to retrieve the mask from

        """
        return self._getmask(rowno)
385
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the current row in the scantable.

        Parameters:

            spec:    the new spectrum

            rowno:   the row number to set the spectrum for

        """
        # NOTE(review): assert is stripped under ``python -O``; the length
        # check would then silently be skipped.
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
398
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the
              user specified frame (e.g. LSRK/TOPO). To get the 'real'
              coordinate, use scantable.freq_align first. Without it there is
              no closure, i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python-side convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
421
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0) # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        # return a python selector wrapping a copy of the C++ selection
        return selector(self._getselection())
434
    def set_selection(self, selection=None, **kw):
        """\
        Select a subset of the data. All following operations on this
        scantable are only applied to this selection.

        Parameters:

            selection:    a selector object (default unset the selection), or
                          any combination of "pols", "ifs", "beams", "scans",
                          "cycles", "name", "query"

        Examples::

            sel = selector() # create a selection object
            self.set_scans([0, 3]) # select SCANNO 0 and 3
            scan.set_selection(sel) # set the selection
            scan.summary() # will only print summary of scanno 0 an 3
            scan.set_selection() # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary() # will only print summary of scanno 0 an 3
            scan.set_selection() # unset the selection

        """
        # NOTE(review): keyword arguments are silently ignored when an
        # explicit selector object is also passed.
        if selection is None:
            # reset
            if len(kw) == 0:
                selection = selector()
            else:
                # try keywords
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
470
[1819]471 def get_row(self, row=0, insitu=None):
[1846]472 """\
[1819]473 Select a row in the scantable.
474 Return a scantable with single row.
[1846]475
[1819]476 Parameters:
[1846]477
478 row: row no of integration, default is 0.
479 insitu: if False a new scantable is returned. Otherwise, the
480 scaling is done in-situ. The default is taken from .asaprc
481 (False)
482
[1819]483 """
484 if insitu is None: insitu = rcParams['insitu']
485 if not insitu:
486 workscan = self.copy()
487 else:
488 workscan = self
489 # Select a row
490 sel=selector()
[1992]491 sel.set_rows([row])
492 #sel.set_scans([workscan.getscan(row)])
493 #sel.set_cycles([workscan.getcycle(row)])
494 #sel.set_beams([workscan.getbeam(row)])
495 #sel.set_ifs([workscan.getif(row)])
496 #sel.set_polarisations([workscan.getpol(row)])
497 #sel.set_name(workscan._getsourcename(row))
[1819]498 workscan.set_selection(sel)
499 if not workscan.nrow() == 1:
500 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
501 raise RuntimeError(msg)
502 del sel
503 if insitu:
504 self._assign(workscan)
505 else:
506 return workscan
507
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' suffix means: return the abcissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # also fetch the channel at which the extremum occurs
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        # NOTE(review): 'row == None' should be 'row is None' (here and below)
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): if 'row' is neither None nor int, 'rows' stays
        # unbound and the loop below raises UnboundLocalError.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                # abcissa/ordinate values at the extremum channel
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # collect the abcissa values as the return value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # Route the formatted report through a per-user temp file so the
        # whole text arrives in the log as one message.
        # NOTE(review): Python-2-only constructs below (has_key, commands,
        # print >>); also the fixed /tmp path is shared per user.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
615
    def chan2data(self, rowno=0, chan=0):
        """\
        Returns channel/frequency/velocity and spectral value
        at an arbitrary row and channel in the scantable.

        Parameters:

            rowno:   a row number in the scantable. Default is the
                     first row, i.e. rowno=0

            chan:    a channel in the scantable. Default is the first
                     channel, i.e. pos=0

        Returns:

            a pair of dicts {'unit': ..., 'value': ...} for the abcissa
            and the spectrum value. NOTE(review): implicitly returns None
            when either argument is not an int.

        """
        if isinstance(rowno, int) and isinstance(chan, int):
            qx = {'unit': self.get_unit(),
                  'value': self._getabcissa(rowno)[chan]}
            qy = {'unit': self.get_fluxunit(),
                  'value': self._getspectrum(rowno)[chan]}
            return qx, qy
636
[1118]637 def stddev(self, mask=None):
[1846]638 """\
[135]639 Determine the standard deviation of the current beam/if/pol
640 Takes a 'mask' as an optional parameter to specify which
641 channels should be excluded.
[1846]642
[135]643 Parameters:
[1846]644
[135]645 mask: an optional mask specifying where the standard
646 deviation should be determined.
647
[1846]648 Example::
649
[135]650 scan.set_unit('channel')
[1118]651 msk = scan.create_mask([100, 200], [500, 600])
[135]652 scan.stddev(mask=m)
[1846]653
[135]654 """
[1118]655 return self.stats(stat='stddev', mask=mask);
[135]656
[1003]657
[1259]658 def get_column_names(self):
[1846]659 """\
[1003]660 Return a list of column names, which can be used for selection.
661 """
[1259]662 return list(Scantable.get_column_names(self))
[1003]663
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # for all rows, also emit a formatted per-row report to the log
        return self._row_callback(self._gettsys, "Tsys")
[256]680
[1730]681
682 def get_weather(self, row=-1):
[1846]683 """\
684 Return the weather informations.
685
686 Parameters:
687
688 row: the rowno to get the information for. (default all rows)
689
690 Returns:
691
692 a dict or list of of dicts of values for the current selection
693
694 """
695
[1730]696 values = self._get_column(self._get_weather, row)
697 if row > -1:
698 return {'temperature': values[0],
699 'pressure': values[1], 'humidity' : values[2],
700 'windspeed' : values[3], 'windaz' : values[4]
701 }
702 else:
703 out = []
704 for r in values:
705
706 out.append({'temperature': r[0],
707 'pressure': r[1], 'humidity' : r[2],
708 'windspeed' : r[3], 'windaz' : r[4]
709 })
710 return out
711
    def _row_callback(self, callback, label):
        """Apply *callback* to every row, log a formatted per-row report
        headed by *label*, and return the list of callback results.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]735
[1947]736 def _get_column(self, callback, row=-1, *args):
[1070]737 """
738 """
739 if row == -1:
[1947]740 return [callback(i, *args) for i in range(self.nrow())]
[1070]741 else:
[1819]742 if 0 <= row < self.nrow():
[1947]743 return callback(row, *args)
[256]744
[1070]745
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip sub-second/second/minute fields that MVTime omitted at
            # this precision, so strptime matches the returned strings.
            # NOTE(review): '(6-prec)/2' relies on Python 2 integer
            # division; under Python 3 this would become a float.
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]789
[1348]790
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]802
[1573]803
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]814
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]826
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]837
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]849
    def get_direction(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
861
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
873
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit:    optional unit, default is 'channel'. Use one of '*Hz',
                     'km/s', 'channel' or equivalent ''

        """
        varlist = vars()
        # 'pixel' and 'channel' are synonyms for the empty (native) unit
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        # coordinfo slot 0 holds the abcissa unit
        inf[0] = unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
[113]892
[1862]893 @asaplog_post_dec
[484]894 def set_instrument(self, instr):
[1846]895 """\
[1348]896 Set the instrument for subsequent processing.
[1846]897
[358]898 Parameters:
[1846]899
[710]900 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]901 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]902
[358]903 """
904 self._setInstrument(instr)
[1118]905 self._add_history("set_instument", vars())
[358]906
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:    'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
919
[1862]920 @asaplog_post_dec
[276]921 def set_doppler(self, doppler='RADIO'):
[1846]922 """\
[276]923 Set the doppler for all following operations on this scantable.
[1846]924
[276]925 Parameters:
[1846]926
[276]927 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]928
[276]929 """
[484]930 varlist = vars()
[276]931 inf = list(self._getcoordinfo())
932 inf[2] = doppler
933 self._setcoordinfo(inf)
[1118]934 self._add_history("set_doppler", vars())
[710]935
[1862]936 @asaplog_post_dec
[226]937 def set_freqframe(self, frame=None):
[1846]938 """\
[113]939 Set the frame type of the Spectral Axis.
[1846]940
[113]941 Parameters:
[1846]942
[591]943 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]944 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]945 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]946
947 Example::
948
[113]949 scan.set_freqframe('BARY')
[1846]950
[113]951 """
[1593]952 frame = frame or rcParams['scantable.freqframe']
[484]953 varlist = vars()
[1819]954 # "REST" is not implemented in casacore
955 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
956 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
957 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]958 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]959
[989]960 if frame in valid:
[113]961 inf = list(self._getcoordinfo())
962 inf[1] = frame
963 self._setcoordinfo(inf)
[1118]964 self._add_history("set_freqframe", varlist)
[102]965 else:
[1118]966 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]967 raise TypeError(msg)
[710]968
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:    an optional frame type, default ''. Valid frames are:
                      'J2000', 'B1950', 'GALACTIC'

        Example::

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # delegate validation and the actual change to the C++ layer
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]987
    def get_unit(self):
        """\
        Get the default unit set in this scantable

        Returns:

            A unit string

        """
        inf = self._getcoordinfo()
        unit = inf[0]
        # an empty unit means the native channel axis
        if unit == '': unit = 'channel'
        return unit
[102]1001
    @asaplog_post_dec
    def get_abcissa(self, rowno=0):
        """\
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol

        Parameters:

            rowno:    an optional row number in the scantable. Default is the
                      first row, i.e. rowno=0

        Returns:

            The abcissa values and the format string (as a dictionary)

        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        return abc, lbl
[113]1021
[1862]1022 @asaplog_post_dec
[1994]1023 def flag(self, row=-1, mask=None, unflag=False):
[1846]1024 """\
[1001]1025 Flag the selected data using an optional channel mask.
[1846]1026
[1001]1027 Parameters:
[1846]1028
[1994]1029 row: an optional row number in the scantable.
1030 Default -1 flags all rows
1031
[1001]1032 mask: an optional channel mask, created with create_mask. Default
1033 (no mask) is all channels.
[1855]1034
[1819]1035 unflag: if True, unflag the data
[1846]1036
[1001]1037 """
1038 varlist = vars()
[1593]1039 mask = mask or []
[1994]1040 self._flag(row, mask, unflag)
[1001]1041 self._add_history("flag", varlist)
1042
[1862]1043 @asaplog_post_dec
[1819]1044 def flag_row(self, rows=[], unflag=False):
[1846]1045 """\
[1819]1046 Flag the selected data in row-based manner.
[1846]1047
[1819]1048 Parameters:
[1846]1049
[1843]1050 rows: list of row numbers to be flagged. Default is no row
1051 (must be explicitly specified to execute row-based flagging).
[1855]1052
[1819]1053 unflag: if True, unflag the data.
[1846]1054
[1819]1055 """
1056 varlist = vars()
[1859]1057 self._flag_row(rows, unflag)
[1819]1058 self._add_history("flag_row", varlist)
1059
[1862]1060 @asaplog_post_dec
[1819]1061 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1062 """\
[1819]1063 Flag the selected data outside a specified range (in channel-base)
[1846]1064
[1819]1065 Parameters:
[1846]1066
[1819]1067 uthres: upper threshold.
[1855]1068
[1819]1069 dthres: lower threshold
[1846]1070
[1819]1071 clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1072 False for flagging data inside the range.
[1855]1073
[1846]1074 unflag: if True, unflag the data.
1075
[1819]1076 """
1077 varlist = vars()
[1859]1078 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1079 self._add_history("clip", varlist)
1080
[1862]1081 @asaplog_post_dec
[1584]1082 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1083 """\
[1192]1084 Flag the data in 'lag' space by providing a frequency to remove.
[1584]1085 Flagged data in the scantable gets interpolated over the region.
[1192]1086 No taper is applied.
[1846]1087
[1192]1088 Parameters:
[1846]1089
[1579]1090 start: the start frequency (really a period within the
1091 bandwidth) or period to remove
[1855]1092
[1579]1093 end: the end frequency or period to remove
[1855]1094
[1584]1095 unit: the frequency unit (default "MHz") or "" for
[1579]1096 explicit lag channels
[1846]1097
1098 *Notes*:
1099
[1579]1100 It is recommended to flag edges of the band or strong
[1348]1101 signals beforehand.
[1846]1102
[1192]1103 """
1104 if insitu is None: insitu = rcParams['insitu']
1105 self._math._setinsitu(insitu)
1106 varlist = vars()
[1579]1107 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1108 if not (unit == "" or base.has_key(unit)):
[1192]1109 raise ValueError("%s is not a valid unit." % unit)
[1859]1110 if unit == "":
1111 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1112 else:
1113 s = scantable(self._math._lag_flag(self, start*base[unit],
1114 end*base[unit], "frequency"))
[1192]1115 s._add_history("lag_flag", varlist)
1116 if insitu:
1117 self._assign(s)
1118 else:
1119 return s
[1001]1120
[1862]1121 @asaplog_post_dec
[113]1122 def create_mask(self, *args, **kwargs):
[1846]1123 """\
[1118]1124 Compute and return a mask based on [min, max] windows.
[189]1125 The specified windows are to be INCLUDED, when the mask is
[113]1126 applied.
[1846]1127
[102]1128 Parameters:
[1846]1129
[1118]1130 [min, max], [min2, max2], ...
[1024]1131 Pairs of start/end points (inclusive)specifying the regions
[102]1132 to be masked
[1855]1133
[189]1134 invert: optional argument. If specified as True,
1135 return an inverted mask, i.e. the regions
1136 specified are EXCLUDED
[1855]1137
[513]1138 row: create the mask using the specified row for
1139 unit conversions, default is row=0
1140 only necessary if frequency varies over rows.
[1846]1141
1142 Examples::
1143
[113]1144 scan.set_unit('channel')
[1846]1145 # a)
[1118]1146 msk = scan.create_mask([400, 500], [800, 900])
[189]1147 # masks everything outside 400 and 500
[113]1148 # and 800 and 900 in the unit 'channel'
1149
[1846]1150 # b)
[1118]1151 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1152 # masks the regions between 400 and 500
[113]1153 # and 800 and 900 in the unit 'channel'
[1846]1154
1155 # c)
1156 #mask only channel 400
[1554]1157 msk = scan.create_mask([400])
[1846]1158
[102]1159 """
[1554]1160 row = kwargs.get("row", 0)
[513]1161 data = self._getabcissa(row)
[113]1162 u = self._getcoordinfo()[0]
[1859]1163 if u == "":
1164 u = "channel"
1165 msg = "The current mask window unit is %s" % u
1166 i = self._check_ifs()
1167 if not i:
1168 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1169 asaplog.push(msg)
[102]1170 n = self.nchan()
[1295]1171 msk = _n_bools(n, False)
[710]1172 # test if args is a 'list' or a 'normal *args - UGLY!!!
1173
[1118]1174 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1175 and args or args[0]
[710]1176 for window in ws:
[1554]1177 if len(window) == 1:
1178 window = [window[0], window[0]]
1179 if len(window) == 0 or len(window) > 2:
1180 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1181 if window[0] > window[1]:
1182 tmp = window[0]
1183 window[0] = window[1]
1184 window[1] = tmp
[102]1185 for i in range(n):
[1024]1186 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1187 msk[i] = True
[113]1188 if kwargs.has_key('invert'):
1189 if kwargs.get('invert'):
[1295]1190 msk = mask_not(msk)
[102]1191 return msk
[710]1192
[1931]1193 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1194 """\
[1819]1195 Compute and return a list of mask windows, [min, max].
[1846]1196
[1819]1197 Parameters:
[1846]1198
[1819]1199 mask: channel mask, created with create_mask.
[1855]1200
[1819]1201 row: calcutate the masklist using the specified row
1202 for unit conversions, default is row=0
1203 only necessary if frequency varies over rows.
[1846]1204
[1819]1205 Returns:
[1846]1206
[1819]1207 [min, max], [min2, max2], ...
1208 Pairs of start/end points (inclusive)specifying
1209 the masked regions
[1846]1210
[1819]1211 """
1212 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1213 raise TypeError("The mask should be list or tuple.")
1214 if len(mask) < 2:
1215 raise TypeError("The mask elements should be > 1")
1216 if self.nchan() != len(mask):
1217 msg = "Number of channels in scantable != number of mask elements"
1218 raise TypeError(msg)
1219 data = self._getabcissa(row)
1220 u = self._getcoordinfo()[0]
[1859]1221 if u == "":
1222 u = "channel"
1223 msg = "The current mask window unit is %s" % u
1224 i = self._check_ifs()
1225 if not i:
1226 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1227 if not silent:
1228 asaplog.push(msg)
[1819]1229 masklist=[]
1230 ist, ien = None, None
1231 ist, ien=self.get_mask_indices(mask)
1232 if ist is not None and ien is not None:
1233 for i in xrange(len(ist)):
1234 range=[data[ist[i]],data[ien[i]]]
1235 range.sort()
1236 masklist.append([range[0],range[1]])
1237 return masklist
1238
1239 def get_mask_indices(self, mask=None):
[1846]1240 """\
[1819]1241 Compute and Return lists of mask start indices and mask end indices.
[1855]1242
1243 Parameters:
1244
[1819]1245 mask: channel mask, created with create_mask.
[1846]1246
[1819]1247 Returns:
[1846]1248
[1819]1249 List of mask start indices and that of mask end indices,
1250 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1251
[1819]1252 """
1253 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1254 raise TypeError("The mask should be list or tuple.")
1255 if len(mask) < 2:
1256 raise TypeError("The mask elements should be > 1")
1257 istart=[]
1258 iend=[]
1259 if mask[0]: istart.append(0)
1260 for i in range(len(mask)-1):
1261 if not mask[i] and mask[i+1]:
1262 istart.append(i+1)
1263 elif mask[i] and not mask[i+1]:
1264 iend.append(i)
1265 if mask[len(mask)-1]: iend.append(len(mask)-1)
1266 if len(istart) != len(iend):
1267 raise RuntimeError("Numbers of mask start != mask end.")
1268 for i in range(len(istart)):
1269 if istart[i] > iend[i]:
1270 raise RuntimeError("Mask start index > mask end index")
1271 break
1272 return istart,iend
1273
[2013]1274 @asaplog_post_dec
1275 def parse_maskexpr(self,maskstring):
1276 """
1277 Parse CASA type mask selection syntax (IF dependent).
1278
1279 Parameters:
1280 maskstring : A string mask selection expression.
1281 A comma separated selections mean different IF -
1282 channel combinations. IFs and channel selections
1283 are partitioned by a colon, ':'.
1284 examples:
[2015]1285 '' = all IFs (all channels)
[2013]1286 '<2,4~6,9' = IFs 0,1,4,5,6,9 (all channels)
1287 '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1288 '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1289 all channels in IF8
1290 Returns:
1291 A dictionary of selected (valid) IF and masklist pairs,
1292 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1293 """
1294 if not isinstance(maskstring,str):
1295 asaplog.post()
1296 asaplog.push("Invalid mask expression")
1297 asaplog.post("ERROR")
1298
1299 valid_ifs = self.getifnos()
1300 frequnit = self.get_unit()
1301 seldict = {}
[2015]1302 if maskstring == "":
1303 maskstring = str(valid_ifs)[1:-1]
[2013]1304 ## split each selection
1305 sellist = maskstring.split(',')
1306 for currselstr in sellist:
1307 selset = currselstr.split(':')
1308 # spw and mask string (may include ~, < or >)
1309 spwmasklist = self._parse_selection(selset[0],typestr='integer',
1310 offset=1,minval=min(valid_ifs),
1311 maxval=max(valid_ifs))
1312 for spwlist in spwmasklist:
1313 selspws = []
1314 for ispw in range(spwlist[0],spwlist[1]+1):
1315 # Put into the list only if ispw exists
1316 if valid_ifs.count(ispw):
1317 selspws.append(ispw)
1318 del spwmasklist, spwlist
1319
1320 # parse frequency mask list
1321 if len(selset) > 1:
1322 freqmasklist = self._parse_selection(selset[1],typestr='float',
1323 offset=0.)
1324 else:
1325 # want to select the whole spectrum
1326 freqmasklist = [None]
1327
1328 ## define a dictionary of spw - masklist combination
1329 for ispw in selspws:
1330 #print "working on", ispw
1331 spwstr = str(ispw)
1332 if len(selspws) == 0:
1333 # empty spw
1334 continue
1335 else:
1336 ## want to get min and max of the spw and
1337 ## offset to set for '<' and '>'
1338 if frequnit == 'channel':
1339 minfreq = 0
1340 maxfreq = self.nchan(ifno=ispw)
1341 offset = 0.5
1342 else:
1343 ## This is ugly part. need improvement
1344 for ifrow in xrange(self.nrow()):
1345 if self.getif(ifrow) == ispw:
1346 #print "IF",ispw,"found in row =",ifrow
1347 break
1348 freqcoord = self.get_coordinate(ifrow)
1349 freqs = self._getabcissa(ifrow)
1350 minfreq = min(freqs)
1351 maxfreq = max(freqs)
1352 if len(freqs) == 1:
1353 offset = 0.5
1354 elif frequnit.find('Hz') > 0:
1355 offset = abs(freqcoord.to_frequency(1,unit=frequnit)
1356 -freqcoord.to_frequency(0,unit=frequnit))*0.5
1357 elif frequnit.find('m/s') > 0:
1358 offset = abs(freqcoord.to_velocity(1,unit=frequnit)
1359 -freqcoord.to_velocity(0,unit=frequnit))*0.5
1360 else:
1361 asaplog.post()
1362 asaplog.push("Invalid frequency unit")
1363 asaplog.post("ERROR")
1364 del freqs, freqcoord, ifrow
1365 for freq in freqmasklist:
1366 selmask = freq or [minfreq, maxfreq]
1367 if selmask[0] == None:
1368 ## selection was "<freq[1]".
1369 if selmask[1] < minfreq:
1370 ## avoid adding region selection
1371 selmask = None
1372 else:
1373 selmask = [minfreq,selmask[1]-offset]
1374 elif selmask[1] == None:
1375 ## selection was ">freq[0]"
1376 if selmask[0] > maxfreq:
1377 ## avoid adding region selection
1378 selmask = None
1379 else:
1380 selmask = [selmask[0]+offset,maxfreq]
1381 if selmask:
1382 if not seldict.has_key(spwstr):
1383 # new spw selection
1384 seldict[spwstr] = []
1385 seldict[spwstr] += [selmask]
1386 del minfreq,maxfreq,offset,freq,selmask
1387 del spwstr
1388 del freqmasklist
1389 del valid_ifs
1390 if len(seldict) == 0:
1391 asaplog.post()
1392 asaplog.push("No valid selection in the mask expression: "+maskstring)
1393 asaplog.post("WARN")
1394 return None
1395 msg = "Selected masklist:\n"
1396 for sif, lmask in seldict.iteritems():
1397 msg += " IF"+sif+" - "+str(lmask)+"\n"
1398 asaplog.push(msg)
1399 return seldict
1400
1401 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1402 """
1403 Parameters:
1404 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1405 typestr : The type of the values in returned list
1406 ('integer' or 'float')
1407 offset : The offset value to subtract from or add to
1408 the boundary value if the selection string
1409 includes '<' or '>'
1410 minval, maxval : The minimum/maximum values to set if the
1411 selection string includes '<' or '>'.
1412 The list element is filled with None by default.
1413 Returns:
1414 A list of min/max pair of selections.
1415 Example:
1416 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1417 returns [[0,2],[5,7],[9,9]]
1418 """
1419 selgroups = selstr.split(';')
1420 sellists = []
1421 if typestr.lower().startswith('int'):
1422 formatfunc = int
1423 else:
1424 formatfunc = float
1425
1426 for currsel in selgroups:
1427 if currsel.find('~') > 0:
1428 minsel = formatfunc(currsel.split('~')[0].strip())
1429 maxsel = formatfunc(currsel.split('~')[1].strip())
1430 elif currsel.strip().startswith('<'):
1431 minsel = minval
1432 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1433 - formatfunc(offset)
1434 elif currsel.strip().startswith('>'):
1435 minsel = formatfunc(currsel.split('>')[1].strip()) \
1436 + formatfunc(offset)
1437 maxsel = maxval
1438 else:
1439 minsel = formatfunc(currsel)
1440 maxsel = formatfunc(currsel)
1441 sellists.append([minsel,maxsel])
1442 return sellists
1443
[1819]1444# def get_restfreqs(self):
1445# """
1446# Get the restfrequency(s) stored in this scantable.
1447# The return value(s) are always of unit 'Hz'
1448# Parameters:
1449# none
1450# Returns:
1451# a list of doubles
1452# """
1453# return list(self._getrestfreqs())
1454
1455 def get_restfreqs(self, ids=None):
[1846]1456 """\
[256]1457 Get the restfrequency(s) stored in this scantable.
1458 The return value(s) are always of unit 'Hz'
[1846]1459
[256]1460 Parameters:
[1846]1461
[1819]1462 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1463 be retrieved
[1846]1464
[256]1465 Returns:
[1846]1466
[1819]1467 dictionary containing ids and a list of doubles for each id
[1846]1468
[256]1469 """
[1819]1470 if ids is None:
1471 rfreqs={}
1472 idlist = self.getmolnos()
1473 for i in idlist:
1474 rfreqs[i]=list(self._getrestfreqs(i))
1475 return rfreqs
1476 else:
1477 if type(ids)==list or type(ids)==tuple:
1478 rfreqs={}
1479 for i in ids:
1480 rfreqs[i]=list(self._getrestfreqs(i))
1481 return rfreqs
1482 else:
1483 return list(self._getrestfreqs(ids))
1484 #return list(self._getrestfreqs(ids))
[102]1485
[931]1486 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1487 """\
[931]1488 Set or replace the restfrequency specified and
[1938]1489 if the 'freqs' argument holds a scalar,
[931]1490 then that rest frequency will be applied to all the selected
1491 data. If the 'freqs' argument holds
1492 a vector, then it MUST be of equal or smaller length than
1493 the number of IFs (and the available restfrequencies will be
1494 replaced by this vector). In this case, *all* data have
1495 the restfrequency set per IF according
1496 to the corresponding value you give in the 'freqs' vector.
[1118]1497 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1498 IF 1 gets restfreq 2e9.
[1846]1499
[1395]1500 You can also specify the frequencies via a linecatalog.
[1153]1501
[931]1502 Parameters:
[1846]1503
[931]1504 freqs: list of rest frequency values or string idenitfiers
[1855]1505
[931]1506 unit: unit for rest frequency (default 'Hz')
[402]1507
[1846]1508
1509 Example::
1510
[1819]1511 # set the given restfrequency for the all currently selected IFs
[931]1512 scan.set_restfreqs(freqs=1.4e9)
[1845]1513 # set restfrequencies for the n IFs (n > 1) in the order of the
1514 # list, i.e
1515 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1516 # len(list_of_restfreqs) == nIF
1517 # for nIF == 1 the following will set multiple restfrequency for
1518 # that IF
[1819]1519 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1520 # set multiple restfrequencies per IF. as a list of lists where
1521 # the outer list has nIF elements, the inner s arbitrary
1522 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1523
[1846]1524 *Note*:
[1845]1525
[931]1526 To do more sophisticate Restfrequency setting, e.g. on a
1527 source and IF basis, use scantable.set_selection() before using
[1846]1528 this function::
[931]1529
[1846]1530 # provided your scantable is called scan
1531 selection = selector()
1532 selection.set_name("ORION*")
1533 selection.set_ifs([1])
1534 scan.set_selection(selection)
1535 scan.set_restfreqs(freqs=86.6e9)
1536
[931]1537 """
1538 varlist = vars()
[1157]1539 from asap import linecatalog
1540 # simple value
[1118]1541 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1542 self._setrestfreqs([freqs], [""], unit)
[1157]1543 # list of values
[1118]1544 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1545 # list values are scalars
[1118]1546 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1547 if len(freqs) == 1:
1548 self._setrestfreqs(freqs, [""], unit)
1549 else:
1550 # allow the 'old' mode of setting mulitple IFs
1551 sel = selector()
1552 savesel = self._getselection()
1553 iflist = self.getifnos()
1554 if len(freqs)>len(iflist):
1555 raise ValueError("number of elements in list of list "
1556 "exeeds the current IF selections")
1557 iflist = self.getifnos()
1558 for i, fval in enumerate(freqs):
1559 sel.set_ifs(iflist[i])
1560 self._setselection(sel)
1561 self._setrestfreqs([fval], [""], unit)
1562 self._setselection(savesel)
1563
1564 # list values are dict, {'value'=, 'name'=)
[1157]1565 elif isinstance(freqs[-1], dict):
[1845]1566 values = []
1567 names = []
1568 for d in freqs:
1569 values.append(d["value"])
1570 names.append(d["name"])
1571 self._setrestfreqs(values, names, unit)
[1819]1572 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1573 sel = selector()
1574 savesel = self._getselection()
[1322]1575 iflist = self.getifnos()
[1819]1576 if len(freqs)>len(iflist):
[1845]1577 raise ValueError("number of elements in list of list exeeds"
1578 " the current IF selections")
1579 for i, fval in enumerate(freqs):
[1322]1580 sel.set_ifs(iflist[i])
[1259]1581 self._setselection(sel)
[1845]1582 self._setrestfreqs(fval, [""], unit)
[1157]1583 self._setselection(savesel)
1584 # freqs are to be taken from a linecatalog
[1153]1585 elif isinstance(freqs, linecatalog):
1586 sel = selector()
1587 savesel = self._getselection()
1588 for i in xrange(freqs.nrow()):
[1322]1589 sel.set_ifs(iflist[i])
[1153]1590 self._setselection(sel)
[1845]1591 self._setrestfreqs([freqs.get_frequency(i)],
1592 [freqs.get_name(i)], "MHz")
[1153]1593 # ensure that we are not iterating past nIF
1594 if i == self.nif()-1: break
1595 self._setselection(savesel)
[931]1596 else:
1597 return
1598 self._add_history("set_restfreqs", varlist)
1599
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate straight to the base-class (C++) implementation;
        # NOTE(review): unlike sibling methods this does not record a
        # history entry -- confirm whether that is intentional
        Scantable.shift_refpix(self, delta)
[931]1615
[1862]1616 @asaplog_post_dec
[1259]1617 def history(self, filename=None):
[1846]1618 """\
[1259]1619 Print the history. Optionally to a file.
[1846]1620
[1348]1621 Parameters:
[1846]1622
[1928]1623 filename: The name of the file to save the history to.
[1846]1624
[1259]1625 """
[484]1626 hist = list(self._gethistory())
[794]1627 out = "-"*80
[484]1628 for h in hist:
[489]1629 if h.startswith("---"):
[1857]1630 out = "\n".join([out, h])
[489]1631 else:
1632 items = h.split("##")
1633 date = items[0]
1634 func = items[1]
1635 items = items[2:]
[794]1636 out += "\n"+date+"\n"
1637 out += "Function: %s\n Parameters:" % (func)
[489]1638 for i in items:
[1938]1639 if i == '':
1640 continue
[489]1641 s = i.split("=")
[1118]1642 out += "\n %s = %s" % (s[0], s[1])
[1857]1643 out = "\n".join([out, "-"*80])
[1259]1644 if filename is not None:
1645 if filename is "":
1646 filename = 'scantable_history.txt'
1647 import os
1648 filename = os.path.expandvars(os.path.expanduser(filename))
1649 if not os.path.isdir(filename):
1650 data = open(filename, 'w')
1651 data.write(out)
1652 data.close()
1653 else:
1654 msg = "Illegal file name '%s'." % (filename)
[1859]1655 raise IOError(msg)
1656 return page(out)
[513]1657 #
1658 # Maths business
1659 #
[1862]1660 @asaplog_post_dec
[931]1661 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1662 """\
[1070]1663 Return the (time) weighted average of a scan.
[1846]1664
1665 *Note*:
1666
[1070]1667 in channels only - align if necessary
[1846]1668
[513]1669 Parameters:
[1846]1670
[513]1671 mask: an optional mask (only used for 'var' and 'tsys'
1672 weighting)
[1855]1673
[558]1674 scanav: True averages each scan separately
1675 False (default) averages all scans together,
[1855]1676
[1099]1677 weight: Weighting scheme.
1678 'none' (mean no weight)
1679 'var' (1/var(spec) weighted)
1680 'tsys' (1/Tsys**2 weighted)
1681 'tint' (integration time weighted)
1682 'tintsys' (Tint/Tsys**2)
1683 'median' ( median averaging)
[535]1684 The default is 'tint'
[1855]1685
[931]1686 align: align the spectra in velocity before averaging. It takes
1687 the time of the first spectrum as reference time.
[1846]1688
1689 Example::
1690
[513]1691 # time average the scantable without using a mask
[710]1692 newscan = scan.average_time()
[1846]1693
[513]1694 """
1695 varlist = vars()
[1593]1696 weight = weight or 'TINT'
1697 mask = mask or ()
1698 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1699 scan = (self, )
[1859]1700
1701 if align:
1702 scan = (self.freq_align(insitu=False), )
1703 s = None
1704 if weight.upper() == 'MEDIAN':
1705 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1706 scanav))
1707 else:
1708 s = scantable(self._math._average(scan, mask, weight.upper(),
1709 scanav))
[1099]1710 s._add_history("average_time", varlist)
[513]1711 return s
[710]1712
[1862]1713 @asaplog_post_dec
[876]1714 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1715 """\
[513]1716 Return a scan where all spectra are converted to either
1717 Jansky or Kelvin depending upon the flux units of the scan table.
1718 By default the function tries to look the values up internally.
1719 If it can't find them (or if you want to over-ride), you must
1720 specify EITHER jyperk OR eta (and D which it will try to look up
1721 also if you don't set it). jyperk takes precedence if you set both.
[1846]1722
[513]1723 Parameters:
[1846]1724
[513]1725 jyperk: the Jy / K conversion factor
[1855]1726
[513]1727 eta: the aperture efficiency
[1855]1728
[1928]1729 d: the geometric diameter (metres)
[1855]1730
[513]1731 insitu: if False a new scantable is returned.
1732 Otherwise, the scaling is done in-situ
1733 The default is taken from .asaprc (False)
[1846]1734
[513]1735 """
1736 if insitu is None: insitu = rcParams['insitu']
[876]1737 self._math._setinsitu(insitu)
[513]1738 varlist = vars()
[1593]1739 jyperk = jyperk or -1.0
1740 d = d or -1.0
1741 eta = eta or -1.0
[876]1742 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1743 s._add_history("convert_flux", varlist)
1744 if insitu: self._assign(s)
1745 else: return s
[513]1746
[1862]1747 @asaplog_post_dec
[876]1748 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1749 """\
[513]1750 Return a scan after applying a gain-elevation correction.
1751 The correction can be made via either a polynomial or a
1752 table-based interpolation (and extrapolation if necessary).
1753 You specify polynomial coefficients, an ascii table or neither.
1754 If you specify neither, then a polynomial correction will be made
1755 with built in coefficients known for certain telescopes (an error
1756 will occur if the instrument is not known).
1757 The data and Tsys are *divided* by the scaling factors.
[1846]1758
[513]1759 Parameters:
[1846]1760
[513]1761 poly: Polynomial coefficients (default None) to compute a
1762 gain-elevation correction as a function of
1763 elevation (in degrees).
[1855]1764
[513]1765 filename: The name of an ascii file holding correction factors.
1766 The first row of the ascii file must give the column
1767 names and these MUST include columns
1768 "ELEVATION" (degrees) and "FACTOR" (multiply data
1769 by this) somewhere.
1770 The second row must give the data type of the
1771 column. Use 'R' for Real and 'I' for Integer.
1772 An example file would be
1773 (actual factors are arbitrary) :
1774
1775 TIME ELEVATION FACTOR
1776 R R R
1777 0.1 0 0.8
1778 0.2 20 0.85
1779 0.3 40 0.9
1780 0.4 60 0.85
1781 0.5 80 0.8
1782 0.6 90 0.75
[1855]1783
[513]1784 method: Interpolation method when correcting from a table.
1785 Values are "nearest", "linear" (default), "cubic"
1786 and "spline"
[1855]1787
[513]1788 insitu: if False a new scantable is returned.
1789 Otherwise, the scaling is done in-situ
1790 The default is taken from .asaprc (False)
[1846]1791
[513]1792 """
1793
1794 if insitu is None: insitu = rcParams['insitu']
[876]1795 self._math._setinsitu(insitu)
[513]1796 varlist = vars()
[1593]1797 poly = poly or ()
[513]1798 from os.path import expandvars
1799 filename = expandvars(filename)
[876]1800 s = scantable(self._math._gainel(self, poly, filename, method))
1801 s._add_history("gain_el", varlist)
[1593]1802 if insitu:
1803 self._assign(s)
1804 else:
1805 return s
[710]1806
[1862]1807 @asaplog_post_dec
[931]1808 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1809 """\
[513]1810 Return a scan where all rows have been aligned in frequency/velocity.
1811 The alignment frequency frame (e.g. LSRK) is that set by function
1812 set_freqframe.
[1846]1813
[513]1814 Parameters:
[1855]1815
[513]1816 reftime: reference time to align at. By default, the time of
1817 the first row of data is used.
[1855]1818
[513]1819 method: Interpolation method for regridding the spectra.
1820 Choose from "nearest", "linear", "cubic" (default)
1821 and "spline"
[1855]1822
[513]1823 insitu: if False a new scantable is returned.
1824 Otherwise, the scaling is done in-situ
1825 The default is taken from .asaprc (False)
[1846]1826
[513]1827 """
[931]1828 if insitu is None: insitu = rcParams["insitu"]
[876]1829 self._math._setinsitu(insitu)
[513]1830 varlist = vars()
[1593]1831 reftime = reftime or ""
[931]1832 s = scantable(self._math._freq_align(self, reftime, method))
[876]1833 s._add_history("freq_align", varlist)
1834 if insitu: self._assign(s)
1835 else: return s
[513]1836
[1862]1837 @asaplog_post_dec
[1725]1838 def opacity(self, tau=None, insitu=None):
[1846]1839 """\
[513]1840 Apply an opacity correction. The data
1841 and Tsys are multiplied by the correction factor.
[1846]1842
[513]1843 Parameters:
[1855]1844
[1689]1845 tau: (list of) opacity from which the correction factor is
[513]1846 exp(tau*ZD)
[1689]1847 where ZD is the zenith-distance.
1848 If a list is provided, it has to be of length nIF,
1849 nIF*nPol or 1 and in order of IF/POL, e.g.
1850 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1851 if tau is `None` the opacities are determined from a
1852 model.
[1855]1853
[513]1854 insitu: if False a new scantable is returned.
1855 Otherwise, the scaling is done in-situ
1856 The default is taken from .asaprc (False)
[1846]1857
[513]1858 """
1859 if insitu is None: insitu = rcParams['insitu']
[876]1860 self._math._setinsitu(insitu)
[513]1861 varlist = vars()
[1689]1862 if not hasattr(tau, "__len__"):
1863 tau = [tau]
[876]1864 s = scantable(self._math._opacity(self, tau))
1865 s._add_history("opacity", varlist)
1866 if insitu: self._assign(s)
1867 else: return s
[513]1868
[1862]1869 @asaplog_post_dec
[513]1870 def bin(self, width=5, insitu=None):
[1846]1871 """\
[513]1872 Return a scan where all spectra have been binned up.
[1846]1873
[1348]1874 Parameters:
[1846]1875
[513]1876 width: The bin width (default=5) in pixels
[1855]1877
[513]1878 insitu: if False a new scantable is returned.
1879 Otherwise, the scaling is done in-situ
1880 The default is taken from .asaprc (False)
[1846]1881
[513]1882 """
1883 if insitu is None: insitu = rcParams['insitu']
[876]1884 self._math._setinsitu(insitu)
[513]1885 varlist = vars()
[876]1886 s = scantable(self._math._bin(self, width))
[1118]1887 s._add_history("bin", varlist)
[1589]1888 if insitu:
1889 self._assign(s)
1890 else:
1891 return s
[513]1892
[1862]1893 @asaplog_post_dec
[513]1894 def resample(self, width=5, method='cubic', insitu=None):
[1846]1895 """\
[1348]1896 Return a scan where all spectra have been binned up.
[1573]1897
[1348]1898 Parameters:
[1846]1899
[513]1900 width: The bin width (default=5) in pixels
[1855]1901
[513]1902 method: Interpolation method when correcting from a table.
1903 Values are "nearest", "linear", "cubic" (default)
1904 and "spline"
[1855]1905
[513]1906 insitu: if False a new scantable is returned.
1907 Otherwise, the scaling is done in-situ
1908 The default is taken from .asaprc (False)
[1846]1909
[513]1910 """
1911 if insitu is None: insitu = rcParams['insitu']
[876]1912 self._math._setinsitu(insitu)
[513]1913 varlist = vars()
[876]1914 s = scantable(self._math._resample(self, method, width))
[1118]1915 s._add_history("resample", varlist)
[876]1916 if insitu: self._assign(s)
1917 else: return s
[513]1918
[1862]1919 @asaplog_post_dec
[946]1920 def average_pol(self, mask=None, weight='none'):
[1846]1921 """\
[946]1922 Average the Polarisations together.
[1846]1923
[946]1924 Parameters:
[1846]1925
[946]1926 mask: An optional mask defining the region, where the
1927 averaging will be applied. The output will have all
1928 specified points masked.
[1855]1929
[946]1930 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1931 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1932
[946]1933 """
1934 varlist = vars()
[1593]1935 mask = mask or ()
[1010]1936 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1937 s._add_history("average_pol", varlist)
[992]1938 return s
[513]1939
[1862]1940 @asaplog_post_dec
[1145]1941 def average_beam(self, mask=None, weight='none'):
[1846]1942 """\
[1145]1943 Average the Beams together.
[1846]1944
[1145]1945 Parameters:
1946 mask: An optional mask defining the region, where the
1947 averaging will be applied. The output will have all
1948 specified points masked.
[1855]1949
[1145]1950 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1951 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1952
[1145]1953 """
1954 varlist = vars()
[1593]1955 mask = mask or ()
[1145]1956 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1957 s._add_history("average_beam", varlist)
1958 return s
1959
[1586]1960 def parallactify(self, pflag):
[1846]1961 """\
[1843]1962 Set a flag to indicate whether this data should be treated as having
[1617]1963 been 'parallactified' (total phase == 0.0)
[1846]1964
[1617]1965 Parameters:
[1855]1966
[1843]1967 pflag: Bool indicating whether to turn this on (True) or
[1617]1968 off (False)
[1846]1969
[1617]1970 """
[1586]1971 varlist = vars()
1972 self._parallactify(pflag)
1973 self._add_history("parallactify", varlist)
1974
[1862]1975 @asaplog_post_dec
[992]1976 def convert_pol(self, poltype=None):
[1846]1977 """\
[992]1978 Convert the data to a different polarisation type.
[1565]1979 Note that you will need cross-polarisation terms for most conversions.
[1846]1980
[992]1981 Parameters:
[1855]1982
[992]1983 poltype: The new polarisation type. Valid types are:
[1565]1984 "linear", "circular", "stokes" and "linpol"
[1846]1985
[992]1986 """
1987 varlist = vars()
[1859]1988 s = scantable(self._math._convertpol(self, poltype))
[1118]1989 s._add_history("convert_pol", varlist)
[992]1990 return s
1991
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # Keep an unsmoothed copy so each row can be compared against (and
        # optionally restored from) in the interactive loop below.
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # new_asaplot() returns an asaplot instance matching the selected
            # matplotlib backend, so this also works outside TkAgg.
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # Manually register the two line artists so the legend shows
                # both traces; asaplot does not track them for us here.
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                # A rejected row is reverted to its original (unsmoothed)
                # spectrum; any other answer keeps the smoothed result.
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2066
[2012]2067
    @asaplog_post_dec
    def sinusoid_baseline(self, insitu=None, mask=None, nwave=None, maxwavelength=None,
                          clipthresh=None, clipniter=None, plot=None, getresidual=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal functions.
        Parameters:
            insitu:        If False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          An optional mask
            nwave:         the maximum wave number of sinusoids within
                           maxwavelength * (spectral range).
                           The default is 3 (i.e., sinusoids with wave
                           number of 0(=constant), 1, 2, and 3 are
                           used for fitting). Also it is possible to
                           explicitly specify all the wave numbers to
                           be used, by giving a list including them
                           (e.g. [0,1,2,15,16]).
            maxwavelength: the longest sinusoidal wavelength. The
                           default is 1.0 (unit: spectral range).
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves having
            # wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(nwave=10,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        # Snapshot the caller's arguments BEFORE defaults are resolved below,
        # so the history entry records what was actually passed in.
        varlist = vars()

        if insitu is None: insitu = rcParams["insitu"]
        if insitu:
            workscan = self
        else:
            workscan = self.copy()

        nchan = workscan.nchan()

        # Resolve defaults here rather than in the signature (see note above).
        if mask is None: mask = [True for i in xrange(nchan)]
        if nwave is None: nwave = 3
        if maxwavelength is None: maxwavelength = 1.0
        if clipthresh is None: clipthresh = 3.0
        if clipniter is None: clipniter = 0
        if plot is None: plot = False
        if getresidual is None: getresidual = True
        if outlog is None: outlog = False
        if blfile is None: blfile = ""

        # An integer nwave N is expanded to the full list [0, 1, ..., N].
        if isinstance(nwave, int):
            in_nwave = nwave
            nwave = []
            for i in xrange(in_nwave+1): nwave.append(i)

        # NOTE(review): outblfile is computed but not used in this method;
        # blfile handling happens on the C++ side. Kept for symmetry with
        # the sibling baseline methods.
        outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))

        try:
            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask, nwave, maxwavelength, clipthresh, clipniter, getresidual, outlog, blfile)

            workscan._add_history("sinusoid_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            msg = "The fit failed, possibly because it didn't converge."
            # In verbose mode the error is logged and swallowed (returns
            # None); otherwise it is re-raised with extra context.
            if rcParams["verbose"]:
                asaplog.push(str(e))
                asaplog.push(str(msg))
                return
            else:
                raise RuntimeError(str(e)+'\n'+msg)
2159
2160
[2081]2161 def auto_sinusoid_baseline(self, insitu=None, mask=None, nwave=None, maxwavelength=None,
[2047]2162 clipthresh=None, clipniter=None, edge=None, threshold=None,
[2081]2163 chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
[2047]2164 """\
[2094]2165 Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2166 Spectral lines are detected first using linefinder and masked out
2167 to avoid them affecting the baseline solution.
2168
2169 Parameters:
[2081]2170 insitu: if False a new scantable is returned.
2171 Otherwise, the scaling is done in-situ
2172 The default is taken from .asaprc (False)
2173 mask: an optional mask retreived from scantable
2174 nwave: the maximum wave number of sinusoids within
2175 maxwavelength * (spectral range).
2176 The default is 3 (i.e., sinusoids with wave
2177 number of 0(=constant), 1, 2, and 3 are
2178 used for fitting). Also it is possible to
2179 explicitly specify all the wave numbers to
2180 be used, by giving a list including them
2181 (e.g. [0,1,2,15,16]).
2182 maxwavelength: the longest sinusoidal wavelength. The
2183 default is 1.0 (unit: spectral range).
2184 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2129]2185 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2081]2186 edge: an optional number of channel to drop at
2187 the edge of spectrum. If only one value is
2188 specified, the same number will be dropped
2189 from both sides of the spectrum. Default
2190 is to keep all channels. Nested tuples
2191 represent individual edge selection for
2192 different IFs (a number of spectral channels
2193 can be different)
2194 threshold: the threshold used by line finder. It is
2195 better to keep it large as only strong lines
2196 affect the baseline solution.
2197 chan_avg_limit:a maximum number of consequtive spectral
2198 channels to average during the search of
2199 weak and broad lines. The default is no
2200 averaging (and no search for weak lines).
2201 If such lines can affect the fitted baseline
2202 (e.g. a high order polynomial is fitted),
2203 increase this parameter (usually values up
2204 to 8 are reasonable). Most users of this
2205 method should find the default value sufficient.
2206 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2207 plot the fit and the residual. In this each
2208 indivual fit has to be approved, by typing 'y'
2209 or 'n'
2210 getresidual: if False, returns best-fit values instead of
2211 residual. (default is True)
2212 outlog: Output the coefficients of the best-fit
2213 function to logger (default is False)
2214 blfile: Name of a text file in which the best-fit
2215 parameter values to be written
2216 (default is "": no file/logger output)
[2047]2217
2218 Example:
[2081]2219 bscan = scan.auto_sinusoid_baseline(nwave=10, insitu=False)
2220
2221 Note:
2222 The best-fit parameter values output in logger and/or blfile are now
2223 based on specunit of 'channel'.
[2047]2224 """
2225
2226 varlist = vars()
2227
2228 if insitu is None: insitu = rcParams['insitu']
2229 if insitu:
2230 workscan = self
2231 else:
2232 workscan = self.copy()
2233
2234 nchan = workscan.nchan()
2235
[2081]2236 if mask is None: mask = [True for i in xrange(nchan)]
2237 if nwave is None: nwave = 3
2238 if maxwavelength is None: maxwavelength = 1.0
2239 if clipthresh is None: clipthresh = 3.0
[2129]2240 if clipniter is None: clipniter = 0
[2081]2241 if edge is None: edge = (0,0)
2242 if threshold is None: threshold = 3
[2047]2243 if chan_avg_limit is None: chan_avg_limit = 1
[2081]2244 if plot is None: plot = False
2245 if getresidual is None: getresidual = True
2246 if outlog is None: outlog = False
2247 if blfile is None: blfile = ""
[2047]2248
2249 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2250
2251 from asap.asaplinefind import linefinder
2252 from asap import _is_sequence_or_number as _is_valid
2253
[2081]2254 if isinstance(nwave, int):
2255 in_nwave = nwave
2256 nwave = []
2257 for i in xrange(in_nwave+1): nwave.append(i)
2258
[2047]2259 if not (isinstance(edge, list) or isinstance(edge, tuple)): edge = [ edge ]
2260 individualedge = False;
2261 if len(edge) > 1: individualedge = isinstance(edge[0], list) or isinstance(edge[0], tuple)
2262
2263 if individualedge:
2264 for edgepar in edge:
2265 if not _is_valid(edgepar, int):
2266 raise ValueError, "Each element of the 'edge' tuple has \
2267 to be a pair of integers or an integer."
2268 else:
2269 if not _is_valid(edge, int):
2270 raise ValueError, "Parameter 'edge' has to be an integer or a \
2271 pair of integers specified as a tuple. \
2272 Nested tuples are allowed \
2273 to make individual selection for different IFs."
2274
2275 if len(edge) > 1:
2276 curedge = edge
2277 else:
2278 curedge = edge + edge
2279
2280 try:
[2081]2281 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2047]2282 if individualedge:
2283 curedge = []
2284 for i in xrange(len(edge)):
2285 curedge += edge[i]
2286
[2081]2287 workscan._auto_sinusoid_baseline(mask, nwave, maxwavelength, clipthresh, clipniter, curedge, threshold, chan_avg_limit, getresidual, outlog, blfile)
[2047]2288
2289 workscan._add_history("auto_sinusoid_baseline", varlist)
2290
2291 if insitu:
2292 self._assign(workscan)
2293 else:
2294 return workscan
2295
2296 except RuntimeError, e:
2297 msg = "The fit failed, possibly because it didn't converge."
2298 if rcParams["verbose"]:
2299 asaplog.push(str(e))
2300 asaplog.push(str(msg))
2301 return
2302 else:
2303 raise RuntimeError(str(e)+'\n'+msg)
2304
2305
    @asaplog_post_dec
    def cspline_baseline(self, insitu=None, mask=None, npiece=None,
                         clipthresh=None, clipniter=None, plot=None, getresidual=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
        Parameters:
            insitu:     If False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            mask:       An optional mask
            npiece:     Number of pieces. (default is 2)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:  maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                        plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            getresidual:if False, returns best-fit values instead of
                        residual. (default is True)
            outlog:     Output the coefficients of the best-fit
                        function to logger (default is False)
            blfile:     Name of a text file in which the best-fit
                        parameter values to be written
                        (default is "": no file/logger output)

        Example:
            # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        # Snapshot the caller's arguments BEFORE defaults are resolved below,
        # so the history entry records what was actually passed in.
        varlist = vars()

        if insitu is None: insitu = rcParams["insitu"]
        if insitu:
            workscan = self
        else:
            workscan = self.copy()

        nchan = workscan.nchan()

        # Resolve defaults here rather than in the signature (see note above).
        if mask is None: mask = [True for i in xrange(nchan)]
        if npiece is None: npiece = 2
        if clipthresh is None: clipthresh = 3.0
        if clipniter is None: clipniter = 0
        if plot is None: plot = False
        if getresidual is None: getresidual = True
        if outlog is None: outlog = False
        if blfile is None: blfile = ""

        # NOTE(review): outblfile is computed but not used in this method;
        # blfile handling happens on the C++ side.
        outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))

        try:
            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, outlog, blfile)

            workscan._add_history("cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            msg = "The fit failed, possibly because it didn't converge."
            # In verbose mode the error is logged and swallowed (returns
            # None); otherwise it is re-raised with extra context.
            if rcParams["verbose"]:
                asaplog.push(str(e))
                asaplog.push(str(msg))
                return
            else:
                raise RuntimeError(str(e)+'\n'+msg)
[1855]2381
2382
[2012]2383 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None,
2384 clipniter=None, edge=None, threshold=None,
[2094]2385 chan_avg_limit=None, getresidual=None, plot=None, outlog=None, blfile=None):
[2012]2386 """\
2387 Return a scan which has been baselined (all rows) by cubic spline
2388 function (piecewise cubic polynomial).
2389 Spectral lines are detected first using linefinder and masked out
2390 to avoid them affecting the baseline solution.
2391
2392 Parameters:
[794]2393 insitu: if False a new scantable is returned.
2394 Otherwise, the scaling is done in-situ
2395 The default is taken from .asaprc (False)
[2012]2396 mask: an optional mask retreived from scantable
2397 npiece: Number of pieces. (default is 2)
2398 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2129]2399 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2012]2400 edge: an optional number of channel to drop at
2401 the edge of spectrum. If only one value is
2402 specified, the same number will be dropped
2403 from both sides of the spectrum. Default
2404 is to keep all channels. Nested tuples
2405 represent individual edge selection for
2406 different IFs (a number of spectral channels
2407 can be different)
2408 threshold: the threshold used by line finder. It is
2409 better to keep it large as only strong lines
2410 affect the baseline solution.
2411 chan_avg_limit:
2412 a maximum number of consequtive spectral
2413 channels to average during the search of
2414 weak and broad lines. The default is no
2415 averaging (and no search for weak lines).
2416 If such lines can affect the fitted baseline
2417 (e.g. a high order polynomial is fitted),
2418 increase this parameter (usually values up
2419 to 8 are reasonable). Most users of this
2420 method should find the default value sufficient.
2421 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2422 plot the fit and the residual. In this each
2423 indivual fit has to be approved, by typing 'y'
2424 or 'n'
[2094]2425 getresidual:if False, returns best-fit values instead of
2426 residual. (default is True)
[2012]2427 outlog: Output the coefficients of the best-fit
2428 function to logger (default is False)
2429 blfile: Name of a text file in which the best-fit
2430 parameter values to be written
2431 (default is "": no file/logger output)
[1846]2432
[1907]2433 Example:
[2012]2434 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2435
2436 Note:
2437 The best-fit parameter values output in logger and/or blfile are now
2438 based on specunit of 'channel'.
[2012]2439 """
[1846]2440
[2012]2441 varlist = vars()
2442
[513]2443 if insitu is None: insitu = rcParams['insitu']
[2012]2444 if insitu:
2445 workscan = self
2446 else:
[1819]2447 workscan = self.copy()
[2012]2448
2449 nchan = workscan.nchan()
2450
[2094]2451 if mask is None: mask = [True for i in xrange(nchan)]
2452 if npiece is None: npiece = 2
2453 if clipthresh is None: clipthresh = 3.0
[2129]2454 if clipniter is None: clipniter = 0
[2094]2455 if edge is None: edge = (0, 0)
2456 if threshold is None: threshold = 3
[2012]2457 if chan_avg_limit is None: chan_avg_limit = 1
[2094]2458 if plot is None: plot = False
2459 if getresidual is None: getresidual = True
2460 if outlog is None: outlog = False
2461 if blfile is None: blfile = ""
[2012]2462
2463 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2464
2465 from asap.asaplinefind import linefinder
2466 from asap import _is_sequence_or_number as _is_valid
2467
2468 if not (isinstance(edge, list) or isinstance(edge, tuple)): edge = [ edge ]
2469 individualedge = False;
2470 if len(edge) > 1: individualedge = isinstance(edge[0], list) or isinstance(edge[0], tuple)
2471
2472 if individualedge:
2473 for edgepar in edge:
2474 if not _is_valid(edgepar, int):
2475 raise ValueError, "Each element of the 'edge' tuple has \
2476 to be a pair of integers or an integer."
[1819]2477 else:
[2012]2478 if not _is_valid(edge, int):
2479 raise ValueError, "Parameter 'edge' has to be an integer or a \
2480 pair of integers specified as a tuple. \
2481 Nested tuples are allowed \
2482 to make individual selection for different IFs."
[1819]2483
[2012]2484 if len(edge) > 1:
2485 curedge = edge
[1391]2486 else:
[2012]2487 curedge = edge + edge
[1819]2488
[2012]2489 try:
2490 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2491 if individualedge:
2492 curedge = []
2493 for i in xrange(len(edge)):
2494 curedge += edge[i]
2495
[2094]2496 workscan._auto_cspline_baseline(mask, npiece, clipthresh, clipniter, curedge, threshold, chan_avg_limit, getresidual, outlog, blfile)
[2012]2497
2498 workscan._add_history("auto_cspline_baseline", varlist)
[1907]2499
[1856]2500 if insitu:
2501 self._assign(workscan)
2502 else:
2503 return workscan
[2012]2504
2505 except RuntimeError, e:
[1217]2506 msg = "The fit failed, possibly because it didn't converge."
[2012]2507 if rcParams["verbose"]:
2508 asaplog.push(str(e))
2509 asaplog.push(str(msg))
2510 return
2511 else:
2512 raise RuntimeError(str(e)+'\n'+msg)
[513]2513
[2012]2514
    @asaplog_post_dec
    def poly_baseline(self, insitu=None, mask=None, order=None, plot=None, getresidual=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            getresidual:if False, returns best-fit values instead of
                        residual. (default is True)
            outlog:     Output the coefficients of the best-fit
                        function to logger (default is False)
            blfile:     Name of a text file in which the best-fit
                        parameter values to be written
                        (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        # Snapshot the caller's arguments BEFORE defaults are resolved below,
        # so the history entry records what was actually passed in.
        varlist = vars()

        if insitu is None: insitu = rcParams["insitu"]
        if insitu:
            workscan = self
        else:
            workscan = self.copy()

        nchan = workscan.nchan()

        # Resolve defaults here rather than in the signature (see note above).
        if mask is None: mask = [True for i in xrange(nchan)]
        if order is None: order = 0
        if plot is None: plot = False
        if getresidual is None: getresidual = True
        if outlog is None: outlog = False
        if blfile is None: blfile = ""

        outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))

        try:
            rows = xrange(workscan.nrow())

            #if len(rows) > 0: workscan._init_blinfo()

            # plot=True: interactive row-by-row fitting in Python using the
            # fitter class; otherwise the whole job is delegated to the C++
            # implementation (_poly_baseline).
            if plot:
                if outblfile: blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # Combine the user mask with the row's own flag mask.
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # Store the residual (default) or the best-fit model.
                    workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                workscan._poly_baseline(mask, order, getresidual, outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            msg = "The fit failed, possibly because it didn't converge."
            # In verbose mode the error is logged and swallowed (returns
            # None); otherwise it is re-raised with extra context.
            if rcParams["verbose"]:
                asaplog.push(str(e))
                asaplog.push(str(msg))
                return
            else:
                raise RuntimeError(str(e)+'\n'+msg)
[1907]2616
2617
[2012]2618 def auto_poly_baseline(self, insitu=None, mask=None, order=None, edge=None, threshold=None,
[2094]2619 chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
[1846]2620 """\
[1931]2621 Return a scan which has been baselined (all rows) by a polynomial.
[880]2622 Spectral lines are detected first using linefinder and masked out
2623 to avoid them affecting the baseline solution.
2624
2625 Parameters:
[2012]2626 insitu: if False a new scantable is returned.
2627 Otherwise, the scaling is done in-situ
2628 The default is taken from .asaprc (False)
[880]2629 mask: an optional mask retreived from scantable
2630 order: the order of the polynomial (default is 0)
[2012]2631 edge: an optional number of channel to drop at
2632 the edge of spectrum. If only one value is
2633 specified, the same number will be dropped
2634 from both sides of the spectrum. Default
2635 is to keep all channels. Nested tuples
2636 represent individual edge selection for
2637 different IFs (a number of spectral channels
2638 can be different)
2639 threshold: the threshold used by line finder. It is
2640 better to keep it large as only strong lines
2641 affect the baseline solution.
[1280]2642 chan_avg_limit:
[2012]2643 a maximum number of consequtive spectral
2644 channels to average during the search of
2645 weak and broad lines. The default is no
2646 averaging (and no search for weak lines).
2647 If such lines can affect the fitted baseline
2648 (e.g. a high order polynomial is fitted),
2649 increase this parameter (usually values up
2650 to 8 are reasonable). Most users of this
2651 method should find the default value sufficient.
[1061]2652 plot: plot the fit and the residual. In this each
2653 indivual fit has to be approved, by typing 'y'
2654 or 'n'
[2094]2655 getresidual:if False, returns best-fit values instead of
2656 residual. (default is True)
[2012]2657 outlog: Output the coefficients of the best-fit
2658 function to logger (default is False)
2659 blfile: Name of a text file in which the best-fit
2660 parameter values to be written
2661 (default is "": no file/logger output)
[1846]2662
[2012]2663 Example:
2664 bscan = scan.auto_poly_baseline(order=7, insitu=False)
2665 """
[880]2666
[2012]2667 varlist = vars()
[1846]2668
[2012]2669 if insitu is None: insitu = rcParams['insitu']
2670 if insitu:
2671 workscan = self
2672 else:
2673 workscan = self.copy()
[1846]2674
[2012]2675 nchan = workscan.nchan()
2676
[2094]2677 if mask is None: mask = [True for i in xrange(nchan)]
2678 if order is None: order = 0
2679 if edge is None: edge = (0, 0)
2680 if threshold is None: threshold = 3
[2012]2681 if chan_avg_limit is None: chan_avg_limit = 1
[2094]2682 if plot is None: plot = False
2683 if getresidual is None: getresidual = True
2684 if outlog is None: outlog = False
2685 if blfile is None: blfile = ""
[1846]2686
[2012]2687 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2688
[880]2689 from asap.asaplinefind import linefinder
2690 from asap import _is_sequence_or_number as _is_valid
2691
[2012]2692 if not (isinstance(edge, list) or isinstance(edge, tuple)): edge = [ edge ]
[1118]2693 individualedge = False;
[2012]2694 if len(edge) > 1: individualedge = isinstance(edge[0], list) or isinstance(edge[0], tuple)
[907]2695
[1118]2696 if individualedge:
2697 for edgepar in edge:
2698 if not _is_valid(edgepar, int):
2699 raise ValueError, "Each element of the 'edge' tuple has \
2700 to be a pair of integers or an integer."
[907]2701 else:
[2012]2702 if not _is_valid(edge, int):
2703 raise ValueError, "Parameter 'edge' has to be an integer or a \
2704 pair of integers specified as a tuple. \
2705 Nested tuples are allowed \
2706 to make individual selection for different IFs."
[880]2707
[2012]2708 if len(edge) > 1:
2709 curedge = edge
2710 else:
2711 curedge = edge + edge
[1907]2712
[2012]2713 try:
2714 rows = xrange(workscan.nrow())
2715
2716 #if len(rows) > 0: workscan._init_blinfo()
[880]2717
[2012]2718 if plot:
2719 if outblfile: blf = open(blfile, "a")
2720
2721 fl = linefinder()
2722 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2723 fl.set_scan(workscan)
2724 f = fitter()
2725 f.set_function(lpoly=order)
[880]2726
[2012]2727 for r in rows:
2728 if individualedge:
2729 if len(edge) <= workscan.getif(r):
2730 raise RuntimeError, "Number of edge elements appear to " \
2731 "be less than the number of IFs"
2732 else:
2733 curedge = edge[workscan.getif(r)]
[907]2734
[2012]2735 fl.find_lines(r, mask_and(mask, workscan._getmask(r)), curedge) # (CAS-1434)
2736
2737 f.x = workscan._getabcissa(r)
2738 f.y = workscan._getspectrum(r)
2739 f.mask = fl.get_mask()
2740 f.data = None
2741 f.fit()
2742
2743 f.plot(residual=True)
2744 accept_fit = raw_input("Accept fit ( [y]/n ): ")
2745 if accept_fit.upper() == "N":
2746 #workscan._append_blinfo(None, None, None)
2747 continue
2748
2749 blpars = f.get_parameters()
2750 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2751 #workscan._append_blinfo(blpars, masklist, f.mask)
[2094]2752 workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
[2012]2753
2754 if outblfile:
2755 rms = workscan.get_rms(f.mask, r)
2756 dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2757 blf.write(dataout)
2758
2759 f._p.unmap()
2760 f._p = None
2761
2762 if outblfile: blf.close()
2763
2764 else:
2765 if individualedge:
2766 curedge = []
2767 for i in xrange(len(edge)):
2768 curedge += edge[i]
2769
[2094]2770 workscan._auto_poly_baseline(mask, order, curedge, threshold, chan_avg_limit, getresidual, outlog, blfile)
[2012]2771
2772 workscan._add_history("auto_poly_baseline", varlist)
2773
2774 if insitu:
2775 self._assign(workscan)
2776 else:
2777 return workscan
2778
2779 except RuntimeError, e:
2780 msg = "The fit failed, possibly because it didn't converge."
2781 if rcParams["verbose"]:
2782 asaplog.push(str(e))
2783 asaplog.push(str(msg))
2784 return
2785 else:
2786 raise RuntimeError(str(e)+'\n'+msg)
2787
2788
2789 ### OBSOLETE ##################################################################
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """
        Return a scan which has been baselined (all rows) by a polynomial.

        NOTE: obsolete predecessor of poly_baseline(); kept for reference.

        Parameters:

            mask:       an optional mask

            order:      the order of the polynomial (default is 0)

            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'

            uselin:     use linear polynomial fit

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

            rows:       row numbers of spectra to be processed.
                        (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        # Snapshot arguments for the history record (after insitu/workscan
        # resolution but before the mask default, matching historic output).
        varlist = vars()
        if mask is None:
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            # Reset the per-row fit bookkeeping kept on the scantable.
            if len(rows) > 0:
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                # Combine the user mask with the row's own flag mask.
                f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    # A rejected fit leaves the row untouched; None markers
                    # keep the bookkeeping lists aligned with row numbers.
                    if x.upper() == 'N':
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[1819]2876
[2012]2877 def _init_blinfo(self):
2878 """\
2879 Initialise the following three auxiliary members:
2880 blpars : parameters of the best-fit baseline,
2881 masklists : mask data (edge positions of masked channels) and
2882 actualmask : mask data (in boolean list),
2883 to keep for use later (including output to logger/text files).
2884 Used by poly_baseline() and auto_poly_baseline() in case of
2885 'plot=True'.
2886 """
2887 self.blpars = []
2888 self.masklists = []
2889 self.actualmask = []
2890 return
[880]2891
[2012]2892 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2893 """\
2894 Append baseline-fitting related info to blpars, masklist and
2895 actualmask.
2896 """
2897 self.blpars.append(data_blpars)
2898 self.masklists.append(data_masklists)
2899 self.actualmask.append(data_actualmask)
2900 return
2901
    @asaplog_post_dec
    def rotate_linpolphase(self, angle):
        """\
        Rotate the phase of the complex polarization O=Q+iU correlation.
        This is always done in situ in the raw data. So if you call this
        function more than once then each call rotates the phase further.

        Parameters:

            angle:   The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_linpolphase(2.3)

        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._rotate_linpolphase(self, angle)
        self._add_history("rotate_linpolphase", varlist)
        return
[710]2922
    @asaplog_post_dec
    def rotate_xyphase(self, angle):
        """\
        Rotate the phase of the XY correlation. This is always done in situ
        in the data. So if you call this function more than once
        then each call rotates the phase further.

        Parameters:

            angle:   The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_xyphase(2.3)

        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._rotate_xyphase(self, angle)
        self._add_history("rotate_xyphase", varlist)
        return
2943
    @asaplog_post_dec
    def swap_linears(self):
        """\
        Swap the linear polarisations XX and YY, or better the first two
        polarisations as this also works for circulars.
        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._swap_linears(self)
        self._add_history("swap_linears", varlist)
        return
2954
    @asaplog_post_dec
    def invert_phase(self):
        """\
        Invert the phase of the complex polarisation.
        This is done in situ in the data.
        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._invert_phase(self)
        self._add_history("invert_phase", varlist)
        return
2964
    @asaplog_post_dec
    def add(self, offset, insitu=None):
        """\
        Return a scan where all spectra have the offset added

        Parameters:

            offset:      the offset

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        # snapshot arguments (after insitu resolution) for the history entry
        varlist = vars()
        s = scantable(self._math._unaryop(self, offset, "ADD", False))
        s._add_history("add", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
2988
[1862]2989 @asaplog_post_dec
[1308]2990 def scale(self, factor, tsys=True, insitu=None):
[1846]2991 """\
2992
[1938]2993 Return a scan where all spectra are scaled by the given 'factor'
[1846]2994
[513]2995 Parameters:
[1846]2996
[1819]2997 factor: the scaling factor (float or 1D float list)
[1855]2998
[513]2999 insitu: if False a new scantable is returned.
3000 Otherwise, the scaling is done in-situ
3001 The default is taken from .asaprc (False)
[1855]3002
[513]3003 tsys: if True (default) then apply the operation to Tsys
3004 as well as the data
[1846]3005
[513]3006 """
3007 if insitu is None: insitu = rcParams['insitu']
[876]3008 self._math._setinsitu(insitu)
[513]3009 varlist = vars()
[1819]3010 s = None
3011 import numpy
3012 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3013 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3014 from asapmath import _array2dOp
3015 s = _array2dOp( self.copy(), factor, "MUL", tsys )
3016 else:
3017 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3018 else:
3019 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]3020 s._add_history("scale", varlist)
[876]3021 if insitu:
3022 self._assign(s)
3023 else:
[513]3024 return s
3025
[1504]3026 def set_sourcetype(self, match, matchtype="pattern",
3027 sourcetype="reference"):
[1846]3028 """\
[1502]3029 Set the type of the source to be an source or reference scan
[1846]3030 using the provided pattern.
3031
[1502]3032 Parameters:
[1846]3033
[1504]3034 match: a Unix style pattern, regular expression or selector
[1855]3035
[1504]3036 matchtype: 'pattern' (default) UNIX style pattern or
3037 'regex' regular expression
[1855]3038
[1502]3039 sourcetype: the type of the source to use (source/reference)
[1846]3040
[1502]3041 """
3042 varlist = vars()
3043 basesel = self.get_selection()
3044 stype = -1
3045 if sourcetype.lower().startswith("r"):
3046 stype = 1
3047 elif sourcetype.lower().startswith("s"):
3048 stype = 0
[1504]3049 else:
[1502]3050 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3051 if matchtype.lower().startswith("p"):
3052 matchtype = "pattern"
3053 elif matchtype.lower().startswith("r"):
3054 matchtype = "regex"
3055 else:
3056 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3057 sel = selector()
3058 if isinstance(match, selector):
3059 sel = match
3060 else:
[1504]3061 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3062 self.set_selection(basesel+sel)
3063 self._setsourcetype(stype)
3064 self.set_selection(basesel)
[1573]3065 self._add_history("set_sourcetype", varlist)
[1502]3066
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        # snapshot arguments for the history entry
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # split the data into 'off' and 'on' copies via SRCTYPE queries;
            # @preserve_selection restores the original selection afterwards
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # let the C++ layer match each 'on' with the nearest 'off' in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]3115
    @asaplog_post_dec
    def mx_quotient(self, mask = None, weight='median', preserve=True):
        """\
        Form a quotient using "off" beams when observing in "MX" mode.

        Parameters:

            mask:        an optional mask to be used when weight == 'stddev'

            weight:      How to average the off beams. Default is 'median'.

            preserve:    you can preserve (default) the continuum or
                         remove it. The equations used are:

                         preserve: Output = Toff * (on/off) - Toff

                         remove:   Output = Toff * (on/off) - Ton

        """
        # normalise a missing mask to an empty tuple for average_time()
        mask = mask or ()
        # snapshot arguments for the history entry
        varlist = vars()
        on = scantable(self._math._mx_extract(self, 'on'))
        preoff = scantable(self._math._mx_extract(self, 'off'))
        # average all 'off' beams into a single reference before dividing
        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
        from asapmath import quotient
        q = quotient(on, off, preserve)
        q._add_history("mx_quotient", varlist)
        return q
[513]3144
    @asaplog_post_dec
    def freq_switch(self, insitu=None):
        """\
        Apply frequency switching to the data.

        Parameters:

            insitu:      if False a new scantable is returned.
                         Otherwise, the switching is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        # snapshot arguments (after insitu resolution) for the history entry
        varlist = vars()
        s = scantable(self._math._freqswitch(self))
        s._add_history("freq_switch", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[718]3166
    @asaplog_post_dec
    def recalc_azel(self):
        """Recalculate the azimuth and elevation for each position."""
        # snapshot the (empty) argument list for the history entry
        varlist = vars()
        self._recalcazel()
        self._add_history("recalc_azel", varlist)
        return
3174
[1862]3175 @asaplog_post_dec
[513]3176 def __add__(self, other):
3177 varlist = vars()
3178 s = None
3179 if isinstance(other, scantable):
[1573]3180 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3181 elif isinstance(other, float):
[876]3182 s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3183 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3184 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3185 from asapmath import _array2dOp
3186 s = _array2dOp( self.copy(), other, "ADD", False )
3187 else:
3188 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
[513]3189 else:
[718]3190 raise TypeError("Other input is not a scantable or float value")
[513]3191 s._add_history("operator +", varlist)
3192 return s
3193
[1862]3194 @asaplog_post_dec
[513]3195 def __sub__(self, other):
3196 """
3197 implicit on all axes and on Tsys
3198 """
3199 varlist = vars()
3200 s = None
3201 if isinstance(other, scantable):
[1588]3202 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3203 elif isinstance(other, float):
[876]3204 s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3205 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3206 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3207 from asapmath import _array2dOp
3208 s = _array2dOp( self.copy(), other, "SUB", False )
3209 else:
3210 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
[513]3211 else:
[718]3212 raise TypeError("Other input is not a scantable or float value")
[513]3213 s._add_history("operator -", varlist)
3214 return s
[710]3215
[1862]3216 @asaplog_post_dec
[513]3217 def __mul__(self, other):
3218 """
3219 implicit on all axes and on Tsys
3220 """
3221 varlist = vars()
3222 s = None
3223 if isinstance(other, scantable):
[1588]3224 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3225 elif isinstance(other, float):
[876]3226 s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3227 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3228 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3229 from asapmath import _array2dOp
3230 s = _array2dOp( self.copy(), other, "MUL", False )
3231 else:
3232 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
[513]3233 else:
[718]3234 raise TypeError("Other input is not a scantable or float value")
[513]3235 s._add_history("operator *", varlist)
3236 return s
3237
[710]3238
[1862]3239 @asaplog_post_dec
[513]3240 def __div__(self, other):
3241 """
3242 implicit on all axes and on Tsys
3243 """
3244 varlist = vars()
3245 s = None
3246 if isinstance(other, scantable):
[1589]3247 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3248 elif isinstance(other, float):
3249 if other == 0.0:
[718]3250 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3251 s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3252 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3253 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3254 from asapmath import _array2dOp
3255 s = _array2dOp( self.copy(), other, "DIV", False )
3256 else:
3257 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
[513]3258 else:
[718]3259 raise TypeError("Other input is not a scantable or float value")
[513]3260 s._add_history("operator /", varlist)
3261 return s
3262
[1862]3263 @asaplog_post_dec
[530]3264 def get_fit(self, row=0):
[1846]3265 """\
[530]3266 Print or return the stored fits for a row in the scantable
[1846]3267
[530]3268 Parameters:
[1846]3269
[530]3270 row: the row which the fit has been applied to.
[1846]3271
[530]3272 """
3273 if row > self.nrow():
3274 return
[976]3275 from asap.asapfit import asapfit
[530]3276 fit = asapfit(self._getfit(row))
[1859]3277 asaplog.push( '%s' %(fit) )
3278 return fit.as_dict()
[530]3279
    def flag_nans(self):
        """\
        Utility function to flag NaN values in the scantable.
        Iterates over all rows, selecting one row at a time, and flags
        any channel whose value is NaN.
        """
        import numpy
        basesel = self.get_selection()
        for i in range(self.nrow()):
            # narrow the selection to a single row, so the visible
            # spectrum is index 0 within the selection
            sel = self.get_row_selector(i)
            self.set_selection(basesel+sel)
            nans = numpy.isnan(self._getspectrum(0))
            if numpy.any(nans):
                # convert numpy.bool_ values to plain bools for flag()
                bnans = [ bool(v) for v in nans]
                self.flag(bnans)
        # restore the caller's selection
        self.set_selection(basesel)
3294
[1588]3295 def get_row_selector(self, rowno):
[1992]3296 #return selector(beams=self.getbeam(rowno),
3297 # ifs=self.getif(rowno),
3298 # pols=self.getpol(rowno),
3299 # scans=self.getscan(rowno),
3300 # cycles=self.getcycle(rowno))
3301 return selector(rows=[rowno])
[1573]3302
    def _add_history(self, funcname, parameters):
        """\
        Append a '##'-separated history entry (timestamp, function name and
        stringified parameters) to the scantable's history, unless history
        recording is disabled via rcParams['scantable.history'].
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # drop the bound instance itself from the recorded arguments;
        # NOTE: this mutates the caller's dict (typically a vars() snapshot)
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # dict-valued parameters are flattened one level deep
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid embedding a full table dump in the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # store masks compactly as [start, end) segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3342
[710]3343
[484]3344 def _zip_mask(self, mask):
3345 mask = list(mask)
3346 i = 0
3347 segments = []
3348 while mask[i:].count(1):
3349 i += mask[i:].index(1)
3350 if mask[i:].count(0):
3351 j = i + mask[i:].index(0)
3352 else:
[710]3353 j = len(mask)
[1118]3354 segments.append([i, j])
[710]3355 i = j
[484]3356 return segments
[714]3357
[626]3358 def _get_ordinate_label(self):
3359 fu = "("+self.get_fluxunit()+")"
3360 import re
3361 lbl = "Intensity"
[1118]3362 if re.match(".K.", fu):
[626]3363 lbl = "Brightness Temperature "+ fu
[1118]3364 elif re.match(".Jy.", fu):
[626]3365 lbl = "Flux density "+ fu
3366 return lbl
[710]3367
[876]3368 def _check_ifs(self):
[1986]3369 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3370 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3371 nchans = filter(lambda t: t > 0, nchans)
[876]3372 return (sum(nchans)/len(nchans) == nchans[0])
[976]3373
[1862]3374 @asaplog_post_dec
[1916]3375 #def _fill(self, names, unit, average, getpt, antenna):
3376 def _fill(self, names, unit, average, opts={}):
[976]3377 first = True
3378 fullnames = []
3379 for name in names:
3380 name = os.path.expandvars(name)
3381 name = os.path.expanduser(name)
3382 if not os.path.exists(name):
3383 msg = "File '%s' does not exists" % (name)
3384 raise IOError(msg)
3385 fullnames.append(name)
3386 if average:
3387 asaplog.push('Auto averaging integrations')
[1079]3388 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3389 for name in fullnames:
[1073]3390 tbl = Scantable(stype)
[2004]3391 if is_ms( name ):
3392 r = msfiller( tbl )
3393 else:
3394 r = filler( tbl )
3395 rx = rcParams['scantable.reference']
3396 r.setreferenceexpr(rx)
3397 #r = filler(tbl)
3398 #rx = rcParams['scantable.reference']
3399 #r.setreferenceexpr(rx)
[976]3400 msg = "Importing %s..." % (name)
[1118]3401 asaplog.push(msg, False)
[1916]3402 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]3403 r.open(name, opts)# antenna, -1, -1, getpt)
[1843]3404 r.fill()
[976]3405 if average:
[1118]3406 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3407 if not first:
3408 tbl = self._math._merge([self, tbl])
3409 Scantable.__init__(self, tbl)
[1843]3410 r.close()
[1118]3411 del r, tbl
[976]3412 first = False
[1861]3413 #flush log
3414 asaplog.post()
[976]3415 if unit is not None:
3416 self.set_fluxunit(unit)
[1824]3417 if not is_casapy():
3418 self.set_freqframe(rcParams['scantable.freqframe'])
[976]3419
[2012]3420
[1402]3421 def __getitem__(self, key):
3422 if key < 0:
3423 key += self.nrow()
3424 if key >= self.nrow():
3425 raise IndexError("Row index out of range.")
3426 return self._getspectrum(key)
3427
3428 def __setitem__(self, key, value):
3429 if key < 0:
3430 key += self.nrow()
3431 if key >= self.nrow():
3432 raise IndexError("Row index out of range.")
3433 if not hasattr(value, "__len__") or \
3434 len(value) > self.nchan(self.getif(key)):
3435 raise ValueError("Spectrum length doesn't match.")
3436 return self._setspectrum(value, key)
3437
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
3440
3441 def __iter__(self):
3442 for i in range(len(self)):
3443 yield self[i]
Note: See TracBrowser for help on using the repository browser.