source: trunk/python/scantable.py@ 2410

Last change on this file since 2410 was 2410, checked in by Takeshi Nakazato, 13 years ago

New Development: No

JIRA Issue: Yes CAS-3606/CAS-3757

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: sdbaseline unit test

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Fixed a bug where the baseline functions do not work when multiple IFs with
different nchan are processed at once.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 137.3 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2315]4import tempfile
[1948]5import numpy
[1691]6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
[1824]11from asap.env import is_casapy
[876]12from asap._asap import Scantable
[2004]13from asap._asap import filler, msfiller
[1824]14from asap.parameters import rcParams
[1862]15from asap.logging import asaplog, asaplog_post_dec
[1824]16from asap.selector import selector
17from asap.linecatalog import linecatalog
[1600]18from asap.coordinate import coordinate
[1859]19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]20from asap.asapfitter import fitter
[102]21
[1689]22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The current selection is captured before the call and re-applied in a
    ``finally`` block, so it is restored even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for any casa table directory whose table.info does NOT
    identify it as a Measurement Set (historically the check was for the
    'Scantable' tag; see the removed commented-out variant).
    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # BUG FIX: close the file even if readline() raises
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            f.close()
        # casa tables identify themselves in the first line of table.info
        return l.find('Measurement Set') == -1
    return False
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # BUG FIX: close the file even if readline() raises
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            f.close()
        # casa tables identify themselves in the first line of table.info
        return l.find('Measurement Set') != -1
    return False
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input
                  'edge' parameter can be given in a simpler shape in the
                  following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be
                          replaced to single integers.
                          e.g. [[15,15]] can be simplified to [15]
                               (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified
                               to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified
                               to [5,10]. when [5,10] given, it is interpreted
                               as [[5,10],[5,10],[5,10],...] instead, as shown
                               before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation ------------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                # BUG FIX: was py2-only 'raise Class, msg' syntax, and the
                # line-continuation indentation leaked into the message text.
                raise ValueError("Each element of the 'edge' tuple has to be "
                                 "a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has to be "
                                 "a pair of integers or an integer.")
    elif not _is_valid(edge, int):
        raise ValueError("Parameter 'edge' has to be an integer or a pair of "
                         "integers specified as a tuple. Nested tuples are "
                         "allowed to make individual selection for different "
                         "IFs.")

    # --- normalisation ---------------------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [ edge[0], edge[0] ]      # e.g. [1] => [1,1]

    # 'common' means one [left, right] pair shared by all IFs
    commonedge = (len(edge) <= 2 and
                  not any(isinstance(e, (list, tuple)) for e in edge))

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # flatten a per-IF specification into [l0, r0, l1, r1, ...]
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when verbose, otherwise re-raise."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress-display options as a single '<bool>,<int>' string."""
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
[876]176class scantable(Scantable):
[1846]177 """\
178 The ASAP container for scans (single-dish data).
[102]179 """
[1819]180
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        # fall back to rc-file defaults for unset options
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the ms-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the filler
                    # work out the format
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names is filled as one scantable
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]257
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # default format comes from the .asaprc settings
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            # native table format is written by the base class
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has a dedicated writer
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
316
[102]317 def copy(self):
[1846]318 """Return a copy of this scantable.
319
320 *Note*:
321
[1348]322 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]323
324 Example::
325
[102]326 copiedscan = scan.copy()
[1846]327
[102]328 """
[876]329 sd = scantable(Scantable._copy(self))
[113]330 return sd
331
[1093]332 def drop_scan(self, scanid=None):
[1846]333 """\
[1093]334 Return a new scantable where the specified scan number(s) has(have)
335 been dropped.
[1846]336
[1093]337 Parameters:
[1846]338
[1093]339 scanid: a (list of) scan number(s)
[1846]340
[1093]341 """
342 from asap import _is_sequence_or_number as _is_valid
343 from asap import _to_list
344 from asap import unique
345 if not _is_valid(scanid):
[2315]346 raise RuntimeError( 'Please specify a scanno to drop from the'
347 ' scantable' )
[1859]348 scanid = _to_list(scanid)
349 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
350 for sid in scanid: allscans.remove(sid)
351 if len(allscans) == 0:
352 raise ValueError("Can't remove all scans")
353 sel = selector(scans=allscans)
354 return self._select_copy(sel)
[1093]355
[1594]356 def _select_copy(self, selection):
357 orig = self.get_selection()
358 self.set_selection(orig+selection)
359 cp = self.copy()
360 self.set_selection(orig)
361 return cp
362
[102]363 def get_scan(self, scanid=None):
[1855]364 """\
[102]365 Return a specific scan (by scanno) or collection of scans (by
366 source name) in a new scantable.
[1846]367
368 *Note*:
369
[1348]370 See scantable.drop_scan() for the inverse operation.
[1846]371
[102]372 Parameters:
[1846]373
[513]374 scanid: a (list of) scanno or a source name, unix-style
375 patterns are accepted for source name matching, e.g.
376 '*_R' gets all 'ref scans
[1846]377
378 Example::
379
[513]380 # get all scans containing the source '323p459'
381 newscan = scan.get_scan('323p459')
382 # get all 'off' scans
383 refscans = scan.get_scan('*_R')
384 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]385 newscan = scan.get_scan([0, 2, 7, 10])
[1846]386
[102]387 """
388 if scanid is None:
[1859]389 raise RuntimeError( 'Please specify a scan no or name to '
390 'retrieve from the scantable' )
[102]391 try:
[946]392 bsel = self.get_selection()
393 sel = selector()
[102]394 if type(scanid) is str:
[946]395 sel.set_name(scanid)
[1594]396 return self._select_copy(sel)
[102]397 elif type(scanid) is int:
[946]398 sel.set_scans([scanid])
[1594]399 return self._select_copy(sel)
[381]400 elif type(scanid) is list:
[946]401 sel.set_scans(scanid)
[1594]402 return self._select_copy(sel)
[381]403 else:
[718]404 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]405 raise TypeError(msg)
[102]406 except RuntimeError:
[1859]407 raise
[102]408
409 def __str__(self):
[2315]410 tempFile = tempfile.NamedTemporaryFile()
411 Scantable._summary(self, tempFile.name)
412 tempFile.seek(0)
413 asaplog.clear()
414 return tempFile.file.read()
[102]415
[2315]416 @asaplog_post_dec
[976]417 def summary(self, filename=None):
[1846]418 """\
[102]419 Print a summary of the contents of this scantable.
[1846]420
[102]421 Parameters:
[1846]422
[1931]423 filename: the name of a file to write the putput to
[102]424 Default - no file output
[1846]425
[102]426 """
[2286]427# info = Scantable._summary(self)
[102]428 if filename is not None:
[256]429 if filename is "":
430 filename = 'scantable_summary.txt'
[415]431 from os.path import expandvars, isdir
[411]432 filename = expandvars(filename)
[2286]433# if not isdir(filename):
434# data = open(filename, 'w')
435# data.write(info)
436# data.close()
437# else:
438 if isdir(filename):
[718]439 msg = "Illegal file name '%s'." % (filename)
[1859]440 raise IOError(msg)
[2286]441 else:
442 filename = ""
443 Scantable._summary(self, filename)
[2290]444# info = Scantable._summary(self, filename)
[2286]445# return page(info)
[710]446
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the spectrum from

        """
        # delegates directly to the C++ accessor of the base class
        return self._getspectrum(rowno)
[946]456
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the mask from

        """
        # delegates directly to the C++ accessor of the base class
        return self._getmask(rowno)
466
467 def set_spectrum(self, spec, rowno):
[1938]468 """Set the spectrum for the current row in the scantable.
[1846]469
[1471]470 Parameters:
[1846]471
[1855]472 spec: the new spectrum
[1846]473
[1855]474 rowno: the row number to set the spectrum for
475
[1471]476 """
[2348]477 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]478 return self._setspectrum(spec, rowno)
479
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno:    the row number for the spectral coordinate

        """
        # wrap the raw base-class coordinate in the python-side helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
502
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0) # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        # wrap the raw C++ selection in the python-side selector class
        return selector(self._getselection())
515
[1576]516 def set_selection(self, selection=None, **kw):
[1846]517 """\
[1005]518 Select a subset of the data. All following operations on this scantable
519 are only applied to thi selection.
[1846]520
[1005]521 Parameters:
[1697]522
[1846]523 selection: a selector object (default unset the selection), or
524 any combination of "pols", "ifs", "beams", "scans",
525 "cycles", "name", "query"
[1697]526
[1846]527 Examples::
[1697]528
[1005]529 sel = selector() # create a selection object
[1118]530 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]531 scan.set_selection(sel) # set the selection
532 scan.summary() # will only print summary of scanno 0 an 3
533 scan.set_selection() # unset the selection
[1697]534 # or the equivalent
535 scan.set_selection(scans=[0,3])
536 scan.summary() # will only print summary of scanno 0 an 3
537 scan.set_selection() # unset the selection
[1846]538
[946]539 """
[1576]540 if selection is None:
541 # reset
542 if len(kw) == 0:
543 selection = selector()
544 else:
545 # try keywords
546 for k in kw:
547 if k not in selector.fields:
[2320]548 raise KeyError("Invalid selection key '%s', "
549 "valid keys are %s" % (k,
550 selector.fields))
[1576]551 selection = selector(**kw)
[946]552 self._setselection(selection)
553
[1819]554 def get_row(self, row=0, insitu=None):
[1846]555 """\
[1819]556 Select a row in the scantable.
557 Return a scantable with single row.
[1846]558
[1819]559 Parameters:
[1846]560
561 row: row no of integration, default is 0.
562 insitu: if False a new scantable is returned. Otherwise, the
563 scaling is done in-situ. The default is taken from .asaprc
564 (False)
565
[1819]566 """
[2349]567 if insitu is None:
568 insitu = rcParams['insitu']
[1819]569 if not insitu:
570 workscan = self.copy()
571 else:
572 workscan = self
573 # Select a row
[2349]574 sel = selector()
[1992]575 sel.set_rows([row])
[1819]576 workscan.set_selection(sel)
577 if not workscan.nrow() == 1:
[2349]578 msg = "Could not identify single row. %d rows selected." \
579 % (workscan.nrow())
[1819]580 raise RuntimeError(msg)
581 if insitu:
582 self._assign(workscan)
583 else:
584 return workscan
585
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abcissa (channel/freq/vel) of the
        # extremum instead of its value
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # remember the channel of the extremum for the report
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report line per processed row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # for '*_abc' the abcissa value IS the statistic
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # NOTE(review): the report is routed through a predictable,
        # world-writable /tmp path; 'os.environ.has_key', 'commands' and
        # 'print >>' are Python-2-only idioms — worth modernising.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
693
694 def chan2data(self, rowno=0, chan=0):
[1846]695 """\
[1819]696 Returns channel/frequency/velocity and spectral value
697 at an arbitrary row and channel in the scantable.
[1846]698
[1819]699 Parameters:
[1846]700
[1819]701 rowno: a row number in the scantable. Default is the
702 first row, i.e. rowno=0
[1855]703
[1819]704 chan: a channel in the scantable. Default is the first
705 channel, i.e. pos=0
[1846]706
[1819]707 """
708 if isinstance(rowno, int) and isinstance(chan, int):
709 qx = {'unit': self.get_unit(),
710 'value': self._getabcissa(rowno)[chan]}
711 qy = {'unit': self.get_fluxunit(),
712 'value': self._getspectrum(rowno)[chan]}
713 return qx, qy
714
[1118]715 def stddev(self, mask=None):
[1846]716 """\
[135]717 Determine the standard deviation of the current beam/if/pol
718 Takes a 'mask' as an optional parameter to specify which
719 channels should be excluded.
[1846]720
[135]721 Parameters:
[1846]722
[135]723 mask: an optional mask specifying where the standard
724 deviation should be determined.
725
[1846]726 Example::
727
[135]728 scan.set_unit('channel')
[1118]729 msk = scan.create_mask([100, 200], [500, 600])
[135]730 scan.stddev(mask=m)
[1846]731
[135]732 """
[1118]733 return self.stats(stat='stddev', mask=mask);
[135]734
[1003]735
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the base-class sequence into a plain python list
        return list(Scantable.get_column_names(self))
[1003]741
[1730]742 def get_tsys(self, row=-1):
[1846]743 """\
[113]744 Return the System temperatures.
[1846]745
746 Parameters:
747
748 row: the rowno to get the information for. (default all rows)
749
[113]750 Returns:
[1846]751
[876]752 a list of Tsys values for the current selection
[1846]753
[113]754 """
[1730]755 if row > -1:
756 return self._get_column(self._gettsys, row)
[876]757 return self._row_callback(self._gettsys, "Tsys")
[256]758
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # delegates to the generic per-row column accessor
        return self._get_column( self._gettsysspectrum, row )
773
[1730]774 def get_weather(self, row=-1):
[1846]775 """\
776 Return the weather informations.
777
778 Parameters:
779
780 row: the rowno to get the information for. (default all rows)
781
782 Returns:
783
784 a dict or list of of dicts of values for the current selection
785
786 """
787
[1730]788 values = self._get_column(self._get_weather, row)
789 if row > -1:
790 return {'temperature': values[0],
791 'pressure': values[1], 'humidity' : values[2],
792 'windspeed' : values[3], 'windaz' : values[4]
793 }
794 else:
795 out = []
796 for r in values:
797
798 out.append({'temperature': r[0],
799 'pressure': r[1], 'humidity' : r[2],
800 'windspeed' : r[3], 'windaz' : r[4]
801 })
802 return out
803
[876]804 def _row_callback(self, callback, label):
805 out = ""
[1118]806 outvec = []
[1590]807 sep = '-'*50
[876]808 for i in range(self.nrow()):
809 tm = self._gettime(i)
810 src = self._getsourcename(i)
[1590]811 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]812 out += 'Time[%s]:\n' % (tm)
[1590]813 if self.nbeam(-1) > 1:
814 out += ' Beam[%d] ' % (self.getbeam(i))
815 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
816 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]817 outvec.append(callback(i))
818 out += '= %3.3f\n' % (outvec[i])
[1590]819 out += sep+'\n'
[1859]820
821 asaplog.push(sep)
822 asaplog.push(" %s" % (label))
823 asaplog.push(sep)
824 asaplog.push(out)
[1861]825 asaplog.post()
[1175]826 return outvec
[256]827
[1947]828 def _get_column(self, callback, row=-1, *args):
[1070]829 """
830 """
831 if row == -1:
[1947]832 return [callback(i, *args) for i in range(self.nrow())]
[1070]833 else:
[1819]834 if 0 <= row < self.nrow():
[1947]835 return callback(row, *args)
[256]836
[1070]837
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (one more digit than the shortest integration time needs)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # build a strptime format matching the precision of the strings:
        # low precisions drop the fractional part, seconds, then minutes
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]883
[1348]884
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getinttime, row)
[1348]896
[1573]897
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getsourcename, row)
[714]908
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getelevation, row)
[794]920
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getazimuth, row)
[794]931
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getparangle, row)
[794]943
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getdirection, row)
955
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        # delegates to the generic per-row column accessor
        return self._get_column(self._getdirectionvec, row)
967
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit:    optional unit, default is 'channel'. Use one of '*Hz',
                     'km/s', 'channel' or equivalent ''

        """
        varlist = vars()
        # 'pixel' and 'channel' are synonyms for the default (empty) unit
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        # first element of the coordinate info holds the axis unit
        inf[0] = unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
[113]986
[1862]987 @asaplog_post_dec
[484]988 def set_instrument(self, instr):
[1846]989 """\
[1348]990 Set the instrument for subsequent processing.
[1846]991
[358]992 Parameters:
[1846]993
[710]994 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]995 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]996
[358]997 """
998 self._setInstrument(instr)
[1118]999 self._add_history("set_instument", vars())
[358]1000
[1862]1001 @asaplog_post_dec
[1190]1002 def set_feedtype(self, feedtype):
[1846]1003 """\
[1190]1004 Overwrite the feed type, which might not be set correctly.
[1846]1005
[1190]1006 Parameters:
[1846]1007
[1190]1008 feedtype: 'linear' or 'circular'
[1846]1009
[1190]1010 """
1011 self._setfeedtype(feedtype)
1012 self._add_history("set_feedtype", vars())
1013
[1862]1014 @asaplog_post_dec
[276]1015 def set_doppler(self, doppler='RADIO'):
[1846]1016 """\
[276]1017 Set the doppler for all following operations on this scantable.
[1846]1018
[276]1019 Parameters:
[1846]1020
[276]1021 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1022
[276]1023 """
[484]1024 varlist = vars()
[276]1025 inf = list(self._getcoordinfo())
1026 inf[2] = doppler
1027 self._setcoordinfo(inf)
[1118]1028 self._add_history("set_doppler", vars())
[710]1029
[1862]1030 @asaplog_post_dec
[226]1031 def set_freqframe(self, frame=None):
[1846]1032 """\
[113]1033 Set the frame type of the Spectral Axis.
[1846]1034
[113]1035 Parameters:
[1846]1036
[591]1037 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1038 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1039 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1040
1041 Example::
1042
[113]1043 scan.set_freqframe('BARY')
[1846]1044
[113]1045 """
[1593]1046 frame = frame or rcParams['scantable.freqframe']
[484]1047 varlist = vars()
[1819]1048 # "REST" is not implemented in casacore
1049 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1050 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1051 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1052 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1053
[989]1054 if frame in valid:
[113]1055 inf = list(self._getcoordinfo())
1056 inf[1] = frame
1057 self._setcoordinfo(inf)
[1118]1058 self._add_history("set_freqframe", varlist)
[102]1059 else:
[1118]1060 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1061 raise TypeError(msg)
[710]1062
[1862]1063 @asaplog_post_dec
[989]1064 def set_dirframe(self, frame=""):
[1846]1065 """\
[989]1066 Set the frame type of the Direction on the sky.
[1846]1067
[989]1068 Parameters:
[1846]1069
[989]1070 frame: an optional frame type, default ''. Valid frames are:
1071 'J2000', 'B1950', 'GALACTIC'
[1846]1072
1073 Example:
1074
[989]1075 scan.set_dirframe('GALACTIC')
[1846]1076
[989]1077 """
1078 varlist = vars()
[1859]1079 Scantable.set_dirframe(self, frame)
[1118]1080 self._add_history("set_dirframe", varlist)
[989]1081
[113]1082 def get_unit(self):
[1846]1083 """\
[113]1084 Get the default unit set in this scantable
[1846]1085
[113]1086 Returns:
[1846]1087
[113]1088 A unit string
[1846]1089
[113]1090 """
1091 inf = self._getcoordinfo()
1092 unit = inf[0]
1093 if unit == '': unit = 'channel'
1094 return unit
[102]1095
[1862]1096 @asaplog_post_dec
[158]1097 def get_abcissa(self, rowno=0):
[1846]1098 """\
[158]1099 Get the abcissa in the current coordinate setup for the currently
[113]1100 selected Beam/IF/Pol
[1846]1101
[113]1102 Parameters:
[1846]1103
[226]1104 rowno: an optional row number in the scantable. Default is the
1105 first row, i.e. rowno=0
[1846]1106
[113]1107 Returns:
[1846]1108
[1348]1109 The abcissa values and the format string (as a dictionary)
[1846]1110
[113]1111 """
[256]1112 abc = self._getabcissa(rowno)
[710]1113 lbl = self._getabcissalabel(rowno)
[158]1114 return abc, lbl
[113]1115
[1862]1116 @asaplog_post_dec
[2322]1117 def flag(self, mask=None, unflag=False, row=-1):
[1846]1118 """\
[1001]1119 Flag the selected data using an optional channel mask.
[1846]1120
[1001]1121 Parameters:
[1846]1122
[1001]1123 mask: an optional channel mask, created with create_mask. Default
1124 (no mask) is all channels.
[1855]1125
[1819]1126 unflag: if True, unflag the data
[1846]1127
[2322]1128 row: an optional row number in the scantable.
1129 Default -1 flags all rows
1130
[1001]1131 """
1132 varlist = vars()
[1593]1133 mask = mask or []
[1994]1134 self._flag(row, mask, unflag)
[1001]1135 self._add_history("flag", varlist)
1136
[1862]1137 @asaplog_post_dec
[2322]1138 def flag_row(self, rows=None, unflag=False):
[1846]1139 """\
[1819]1140 Flag the selected data in row-based manner.
[1846]1141
[1819]1142 Parameters:
[1846]1143
[1843]1144 rows: list of row numbers to be flagged. Default is no row
[2322]1145 (must be explicitly specified to execute row-based
1146 flagging).
[1855]1147
[1819]1148 unflag: if True, unflag the data.
[1846]1149
[1819]1150 """
1151 varlist = vars()
[2322]1152 if rows is None:
1153 rows = []
[1859]1154 self._flag_row(rows, unflag)
[1819]1155 self._add_history("flag_row", varlist)
1156
[1862]1157 @asaplog_post_dec
[1819]1158 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1159 """\
[1819]1160 Flag the selected data outside a specified range (in channel-base)
[1846]1161
[1819]1162 Parameters:
[1846]1163
[1819]1164 uthres: upper threshold.
[1855]1165
[1819]1166 dthres: lower threshold
[1846]1167
[2322]1168 clipoutside: True for flagging data outside the range
1169 [dthres:uthres].
[1928]1170 False for flagging data inside the range.
[1855]1171
[1846]1172 unflag: if True, unflag the data.
1173
[1819]1174 """
1175 varlist = vars()
[1859]1176 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1177 self._add_history("clip", varlist)
1178
[1862]1179 @asaplog_post_dec
[1584]1180 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1181 """\
[1192]1182 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1183 Flagged data in the scantable get interpolated over the region.
[1192]1184 No taper is applied.
[1846]1185
[1192]1186 Parameters:
[1846]1187
[1579]1188 start: the start frequency (really a period within the
1189 bandwidth) or period to remove
[1855]1190
[1579]1191 end: the end frequency or period to remove
[1855]1192
[1584]1193 unit: the frequency unit (default "MHz") or "" for
[1579]1194 explicit lag channels
[1846]1195
1196 *Notes*:
1197
[1579]1198 It is recommended to flag edges of the band or strong
[1348]1199 signals beforehand.
[1846]1200
[1192]1201 """
1202 if insitu is None: insitu = rcParams['insitu']
1203 self._math._setinsitu(insitu)
1204 varlist = vars()
[1579]1205 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1206 if not (unit == "" or base.has_key(unit)):
[1192]1207 raise ValueError("%s is not a valid unit." % unit)
[1859]1208 if unit == "":
1209 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1210 else:
1211 s = scantable(self._math._lag_flag(self, start*base[unit],
1212 end*base[unit], "frequency"))
[1192]1213 s._add_history("lag_flag", varlist)
1214 if insitu:
1215 self._assign(s)
1216 else:
1217 return s
[1001]1218
    @asaplog_post_dec
    def fft(self, rowno=None, mask=None, getrealimag=False):
        """\
        Apply FFT to the spectra.
        Flagged data in the scantable get interpolated over the region.

        Parameters:

            rowno:          The row number(s) to be processed. int, list
                            and tuple are accepted. By default (None), FFT
                            is applied to the whole data.

            mask:           Auxiliary channel mask(s). Given as a boolean
                            list, it is applied to all specified rows.
                            A list of boolean lists can also be used to
                            apply different masks. In the latter case, the
                            length of 'mask' must be the same as that of
                            'rowno'. The default is None.

            getrealimag:    If True, returns the real and imaginary part
                            values of the complex results.
                            If False (the default), returns the amplitude
                            (absolute value) normalised with Ndata/2 and
                            phase (argument, in unit of radian).

        Returns:

            A list of dictionaries containing the results for each spectrum.
            Each dictionary contains two values, the real and the imaginary
            parts when getrealimag = True, or the amplitude(absolute value)
            and the phase(argument) when getrealimag = False. The key for
            these values are 'real' and 'imag', or 'ampl' and 'phase',
            respectively.
        """
        # normalise arguments: None means 'all rows' / 'no auxiliary mask'
        if rowno is None:
            rowno = []
        if mask is None:
            mask = []
        if isinstance(rowno, int):
            rowno = [rowno]
        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
            raise TypeError("The row number(s) must be int, list or tuple.")

        # an empty row list selects every row of the scantable
        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]

        if not (isinstance(mask, list) or isinstance(mask, tuple)):
            raise TypeError("The mask must be a boolean list or a list of "
                            "boolean list.")
        # an empty mask selects all channels
        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
        # promote a single boolean list to a list of one mask
        if isinstance(mask[0], bool): mask = [mask]
        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
            raise TypeError("The mask must be a boolean list or a list of "
                            "boolean list.")

        # one shared mask, or one mask per selected row
        usecommonmask = (len(mask) == 1)
        if not usecommonmask:
            if len(mask) != len(rowno):
                raise ValueError("When specifying masks for each spectrum, "
                                 "the numbers of them must be identical.")
        for amask in mask:
            if len(amask) != self.nchan():
                raise ValueError("The spectra and the mask have different "
                                 "length.")

        res = []

        imask = 0
        for whichrow in rowno:
            # _fft returns the two components of each channel interleaved
            # in one flat list: [c0_a, c0_b, c1_a, c1_b, ...]
            fspec = self._fft(whichrow, mask[imask], getrealimag)
            nspec = len(fspec)

            i = 0
            v1 = []
            v2 = []
            reselem = {"real":[],"imag":[]} if getrealimag \
                      else {"ampl":[],"phase":[]}

            # de-interleave: even indices -> first component,
            # odd indices -> second component
            while (i < nspec):
                v1.append(fspec[i])
                v2.append(fspec[i+1])
                i += 2

            if getrealimag:
                reselem["real"] += v1
                reselem["imag"] += v2
            else:
                reselem["ampl"] += v1
                reselem["phase"] += v2

            res.append(reselem)

            # only advance the mask index when masks are per-row
            if not usecommonmask:
                imask += 1

        return res
1314
1315 @asaplog_post_dec
[113]1316 def create_mask(self, *args, **kwargs):
[1846]1317 """\
[1118]1318 Compute and return a mask based on [min, max] windows.
[189]1319 The specified windows are to be INCLUDED, when the mask is
[113]1320 applied.
[1846]1321
[102]1322 Parameters:
[1846]1323
[1118]1324 [min, max], [min2, max2], ...
[1024]1325 Pairs of start/end points (inclusive)specifying the regions
[102]1326 to be masked
[1855]1327
[189]1328 invert: optional argument. If specified as True,
1329 return an inverted mask, i.e. the regions
1330 specified are EXCLUDED
[1855]1331
[513]1332 row: create the mask using the specified row for
1333 unit conversions, default is row=0
1334 only necessary if frequency varies over rows.
[1846]1335
1336 Examples::
1337
[113]1338 scan.set_unit('channel')
[1846]1339 # a)
[1118]1340 msk = scan.create_mask([400, 500], [800, 900])
[189]1341 # masks everything outside 400 and 500
[113]1342 # and 800 and 900 in the unit 'channel'
1343
[1846]1344 # b)
[1118]1345 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1346 # masks the regions between 400 and 500
[113]1347 # and 800 and 900 in the unit 'channel'
[1846]1348
1349 # c)
1350 #mask only channel 400
[1554]1351 msk = scan.create_mask([400])
[1846]1352
[102]1353 """
[1554]1354 row = kwargs.get("row", 0)
[513]1355 data = self._getabcissa(row)
[113]1356 u = self._getcoordinfo()[0]
[1859]1357 if u == "":
1358 u = "channel"
1359 msg = "The current mask window unit is %s" % u
1360 i = self._check_ifs()
1361 if not i:
1362 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1363 asaplog.push(msg)
[2348]1364 n = len(data)
[1295]1365 msk = _n_bools(n, False)
[710]1366 # test if args is a 'list' or a 'normal *args - UGLY!!!
1367
[2349]1368 ws = (isinstance(args[-1][-1], int)
1369 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1370 for window in ws:
[1554]1371 if len(window) == 1:
1372 window = [window[0], window[0]]
1373 if len(window) == 0 or len(window) > 2:
[2349]1374 raise ValueError("A window needs to be defined as "
1375 "[start(, end)]")
[1545]1376 if window[0] > window[1]:
1377 tmp = window[0]
1378 window[0] = window[1]
1379 window[1] = tmp
[102]1380 for i in range(n):
[1024]1381 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1382 msk[i] = True
[113]1383 if kwargs.has_key('invert'):
1384 if kwargs.get('invert'):
[1295]1385 msk = mask_not(msk)
[102]1386 return msk
[710]1387
[1931]1388 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1389 """\
[1819]1390 Compute and return a list of mask windows, [min, max].
[1846]1391
[1819]1392 Parameters:
[1846]1393
[1819]1394 mask: channel mask, created with create_mask.
[1855]1395
[1819]1396 row: calcutate the masklist using the specified row
1397 for unit conversions, default is row=0
1398 only necessary if frequency varies over rows.
[1846]1399
[1819]1400 Returns:
[1846]1401
[1819]1402 [min, max], [min2, max2], ...
1403 Pairs of start/end points (inclusive)specifying
1404 the masked regions
[1846]1405
[1819]1406 """
1407 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1408 raise TypeError("The mask should be list or tuple.")
1409 if len(mask) < 2:
1410 raise TypeError("The mask elements should be > 1")
[2348]1411 data = self._getabcissa(row)
1412 if len(data) != len(mask):
[1819]1413 msg = "Number of channels in scantable != number of mask elements"
1414 raise TypeError(msg)
1415 u = self._getcoordinfo()[0]
[1859]1416 if u == "":
1417 u = "channel"
1418 msg = "The current mask window unit is %s" % u
1419 i = self._check_ifs()
1420 if not i:
1421 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1422 if not silent:
1423 asaplog.push(msg)
[2349]1424 masklist = []
[1819]1425 ist, ien = None, None
1426 ist, ien=self.get_mask_indices(mask)
1427 if ist is not None and ien is not None:
1428 for i in xrange(len(ist)):
1429 range=[data[ist[i]],data[ien[i]]]
1430 range.sort()
1431 masklist.append([range[0],range[1]])
1432 return masklist
1433
1434 def get_mask_indices(self, mask=None):
[1846]1435 """\
[1819]1436 Compute and Return lists of mask start indices and mask end indices.
[1855]1437
1438 Parameters:
1439
[1819]1440 mask: channel mask, created with create_mask.
[1846]1441
[1819]1442 Returns:
[1846]1443
[1819]1444 List of mask start indices and that of mask end indices,
1445 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1446
[1819]1447 """
1448 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1449 raise TypeError("The mask should be list or tuple.")
1450 if len(mask) < 2:
1451 raise TypeError("The mask elements should be > 1")
[2349]1452 istart = []
1453 iend = []
1454 if mask[0]:
1455 istart.append(0)
[1819]1456 for i in range(len(mask)-1):
1457 if not mask[i] and mask[i+1]:
1458 istart.append(i+1)
1459 elif mask[i] and not mask[i+1]:
1460 iend.append(i)
[2349]1461 if mask[len(mask)-1]:
1462 iend.append(len(mask)-1)
[1819]1463 if len(istart) != len(iend):
1464 raise RuntimeError("Numbers of mask start != mask end.")
1465 for i in range(len(istart)):
1466 if istart[i] > iend[i]:
1467 raise RuntimeError("Mask start index > mask end index")
1468 break
1469 return istart,iend
1470
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
            A dictionary of selected (valid) IF and masklist pairs,
            e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to get its
                        # frequency coordinate and abcissa range
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in the current unit
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                    del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1604
[2349]1605 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]1606 minval=None, maxval=None):
[2013]1607 """
1608 Parameters:
1609 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1610 typestr : The type of the values in returned list
1611 ('integer' or 'float')
1612 offset : The offset value to subtract from or add to
1613 the boundary value if the selection string
1614 includes '<' or '>'
1615 minval, maxval : The minimum/maximum values to set if the
1616 selection string includes '<' or '>'.
1617 The list element is filled with None by default.
1618 Returns:
1619 A list of min/max pair of selections.
1620 Example:
1621 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1622 returns [[0,2],[5,7],[9,9]]
1623 """
1624 selgroups = selstr.split(';')
1625 sellists = []
1626 if typestr.lower().startswith('int'):
1627 formatfunc = int
1628 else:
1629 formatfunc = float
1630
1631 for currsel in selgroups:
1632 if currsel.find('~') > 0:
1633 minsel = formatfunc(currsel.split('~')[0].strip())
1634 maxsel = formatfunc(currsel.split('~')[1].strip())
1635 elif currsel.strip().startswith('<'):
1636 minsel = minval
1637 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1638 - formatfunc(offset)
1639 elif currsel.strip().startswith('>'):
1640 minsel = formatfunc(currsel.split('>')[1].strip()) \
1641 + formatfunc(offset)
1642 maxsel = maxval
1643 else:
1644 minsel = formatfunc(currsel)
1645 maxsel = formatfunc(currsel)
1646 sellists.append([minsel,maxsel])
1647 return sellists
1648
[1819]1649# def get_restfreqs(self):
1650# """
1651# Get the restfrequency(s) stored in this scantable.
1652# The return value(s) are always of unit 'Hz'
1653# Parameters:
1654# none
1655# Returns:
1656# a list of doubles
1657# """
1658# return list(self._getrestfreqs())
1659
1660 def get_restfreqs(self, ids=None):
[1846]1661 """\
[256]1662 Get the restfrequency(s) stored in this scantable.
1663 The return value(s) are always of unit 'Hz'
[1846]1664
[256]1665 Parameters:
[1846]1666
[1819]1667 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1668 be retrieved
[1846]1669
[256]1670 Returns:
[1846]1671
[1819]1672 dictionary containing ids and a list of doubles for each id
[1846]1673
[256]1674 """
[1819]1675 if ids is None:
[2349]1676 rfreqs = {}
[1819]1677 idlist = self.getmolnos()
1678 for i in idlist:
[2349]1679 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1680 return rfreqs
1681 else:
[2349]1682 if type(ids) == list or type(ids) == tuple:
1683 rfreqs = {}
[1819]1684 for i in ids:
[2349]1685 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1686 return rfreqs
1687 else:
1688 return list(self._getrestfreqs(ids))
[102]1689
[2349]1690 @asaplog_post_dec
[931]1691 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1692 """\
[931]1693 Set or replace the restfrequency specified and
[1938]1694 if the 'freqs' argument holds a scalar,
[931]1695 then that rest frequency will be applied to all the selected
1696 data. If the 'freqs' argument holds
1697 a vector, then it MUST be of equal or smaller length than
1698 the number of IFs (and the available restfrequencies will be
1699 replaced by this vector). In this case, *all* data have
1700 the restfrequency set per IF according
1701 to the corresponding value you give in the 'freqs' vector.
[1118]1702 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1703 IF 1 gets restfreq 2e9.
[1846]1704
[1395]1705 You can also specify the frequencies via a linecatalog.
[1153]1706
[931]1707 Parameters:
[1846]1708
[931]1709 freqs: list of rest frequency values or string idenitfiers
[1855]1710
[931]1711 unit: unit for rest frequency (default 'Hz')
[402]1712
[1846]1713
1714 Example::
1715
[1819]1716 # set the given restfrequency for the all currently selected IFs
[931]1717 scan.set_restfreqs(freqs=1.4e9)
[1845]1718 # set restfrequencies for the n IFs (n > 1) in the order of the
1719 # list, i.e
1720 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1721 # len(list_of_restfreqs) == nIF
1722 # for nIF == 1 the following will set multiple restfrequency for
1723 # that IF
[1819]1724 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1725 # set multiple restfrequencies per IF. as a list of lists where
1726 # the outer list has nIF elements, the inner s arbitrary
1727 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1728
[1846]1729 *Note*:
[1845]1730
[931]1731 To do more sophisticate Restfrequency setting, e.g. on a
1732 source and IF basis, use scantable.set_selection() before using
[1846]1733 this function::
[931]1734
[1846]1735 # provided your scantable is called scan
1736 selection = selector()
1737 selection.set_name("ORION*")
1738 selection.set_ifs([1])
1739 scan.set_selection(selection)
1740 scan.set_restfreqs(freqs=86.6e9)
1741
[931]1742 """
1743 varlist = vars()
[1157]1744 from asap import linecatalog
1745 # simple value
[1118]1746 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1747 self._setrestfreqs([freqs], [""], unit)
[1157]1748 # list of values
[1118]1749 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1750 # list values are scalars
[1118]1751 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1752 if len(freqs) == 1:
1753 self._setrestfreqs(freqs, [""], unit)
1754 else:
1755 # allow the 'old' mode of setting mulitple IFs
1756 sel = selector()
1757 savesel = self._getselection()
1758 iflist = self.getifnos()
1759 if len(freqs)>len(iflist):
1760 raise ValueError("number of elements in list of list "
1761 "exeeds the current IF selections")
1762 iflist = self.getifnos()
1763 for i, fval in enumerate(freqs):
1764 sel.set_ifs(iflist[i])
1765 self._setselection(sel)
1766 self._setrestfreqs([fval], [""], unit)
1767 self._setselection(savesel)
1768
1769 # list values are dict, {'value'=, 'name'=)
[1157]1770 elif isinstance(freqs[-1], dict):
[1845]1771 values = []
1772 names = []
1773 for d in freqs:
1774 values.append(d["value"])
1775 names.append(d["name"])
1776 self._setrestfreqs(values, names, unit)
[1819]1777 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1778 sel = selector()
1779 savesel = self._getselection()
[1322]1780 iflist = self.getifnos()
[1819]1781 if len(freqs)>len(iflist):
[1845]1782 raise ValueError("number of elements in list of list exeeds"
1783 " the current IF selections")
1784 for i, fval in enumerate(freqs):
[1322]1785 sel.set_ifs(iflist[i])
[1259]1786 self._setselection(sel)
[1845]1787 self._setrestfreqs(fval, [""], unit)
[1157]1788 self._setselection(savesel)
1789 # freqs are to be taken from a linecatalog
[1153]1790 elif isinstance(freqs, linecatalog):
1791 sel = selector()
1792 savesel = self._getselection()
1793 for i in xrange(freqs.nrow()):
[1322]1794 sel.set_ifs(iflist[i])
[1153]1795 self._setselection(sel)
[1845]1796 self._setrestfreqs([freqs.get_frequency(i)],
1797 [freqs.get_name(i)], "MHz")
[1153]1798 # ensure that we are not iterating past nIF
1799 if i == self.nif()-1: break
1800 self._setselection(savesel)
[931]1801 else:
1802 return
1803 self._add_history("set_restfreqs", varlist)
1804
[2349]1805 @asaplog_post_dec
[1360]1806 def shift_refpix(self, delta):
[1846]1807 """\
[1589]1808 Shift the reference pixel of the Spectra Coordinate by an
1809 integer amount.
[1846]1810
[1589]1811 Parameters:
[1846]1812
[1589]1813 delta: the amount to shift by
[1846]1814
1815 *Note*:
1816
[1589]1817 Be careful using this with broadband data.
[1846]1818
[1360]1819 """
[2349]1820 varlist = vars()
[1731]1821 Scantable.shift_refpix(self, delta)
[2349]1822 s._add_history("shift_refpix", varlist)
[931]1823
[1862]1824 @asaplog_post_dec
[1259]1825 def history(self, filename=None):
[1846]1826 """\
[1259]1827 Print the history. Optionally to a file.
[1846]1828
[1348]1829 Parameters:
[1846]1830
[1928]1831 filename: The name of the file to save the history to.
[1846]1832
[1259]1833 """
[484]1834 hist = list(self._gethistory())
[794]1835 out = "-"*80
[484]1836 for h in hist:
[489]1837 if h.startswith("---"):
[1857]1838 out = "\n".join([out, h])
[489]1839 else:
1840 items = h.split("##")
1841 date = items[0]
1842 func = items[1]
1843 items = items[2:]
[794]1844 out += "\n"+date+"\n"
1845 out += "Function: %s\n Parameters:" % (func)
[489]1846 for i in items:
[1938]1847 if i == '':
1848 continue
[489]1849 s = i.split("=")
[1118]1850 out += "\n %s = %s" % (s[0], s[1])
[1857]1851 out = "\n".join([out, "-"*80])
[1259]1852 if filename is not None:
1853 if filename is "":
1854 filename = 'scantable_history.txt'
1855 import os
1856 filename = os.path.expandvars(os.path.expanduser(filename))
1857 if not os.path.isdir(filename):
1858 data = open(filename, 'w')
1859 data.write(out)
1860 data.close()
1861 else:
1862 msg = "Illegal file name '%s'." % (filename)
[1859]1863 raise IOError(msg)
1864 return page(out)
[2349]1865
[513]1866 #
1867 # Maths business
1868 #
[1862]1869 @asaplog_post_dec
[931]1870 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1871 """\
[2349]1872 Return the (time) weighted average of a scan. Scans will be averaged
1873 only if the source direction (RA/DEC) is within 1' otherwise
[1846]1874
1875 *Note*:
1876
[1070]1877 in channels only - align if necessary
[1846]1878
[513]1879 Parameters:
[1846]1880
[513]1881 mask: an optional mask (only used for 'var' and 'tsys'
1882 weighting)
[1855]1883
[558]1884 scanav: True averages each scan separately
1885 False (default) averages all scans together,
[1855]1886
[1099]1887 weight: Weighting scheme.
1888 'none' (mean no weight)
1889 'var' (1/var(spec) weighted)
1890 'tsys' (1/Tsys**2 weighted)
1891 'tint' (integration time weighted)
1892 'tintsys' (Tint/Tsys**2)
1893 'median' ( median averaging)
[535]1894 The default is 'tint'
[1855]1895
[931]1896 align: align the spectra in velocity before averaging. It takes
1897 the time of the first spectrum as reference time.
[1846]1898
1899 Example::
1900
[513]1901 # time average the scantable without using a mask
[710]1902 newscan = scan.average_time()
[1846]1903
[513]1904 """
1905 varlist = vars()
[1593]1906 weight = weight or 'TINT'
1907 mask = mask or ()
1908 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1909 scan = (self, )
[1859]1910
1911 if align:
1912 scan = (self.freq_align(insitu=False), )
1913 s = None
1914 if weight.upper() == 'MEDIAN':
1915 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1916 scanav))
1917 else:
1918 s = scantable(self._math._average(scan, mask, weight.upper(),
1919 scanav))
[1099]1920 s._add_history("average_time", varlist)
[513]1921 return s
[710]1922
[1862]1923 @asaplog_post_dec
[876]1924 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1925 """\
[513]1926 Return a scan where all spectra are converted to either
1927 Jansky or Kelvin depending upon the flux units of the scan table.
1928 By default the function tries to look the values up internally.
1929 If it can't find them (or if you want to over-ride), you must
1930 specify EITHER jyperk OR eta (and D which it will try to look up
1931 also if you don't set it). jyperk takes precedence if you set both.
[1846]1932
[513]1933 Parameters:
[1846]1934
[513]1935 jyperk: the Jy / K conversion factor
[1855]1936
[513]1937 eta: the aperture efficiency
[1855]1938
[1928]1939 d: the geometric diameter (metres)
[1855]1940
[513]1941 insitu: if False a new scantable is returned.
1942 Otherwise, the scaling is done in-situ
1943 The default is taken from .asaprc (False)
[1846]1944
[513]1945 """
1946 if insitu is None: insitu = rcParams['insitu']
[876]1947 self._math._setinsitu(insitu)
[513]1948 varlist = vars()
[1593]1949 jyperk = jyperk or -1.0
1950 d = d or -1.0
1951 eta = eta or -1.0
[876]1952 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1953 s._add_history("convert_flux", varlist)
1954 if insitu: self._assign(s)
1955 else: return s
[513]1956
[1862]1957 @asaplog_post_dec
[876]1958 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1959 """\
[513]1960 Return a scan after applying a gain-elevation correction.
1961 The correction can be made via either a polynomial or a
1962 table-based interpolation (and extrapolation if necessary).
1963 You specify polynomial coefficients, an ascii table or neither.
1964 If you specify neither, then a polynomial correction will be made
1965 with built in coefficients known for certain telescopes (an error
1966 will occur if the instrument is not known).
1967 The data and Tsys are *divided* by the scaling factors.
[1846]1968
[513]1969 Parameters:
[1846]1970
[513]1971 poly: Polynomial coefficients (default None) to compute a
1972 gain-elevation correction as a function of
1973 elevation (in degrees).
[1855]1974
[513]1975 filename: The name of an ascii file holding correction factors.
1976 The first row of the ascii file must give the column
1977 names and these MUST include columns
1978 "ELEVATION" (degrees) and "FACTOR" (multiply data
1979 by this) somewhere.
1980 The second row must give the data type of the
1981 column. Use 'R' for Real and 'I' for Integer.
1982 An example file would be
1983 (actual factors are arbitrary) :
1984
1985 TIME ELEVATION FACTOR
1986 R R R
1987 0.1 0 0.8
1988 0.2 20 0.85
1989 0.3 40 0.9
1990 0.4 60 0.85
1991 0.5 80 0.8
1992 0.6 90 0.75
[1855]1993
[513]1994 method: Interpolation method when correcting from a table.
1995 Values are "nearest", "linear" (default), "cubic"
1996 and "spline"
[1855]1997
[513]1998 insitu: if False a new scantable is returned.
1999 Otherwise, the scaling is done in-situ
2000 The default is taken from .asaprc (False)
[1846]2001
[513]2002 """
2003
2004 if insitu is None: insitu = rcParams['insitu']
[876]2005 self._math._setinsitu(insitu)
[513]2006 varlist = vars()
[1593]2007 poly = poly or ()
[513]2008 from os.path import expandvars
2009 filename = expandvars(filename)
[876]2010 s = scantable(self._math._gainel(self, poly, filename, method))
2011 s._add_history("gain_el", varlist)
[1593]2012 if insitu:
2013 self._assign(s)
2014 else:
2015 return s
[710]2016
[1862]2017 @asaplog_post_dec
[931]2018 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2019 """\
[513]2020 Return a scan where all rows have been aligned in frequency/velocity.
2021 The alignment frequency frame (e.g. LSRK) is that set by function
2022 set_freqframe.
[1846]2023
[513]2024 Parameters:
[1855]2025
[513]2026 reftime: reference time to align at. By default, the time of
2027 the first row of data is used.
[1855]2028
[513]2029 method: Interpolation method for regridding the spectra.
2030 Choose from "nearest", "linear", "cubic" (default)
2031 and "spline"
[1855]2032
[513]2033 insitu: if False a new scantable is returned.
2034 Otherwise, the scaling is done in-situ
2035 The default is taken from .asaprc (False)
[1846]2036
[513]2037 """
[931]2038 if insitu is None: insitu = rcParams["insitu"]
[876]2039 self._math._setinsitu(insitu)
[513]2040 varlist = vars()
[1593]2041 reftime = reftime or ""
[931]2042 s = scantable(self._math._freq_align(self, reftime, method))
[876]2043 s._add_history("freq_align", varlist)
[2349]2044 if insitu:
2045 self._assign(s)
2046 else:
2047 return s
[513]2048
[1862]2049 @asaplog_post_dec
[1725]2050 def opacity(self, tau=None, insitu=None):
[1846]2051 """\
[513]2052 Apply an opacity correction. The data
2053 and Tsys are multiplied by the correction factor.
[1846]2054
[513]2055 Parameters:
[1855]2056
[1689]2057 tau: (list of) opacity from which the correction factor is
[513]2058 exp(tau*ZD)
[1689]2059 where ZD is the zenith-distance.
2060 If a list is provided, it has to be of length nIF,
2061 nIF*nPol or 1 and in order of IF/POL, e.g.
2062 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2063 if tau is `None` the opacities are determined from a
2064 model.
[1855]2065
[513]2066 insitu: if False a new scantable is returned.
2067 Otherwise, the scaling is done in-situ
2068 The default is taken from .asaprc (False)
[1846]2069
[513]2070 """
[2349]2071 if insitu is None:
2072 insitu = rcParams['insitu']
[876]2073 self._math._setinsitu(insitu)
[513]2074 varlist = vars()
[1689]2075 if not hasattr(tau, "__len__"):
2076 tau = [tau]
[876]2077 s = scantable(self._math._opacity(self, tau))
2078 s._add_history("opacity", varlist)
[2349]2079 if insitu:
2080 self._assign(s)
2081 else:
2082 return s
[513]2083
[1862]2084 @asaplog_post_dec
[513]2085 def bin(self, width=5, insitu=None):
[1846]2086 """\
[513]2087 Return a scan where all spectra have been binned up.
[1846]2088
[1348]2089 Parameters:
[1846]2090
[513]2091 width: The bin width (default=5) in pixels
[1855]2092
[513]2093 insitu: if False a new scantable is returned.
2094 Otherwise, the scaling is done in-situ
2095 The default is taken from .asaprc (False)
[1846]2096
[513]2097 """
[2349]2098 if insitu is None:
2099 insitu = rcParams['insitu']
[876]2100 self._math._setinsitu(insitu)
[513]2101 varlist = vars()
[876]2102 s = scantable(self._math._bin(self, width))
[1118]2103 s._add_history("bin", varlist)
[1589]2104 if insitu:
2105 self._assign(s)
2106 else:
2107 return s
[513]2108
[1862]2109 @asaplog_post_dec
[513]2110 def resample(self, width=5, method='cubic', insitu=None):
[1846]2111 """\
[1348]2112 Return a scan where all spectra have been binned up.
[1573]2113
[1348]2114 Parameters:
[1846]2115
[513]2116 width: The bin width (default=5) in pixels
[1855]2117
[513]2118 method: Interpolation method when correcting from a table.
2119 Values are "nearest", "linear", "cubic" (default)
2120 and "spline"
[1855]2121
[513]2122 insitu: if False a new scantable is returned.
2123 Otherwise, the scaling is done in-situ
2124 The default is taken from .asaprc (False)
[1846]2125
[513]2126 """
[2349]2127 if insitu is None:
2128 insitu = rcParams['insitu']
[876]2129 self._math._setinsitu(insitu)
[513]2130 varlist = vars()
[876]2131 s = scantable(self._math._resample(self, method, width))
[1118]2132 s._add_history("resample", varlist)
[2349]2133 if insitu:
2134 self._assign(s)
2135 else:
2136 return s
[513]2137
[1862]2138 @asaplog_post_dec
[946]2139 def average_pol(self, mask=None, weight='none'):
[1846]2140 """\
[946]2141 Average the Polarisations together.
[1846]2142
[946]2143 Parameters:
[1846]2144
[946]2145 mask: An optional mask defining the region, where the
2146 averaging will be applied. The output will have all
2147 specified points masked.
[1855]2148
[946]2149 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2150 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2151
[946]2152 """
2153 varlist = vars()
[1593]2154 mask = mask or ()
[1010]2155 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2156 s._add_history("average_pol", varlist)
[992]2157 return s
[513]2158
[1862]2159 @asaplog_post_dec
[1145]2160 def average_beam(self, mask=None, weight='none'):
[1846]2161 """\
[1145]2162 Average the Beams together.
[1846]2163
[1145]2164 Parameters:
2165 mask: An optional mask defining the region, where the
2166 averaging will be applied. The output will have all
2167 specified points masked.
[1855]2168
[1145]2169 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2170 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2171
[1145]2172 """
2173 varlist = vars()
[1593]2174 mask = mask or ()
[1145]2175 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2176 s._add_history("average_beam", varlist)
2177 return s
2178
[1586]2179 def parallactify(self, pflag):
[1846]2180 """\
[1843]2181 Set a flag to indicate whether this data should be treated as having
[1617]2182 been 'parallactified' (total phase == 0.0)
[1846]2183
[1617]2184 Parameters:
[1855]2185
[1843]2186 pflag: Bool indicating whether to turn this on (True) or
[1617]2187 off (False)
[1846]2188
[1617]2189 """
[1586]2190 varlist = vars()
2191 self._parallactify(pflag)
2192 self._add_history("parallactify", varlist)
2193
[1862]2194 @asaplog_post_dec
[992]2195 def convert_pol(self, poltype=None):
[1846]2196 """\
[992]2197 Convert the data to a different polarisation type.
[1565]2198 Note that you will need cross-polarisation terms for most conversions.
[1846]2199
[992]2200 Parameters:
[1855]2201
[992]2202 poltype: The new polarisation type. Valid types are:
[1565]2203 "linear", "circular", "stokes" and "linpol"
[1846]2204
[992]2205 """
2206 varlist = vars()
[1859]2207 s = scantable(self._math._convertpol(self, poltype))
[1118]2208 s._add_history("convert_pol", varlist)
[992]2209 return s
2210
[1862]2211 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive mode: show original vs. smoothed spectrum row by
            # row and let the user accept or reject each one
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # NOTE(review): the plotter's subplot 'lines' list is
                # mutated directly so both traces show up in the legend;
                # the call order (append -> release -> reset) matters here
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                    )
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2288
[2186]2289 @asaplog_post_dec
2290 def _parse_wn(self, wn):
2291 if isinstance(wn, list) or isinstance(wn, tuple):
2292 return wn
2293 elif isinstance(wn, int):
2294 return [ wn ]
2295 elif isinstance(wn, str):
[2277]2296 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2297 val = wn.split('-')
2298 val = [int(val[0]), int(val[1])]
2299 val.sort()
2300 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2301 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2302 val = int(wn[2:])+1
2303 res = [i for i in xrange(val)]
[2277]2304 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2305 val = int(wn[:-2])+1
2306 res = [i for i in xrange(val)]
[2277]2307 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2308 val = int(wn[1:])
2309 res = [i for i in xrange(val)]
[2277]2310 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2311 val = int(wn[:-1])
2312 res = [i for i in xrange(val)]
[2277]2313 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
[2186]2314 val = int(wn[2:])
2315 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2316 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
[2186]2317 val = int(wn[:-2])
2318 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2319 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
[2186]2320 val = int(wn[1:])+1
2321 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2322 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
[2186]2323 val = int(wn[:-1])+1
2324 res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2325
[2186]2326 return res
2327 else:
2328 msg = 'wrong value given for addwn/rejwn'
2329 raise RuntimeError(msg)
2330
2331
[1862]2332 @asaplog_post_dec
    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None, clipthresh=None,
                          clipniter=None, plot=None,
                          getresidual=None, showprogress=None,
                          minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the arguments for the history record before any
            # defaulting below mutates the local variables
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # an empty mask makes the C++ layer build the channel mask per
            # row, so multi-IF data with different nchan work; a single
            # full-length mask (old code below) did not (CAS-3606/CAS-3757)
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = []
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                                        str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn), clipthresh,
                                        clipniter, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow), outlog,
                                        blfile)
            workscan._add_history('sinusoid_baseline', varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # translate C++-side fitting failures into a uniform exception
            raise_fitting_failure_exception(e)
[2047]2451
2452
[2186]2453 @asaplog_post_dec
    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None, clipthresh=None,
                               clipniter=None, edge=None, threshold=None,
                               chan_avg_limit=None, plot=None,
                               getresidual=None, showprogress=None,
                               minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the arguments for the history record before any
            # defaulting below mutates the local variables
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # an empty mask makes the C++ layer build the channel mask per
            # row, so multi-IF data with different nchan work; a single
            # full-length mask (old code below) did not (CAS-3606/CAS-3757)
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = []
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask, applyfft,
                                             fftmethod.lower(),
                                             str(fftthresh).lower(),
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # translate C++-side fitting failures into a uniform exception
            raise_fitting_failure_exception(e)
[2047]2599
2600 @asaplog_post_dec
    def cspline_baseline(self, insitu=None, mask=None, npiece=None,
                         clipthresh=None, clipniter=None, plot=None,
                         getresidual=None, showprogress=None, minnrow=None,
                         outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).

        Parameters:
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         An optional mask
            npiece:       Number of pieces. (default is 2)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a cubic spline consisting of 2 pieces
            # (i.e., 1 internal knot),
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the arguments for the history record before any
            # defaulting below mutates the local variables
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # an empty mask makes the C++ layer build the channel mask per
            # row, so multi-IF data with different nchan work; a single
            # full-length mask (old code below) did not (CAS-3606/CAS-3757)
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
                                       getresidual,
                                       pack_progress_params(showprogress,
                                                            minnrow), outlog,
                                       blfile)
            workscan._add_history("cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # translate C++-side fitting failures into a uniform exception
            raise_fitting_failure_exception(e)
[1855]2681
[2186]2682 @asaplog_post_dec
    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            npiece:         Number of pieces. (default is 2)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the arguments for the history record before any
            # defaulting below mutates the local variables
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # an empty mask makes the C++ layer build the channel mask per
            # row, so multi-IF data with different nchan work; a single
            # full-length mask (old code below) did not (CAS-3606/CAS-3757)
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
                                            clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog, blfile)
            workscan._add_history("auto_cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # translate C++-side fitting failures into a uniform exception
            raise_fitting_failure_exception(e)
[513]2790
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            order:         the order of the polynomial (default is 0)
            plot:          plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # capture the call arguments for the history entry below
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                # operate directly on self; _assign() below is then a no-op
                # in effect, but keeps both branches uniform
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            # an empty mask means "use all channels" on the C++ side
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # NOTE(review): output to blfile only happens when the file
                # already exists; a fresh filename is silently ignored in
                # plot mode (non-plot mode creates it) — confirm intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                        os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # interactive loop: fit each row, show it, and let the user
                # accept ('y'/default) or reject ('n') the fit
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's channel flags
                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual (default) or the fit itself
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: the whole fit is done in C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]2907
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # capture the call arguments for the history entry below
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            # an empty mask means "use all channels" on the C++ side
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # flatten edge into a per-IF pair list: [lo0, hi0, lo1, hi1, ...]
            edge = normalise_edge_param(edge)

            if plot:
                # NOTE(review): output to blfile only happens when the file
                # already exists; a fresh filename is silently ignored in
                # plot mode (non-plot mode creates it) — confirm intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # interactive loop: detect lines, fit the row, show it, and
                # let the user accept ('y'/default) or reject ('n') the fit
                for r in rows:
                    # edge holds (lo, hi) pairs per IF, hence the 2*IF index
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # use the linefinder's mask so detected lines do not
                    # bias the baseline fit
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual (default) or the fit itself
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: line finding and fitting in C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]3059
3060 def _init_blinfo(self):
3061 """\
3062 Initialise the following three auxiliary members:
3063 blpars : parameters of the best-fit baseline,
3064 masklists : mask data (edge positions of masked channels) and
3065 actualmask : mask data (in boolean list),
3066 to keep for use later (including output to logger/text files).
3067 Used by poly_baseline() and auto_poly_baseline() in case of
3068 'plot=True'.
3069 """
3070 self.blpars = []
3071 self.masklists = []
3072 self.actualmask = []
3073 return
[880]3074
[2012]3075 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3076 """\
3077 Append baseline-fitting related info to blpars, masklist and
3078 actualmask.
3079 """
3080 self.blpars.append(data_blpars)
3081 self.masklists.append(data_masklists)
3082 self.actualmask.append(data_actualmask)
3083 return
3084
[1862]3085 @asaplog_post_dec
[914]3086 def rotate_linpolphase(self, angle):
[1846]3087 """\
[914]3088 Rotate the phase of the complex polarization O=Q+iU correlation.
3089 This is always done in situ in the raw data. So if you call this
3090 function more than once then each call rotates the phase further.
[1846]3091
[914]3092 Parameters:
[1846]3093
[914]3094 angle: The angle (degrees) to rotate (add) by.
[1846]3095
3096 Example::
3097
[914]3098 scan.rotate_linpolphase(2.3)
[1846]3099
[914]3100 """
3101 varlist = vars()
[936]3102 self._math._rotate_linpolphase(self, angle)
[914]3103 self._add_history("rotate_linpolphase", varlist)
3104 return
[710]3105
[1862]3106 @asaplog_post_dec
[914]3107 def rotate_xyphase(self, angle):
[1846]3108 """\
[914]3109 Rotate the phase of the XY correlation. This is always done in situ
3110 in the data. So if you call this function more than once
3111 then each call rotates the phase further.
[1846]3112
[914]3113 Parameters:
[1846]3114
[914]3115 angle: The angle (degrees) to rotate (add) by.
[1846]3116
3117 Example::
3118
[914]3119 scan.rotate_xyphase(2.3)
[1846]3120
[914]3121 """
3122 varlist = vars()
[936]3123 self._math._rotate_xyphase(self, angle)
[914]3124 self._add_history("rotate_xyphase", varlist)
3125 return
3126
[1862]3127 @asaplog_post_dec
[914]3128 def swap_linears(self):
[1846]3129 """\
[1573]3130 Swap the linear polarisations XX and YY, or better the first two
[1348]3131 polarisations as this also works for ciculars.
[914]3132 """
3133 varlist = vars()
[936]3134 self._math._swap_linears(self)
[914]3135 self._add_history("swap_linears", varlist)
3136 return
3137
[1862]3138 @asaplog_post_dec
[914]3139 def invert_phase(self):
[1846]3140 """\
[914]3141 Invert the phase of the complex polarisation
3142 """
3143 varlist = vars()
[936]3144 self._math._invert_phase(self)
[914]3145 self._add_history("invert_phase", varlist)
3146 return
3147
[1862]3148 @asaplog_post_dec
[876]3149 def add(self, offset, insitu=None):
[1846]3150 """\
[513]3151 Return a scan where all spectra have the offset added
[1846]3152
[513]3153 Parameters:
[1846]3154
[513]3155 offset: the offset
[1855]3156
[513]3157 insitu: if False a new scantable is returned.
3158 Otherwise, the scaling is done in-situ
3159 The default is taken from .asaprc (False)
[1846]3160
[513]3161 """
3162 if insitu is None: insitu = rcParams['insitu']
[876]3163 self._math._setinsitu(insitu)
[513]3164 varlist = vars()
[876]3165 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3166 s._add_history("add", varlist)
[876]3167 if insitu:
3168 self._assign(s)
3169 else:
[513]3170 return s
3171
[1862]3172 @asaplog_post_dec
[1308]3173 def scale(self, factor, tsys=True, insitu=None):
[1846]3174 """\
3175
[1938]3176 Return a scan where all spectra are scaled by the given 'factor'
[1846]3177
[513]3178 Parameters:
[1846]3179
[1819]3180 factor: the scaling factor (float or 1D float list)
[1855]3181
[513]3182 insitu: if False a new scantable is returned.
3183 Otherwise, the scaling is done in-situ
3184 The default is taken from .asaprc (False)
[1855]3185
[513]3186 tsys: if True (default) then apply the operation to Tsys
3187 as well as the data
[1846]3188
[513]3189 """
3190 if insitu is None: insitu = rcParams['insitu']
[876]3191 self._math._setinsitu(insitu)
[513]3192 varlist = vars()
[1819]3193 s = None
3194 import numpy
3195 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]3196 if isinstance(factor[0], list) or isinstance(factor[0],
3197 numpy.ndarray):
[1819]3198 from asapmath import _array2dOp
[2320]3199 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]3200 else:
[2320]3201 s = scantable( self._math._arrayop( self, factor,
3202 "MUL", tsys ) )
[1819]3203 else:
[2320]3204 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]3205 s._add_history("scale", varlist)
[876]3206 if insitu:
3207 self._assign(s)
3208 else:
[513]3209 return s
3210
[2349]3211 @preserve_selection
3212 def set_sourcetype(self, match, matchtype="pattern",
[1504]3213 sourcetype="reference"):
[1846]3214 """\
[1502]3215 Set the type of the source to be an source or reference scan
[1846]3216 using the provided pattern.
3217
[1502]3218 Parameters:
[1846]3219
[1504]3220 match: a Unix style pattern, regular expression or selector
[1855]3221
[1504]3222 matchtype: 'pattern' (default) UNIX style pattern or
3223 'regex' regular expression
[1855]3224
[1502]3225 sourcetype: the type of the source to use (source/reference)
[1846]3226
[1502]3227 """
3228 varlist = vars()
3229 basesel = self.get_selection()
3230 stype = -1
3231 if sourcetype.lower().startswith("r"):
3232 stype = 1
3233 elif sourcetype.lower().startswith("s"):
3234 stype = 0
[1504]3235 else:
[1502]3236 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3237 if matchtype.lower().startswith("p"):
3238 matchtype = "pattern"
3239 elif matchtype.lower().startswith("r"):
3240 matchtype = "regex"
3241 else:
3242 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3243 sel = selector()
3244 if isinstance(match, selector):
3245 sel = match
3246 else:
[1504]3247 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3248 self._setsourcetype(stype)
[1573]3249 self._add_history("set_sourcetype", varlist)
[1502]3250
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode: the on/off detection mode
                  'paired' (default)
                  identifies 'off' scans by the
                  trailing '_R' (Mopra/Parkes) or
                  '_e'/'_w' (Tid) and matches
                  on/off pairs from the observing pattern
                  'time'
                  finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        # capture the call arguments for the history entry below
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # First restrict the selection to the 'off' rows and copy them,
            # then do the same for the 'on' rows. @preserve_selection
            # restores the caller's original selection afterwards.
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # delegate on/off matching (closest in time) to the C++ layer
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]3299
[1862]3300 @asaplog_post_dec
[1145]3301 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3302 """\
[1143]3303 Form a quotient using "off" beams when observing in "MX" mode.
[1846]3304
[1143]3305 Parameters:
[1846]3306
[1145]3307 mask: an optional mask to be used when weight == 'stddev'
[1855]3308
[1143]3309 weight: How to average the off beams. Default is 'median'.
[1855]3310
[1145]3311 preserve: you can preserve (default) the continuum or
[1855]3312 remove it. The equations used are:
[1846]3313
[1855]3314 preserve: Output = Toff * (on/off) - Toff
3315
3316 remove: Output = Toff * (on/off) - Ton
3317
[1217]3318 """
[1593]3319 mask = mask or ()
[1141]3320 varlist = vars()
3321 on = scantable(self._math._mx_extract(self, 'on'))
[1143]3322 preoff = scantable(self._math._mx_extract(self, 'off'))
3323 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3324 from asapmath import quotient
[1145]3325 q = quotient(on, off, preserve)
[1143]3326 q._add_history("mx_quotient", varlist)
[1217]3327 return q
[513]3328
[1862]3329 @asaplog_post_dec
[718]3330 def freq_switch(self, insitu=None):
[1846]3331 """\
[718]3332 Apply frequency switching to the data.
[1846]3333
[718]3334 Parameters:
[1846]3335
[718]3336 insitu: if False a new scantable is returned.
3337 Otherwise, the swictching is done in-situ
3338 The default is taken from .asaprc (False)
[1846]3339
[718]3340 """
3341 if insitu is None: insitu = rcParams['insitu']
[876]3342 self._math._setinsitu(insitu)
[718]3343 varlist = vars()
[876]3344 s = scantable(self._math._freqswitch(self))
[1118]3345 s._add_history("freq_switch", varlist)
[1856]3346 if insitu:
3347 self._assign(s)
3348 else:
3349 return s
[718]3350
[1862]3351 @asaplog_post_dec
[780]3352 def recalc_azel(self):
[1846]3353 """Recalculate the azimuth and elevation for each position."""
[780]3354 varlist = vars()
[876]3355 self._recalcazel()
[780]3356 self._add_history("recalc_azel", varlist)
3357 return
3358
[1862]3359 @asaplog_post_dec
[513]3360 def __add__(self, other):
3361 varlist = vars()
3362 s = None
3363 if isinstance(other, scantable):
[1573]3364 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3365 elif isinstance(other, float):
[876]3366 s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3367 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3368 if isinstance(other[0], list) \
3369 or isinstance(other[0], numpy.ndarray):
[2144]3370 from asapmath import _array2dOp
3371 s = _array2dOp( self.copy(), other, "ADD", False )
3372 else:
[2349]3373 s = scantable( self._math._arrayop( self.copy(), other,
3374 "ADD", False ) )
[513]3375 else:
[718]3376 raise TypeError("Other input is not a scantable or float value")
[513]3377 s._add_history("operator +", varlist)
3378 return s
3379
[1862]3380 @asaplog_post_dec
[513]3381 def __sub__(self, other):
3382 """
3383 implicit on all axes and on Tsys
3384 """
3385 varlist = vars()
3386 s = None
3387 if isinstance(other, scantable):
[1588]3388 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3389 elif isinstance(other, float):
[876]3390 s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3391 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3392 if isinstance(other[0], list) \
3393 or isinstance(other[0], numpy.ndarray):
[2144]3394 from asapmath import _array2dOp
3395 s = _array2dOp( self.copy(), other, "SUB", False )
3396 else:
[2349]3397 s = scantable( self._math._arrayop( self.copy(), other,
3398 "SUB", False ) )
[513]3399 else:
[718]3400 raise TypeError("Other input is not a scantable or float value")
[513]3401 s._add_history("operator -", varlist)
3402 return s
[710]3403
[1862]3404 @asaplog_post_dec
[513]3405 def __mul__(self, other):
3406 """
3407 implicit on all axes and on Tsys
3408 """
3409 varlist = vars()
3410 s = None
3411 if isinstance(other, scantable):
[1588]3412 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3413 elif isinstance(other, float):
[876]3414 s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3415 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3416 if isinstance(other[0], list) \
3417 or isinstance(other[0], numpy.ndarray):
[2144]3418 from asapmath import _array2dOp
3419 s = _array2dOp( self.copy(), other, "MUL", False )
3420 else:
[2349]3421 s = scantable( self._math._arrayop( self.copy(), other,
3422 "MUL", False ) )
[513]3423 else:
[718]3424 raise TypeError("Other input is not a scantable or float value")
[513]3425 s._add_history("operator *", varlist)
3426 return s
3427
[710]3428
[1862]3429 @asaplog_post_dec
[513]3430 def __div__(self, other):
3431 """
3432 implicit on all axes and on Tsys
3433 """
3434 varlist = vars()
3435 s = None
3436 if isinstance(other, scantable):
[1589]3437 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3438 elif isinstance(other, float):
3439 if other == 0.0:
[718]3440 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3441 s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3442 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3443 if isinstance(other[0], list) \
3444 or isinstance(other[0], numpy.ndarray):
[2144]3445 from asapmath import _array2dOp
3446 s = _array2dOp( self.copy(), other, "DIV", False )
3447 else:
[2349]3448 s = scantable( self._math._arrayop( self.copy(), other,
3449 "DIV", False ) )
[513]3450 else:
[718]3451 raise TypeError("Other input is not a scantable or float value")
[513]3452 s._add_history("operator /", varlist)
3453 return s
3454
[1862]3455 @asaplog_post_dec
[530]3456 def get_fit(self, row=0):
[1846]3457 """\
[530]3458 Print or return the stored fits for a row in the scantable
[1846]3459
[530]3460 Parameters:
[1846]3461
[530]3462 row: the row which the fit has been applied to.
[1846]3463
[530]3464 """
3465 if row > self.nrow():
3466 return
[976]3467 from asap.asapfit import asapfit
[530]3468 fit = asapfit(self._getfit(row))
[1859]3469 asaplog.push( '%s' %(fit) )
3470 return fit.as_dict()
[530]3471
[2349]3472 @preserve_selection
[1483]3473 def flag_nans(self):
[1846]3474 """\
[1483]3475 Utility function to flag NaN values in the scantable.
3476 """
3477 import numpy
3478 basesel = self.get_selection()
3479 for i in range(self.nrow()):
[1589]3480 sel = self.get_row_selector(i)
3481 self.set_selection(basesel+sel)
[1483]3482 nans = numpy.isnan(self._getspectrum(0))
3483 if numpy.any(nans):
3484 bnans = [ bool(v) for v in nans]
3485 self.flag(bnans)
3486
[1588]3487 def get_row_selector(self, rowno):
[1992]3488 return selector(rows=[rowno])
[1573]3489
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable's history table.

        Parameters:

            funcname:   name of the operation being recorded
            parameters: dict of the call's arguments (typically vars());
                        NOTE: mutated — the 'self' key is removed
        """
        # history keeping can be disabled globally via .asaprc
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # expand one level of nested dicts into key=value pairs
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # masks are stored compactly as [start, end) segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # masks are stored compactly as [start, end) segments
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3530
[710]3531
[484]3532 def _zip_mask(self, mask):
3533 mask = list(mask)
3534 i = 0
3535 segments = []
3536 while mask[i:].count(1):
3537 i += mask[i:].index(1)
3538 if mask[i:].count(0):
3539 j = i + mask[i:].index(0)
3540 else:
[710]3541 j = len(mask)
[1118]3542 segments.append([i, j])
[710]3543 i = j
[484]3544 return segments
[714]3545
[626]3546 def _get_ordinate_label(self):
3547 fu = "("+self.get_fluxunit()+")"
3548 import re
3549 lbl = "Intensity"
[1118]3550 if re.match(".K.", fu):
[626]3551 lbl = "Brightness Temperature "+ fu
[1118]3552 elif re.match(".Jy.", fu):
[626]3553 lbl = "Flux density "+ fu
3554 return lbl
[710]3555
[876]3556 def _check_ifs(self):
[2349]3557# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]3558 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3559 nchans = filter(lambda t: t > 0, nchans)
[876]3560 return (sum(nchans)/len(nchans) == nchans[0])
[976]3561
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable, merging them
        in sequence.

        Parameters:

            names:   list of file/directory names to import
            unit:    flux unit to set afterwards, or None to leave as-is
            average: if True, time-average the integrations of each scan
            opts:    extra options passed to the filler's open()
                     NOTE(review): mutable default 'opts={}' is shared
                     across calls; safe only while never mutated here
        """
        first = True
        fullnames = []
        # validate all names up front so we fail before importing anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage backend: 1 = disk, 0 = memory (from .asaprc)
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the filler matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge every file after the first into the accumulated table
            if not first:
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]3602
[2012]3603
[1402]3604 def __getitem__(self, key):
3605 if key < 0:
3606 key += self.nrow()
3607 if key >= self.nrow():
3608 raise IndexError("Row index out of range.")
3609 return self._getspectrum(key)
3610
3611 def __setitem__(self, key, value):
3612 if key < 0:
3613 key += self.nrow()
3614 if key >= self.nrow():
3615 raise IndexError("Row index out of range.")
3616 if not hasattr(value, "__len__") or \
3617 len(value) > self.nchan(self.getif(key)):
3618 raise ValueError("Spectrum length doesn't match.")
3619 return self._setspectrum(value, key)
3620
3621 def __len__(self):
3622 return self.nrow()
3623
3624 def __iter__(self):
3625 for i in range(len(self)):
3626 yield self[i]
Note: See TracBrowser for help on using the repository browser.