source: trunk/python/scantable.py@ 2420

Last change on this file since 2420 was 2411, checked in by WataruKawasaki, 13 years ago

New Development: No

JIRA Issue: Yes CAS-3759

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: (1) in sinusoidal baselining procedures and related code, nchan info is no longer obtained on the Python side but on the C++ side.

(2) changed the default value of addwn from [] to [0] in sd.*_sinusoid_baseline() (i.e., at the toolkit level).


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 139.4 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2315]4import tempfile
[1948]5import numpy
[1691]6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
[1824]11from asap.env import is_casapy
[876]12from asap._asap import Scantable
[2004]13from asap._asap import filler, msfiller
[1824]14from asap.parameters import rcParams
[1862]15from asap.logging import asaplog, asaplog_post_dec
[1824]16from asap.selector import selector
17from asap.linecatalog import linecatalog
[1600]18from asap.coordinate import coordinate
[1859]19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]20from asap.asapfitter import fitter
[102]21
[2411]22###############################################################
23### WK temporarily added these lines for testing 2011/11/28 ###
24###############################################################
25#from asap._asap import TestClass
[1689]26
[2411]27#class testclass(TestClass):
28# def __init__(self, nelem):
29# TestClass.__init__(self, nelem)
30
31###############################################################
32
33
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(saved)
        return result
    return wrap
44
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info_path = filename + '/table.info'
    dat_path = filename + '/table.dat'
    if not (os.path.isdir(filename)
            and os.path.exists(info_path)
            and os.path.exists(dat_path)):
        return False
    f = open(info_path)
    try:
        first_line = f.readline()
    finally:
        f.close()
    # any casacore table that is not flagged as a Measurement Set
    # is taken to be a scantable
    return first_line.find('Measurement Set') == -1
[1697]69
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    info_path = filename + '/table.info'
    dat_path = filename + '/table.dat'
    if not (os.path.isdir(filename)
            and os.path.exists(info_path)
            and os.path.exists(dat_path)):
        return False
    f = open(info_path)
    try:
        first_line = f.readline()
    finally:
        f.close()
    # the first line of table.info names the table type
    return first_line.find('Measurement Set') != -1
[2186]90
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

          If an element contains the same integer values, the input 'edge'
          parameter can be given in a simpler shape in the following cases:
              ** when len(edge)!=2
                  any elements containing the same values can be replaced
                  to single integers.
                  e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                  e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
              ** when len(edge)=2
                  care is needed for this case: ONLY ONE of the
                  elements can be a single integer,
                  e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                       or [[5,5],10], but can NOT be simplified to [5,10].
                       when [5,10] given, it is interpreted as
                       [[5,10],[5,10],[5,10],...] instead, as shown before.

    Raises:

        ValueError if 'edge' is not an integer, a pair of integers, or a
        sequence of such pairs.
    """
    from asap import _is_sequence_or_number as _is_valid
    # ---- validation -----------------------------------------------------
    # NOTE: raise statements use the instance form raise E("msg"), which
    # is valid in both Python 2 and 3 (the old "raise E, msg" is 2-only).
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # ---- normalisation --------------------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # a flat pair of integers applies to all IFs ("common" edge)
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: expand single integers to [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
175
def raise_fitting_failure_exception(e):
    """Report a baseline-fit failure.

    In verbose mode the error and a hint are pushed to the log; otherwise
    a RuntimeError carrying the original error text is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
183
def pack_progress_params(showprogress, minnrow):
    """Encode *showprogress* and *minnrow* as the comma-separated string
    expected by the C++ progress-reporting interface.
    """
    return '%s,%s' % (str(showprogress).lower(), minnrow)
186
[876]187class scantable(Scantable):
[1846]188 """\
189 The ASAP container for scans (single-dish data).
[102]190 """
[1819]191
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of call parameters for the history record below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table: open directly
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other single file (rpfits/sdfits): generic fill
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all at once
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
            self._add_history("scantable", varlist)
[102]268
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Raises:

            IOError if the output exists and overwrite is False.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the rc-file default format
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats are handled by the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
327
[102]328 def copy(self):
[1846]329 """Return a copy of this scantable.
330
331 *Note*:
332
[1348]333 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]334
335 Example::
336
[102]337 copiedscan = scan.copy()
[1846]338
[102]339 """
[876]340 sd = scantable(Scantable._copy(self))
[113]341 return sd
342
[1093]343 def drop_scan(self, scanid=None):
[1846]344 """\
[1093]345 Return a new scantable where the specified scan number(s) has(have)
346 been dropped.
[1846]347
[1093]348 Parameters:
[1846]349
[1093]350 scanid: a (list of) scan number(s)
[1846]351
[1093]352 """
353 from asap import _is_sequence_or_number as _is_valid
354 from asap import _to_list
355 from asap import unique
356 if not _is_valid(scanid):
[2315]357 raise RuntimeError( 'Please specify a scanno to drop from the'
358 ' scantable' )
[1859]359 scanid = _to_list(scanid)
360 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
361 for sid in scanid: allscans.remove(sid)
362 if len(allscans) == 0:
363 raise ValueError("Can't remove all scans")
364 sel = selector(scans=allscans)
365 return self._select_copy(sel)
[1093]366
[1594]367 def _select_copy(self, selection):
368 orig = self.get_selection()
369 self.set_selection(orig+selection)
370 cp = self.copy()
371 self.set_selection(orig)
372 return cp
373
[102]374 def get_scan(self, scanid=None):
[1855]375 """\
[102]376 Return a specific scan (by scanno) or collection of scans (by
377 source name) in a new scantable.
[1846]378
379 *Note*:
380
[1348]381 See scantable.drop_scan() for the inverse operation.
[1846]382
[102]383 Parameters:
[1846]384
[513]385 scanid: a (list of) scanno or a source name, unix-style
386 patterns are accepted for source name matching, e.g.
387 '*_R' gets all 'ref scans
[1846]388
389 Example::
390
[513]391 # get all scans containing the source '323p459'
392 newscan = scan.get_scan('323p459')
393 # get all 'off' scans
394 refscans = scan.get_scan('*_R')
395 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]396 newscan = scan.get_scan([0, 2, 7, 10])
[1846]397
[102]398 """
399 if scanid is None:
[1859]400 raise RuntimeError( 'Please specify a scan no or name to '
401 'retrieve from the scantable' )
[102]402 try:
[946]403 bsel = self.get_selection()
404 sel = selector()
[102]405 if type(scanid) is str:
[946]406 sel.set_name(scanid)
[1594]407 return self._select_copy(sel)
[102]408 elif type(scanid) is int:
[946]409 sel.set_scans([scanid])
[1594]410 return self._select_copy(sel)
[381]411 elif type(scanid) is list:
[946]412 sel.set_scans(scanid)
[1594]413 return self._select_copy(sel)
[381]414 else:
[718]415 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]416 raise TypeError(msg)
[102]417 except RuntimeError:
[1859]418 raise
[102]419
420 def __str__(self):
[2315]421 tempFile = tempfile.NamedTemporaryFile()
422 Scantable._summary(self, tempFile.name)
423 tempFile.seek(0)
424 asaplog.clear()
425 return tempFile.file.read()
[102]426
[2315]427 @asaplog_post_dec
[976]428 def summary(self, filename=None):
[1846]429 """\
[102]430 Print a summary of the contents of this scantable.
[1846]431
[102]432 Parameters:
[1846]433
[1931]434 filename: the name of a file to write the putput to
[102]435 Default - no file output
[1846]436
[102]437 """
[2286]438# info = Scantable._summary(self)
[102]439 if filename is not None:
[256]440 if filename is "":
441 filename = 'scantable_summary.txt'
[415]442 from os.path import expandvars, isdir
[411]443 filename = expandvars(filename)
[2286]444# if not isdir(filename):
445# data = open(filename, 'w')
446# data.write(info)
447# data.close()
448# else:
449 if isdir(filename):
[718]450 msg = "Illegal file name '%s'." % (filename)
[1859]451 raise IOError(msg)
[2286]452 else:
453 filename = ""
454 Scantable._summary(self, filename)
[2290]455# info = Scantable._summary(self, filename)
[2286]456# return page(info)
[710]457
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        Returns:

             a list of the channel values of the selected row

        """
        return self._getspectrum(rowno)
[946]467
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        Returns:

             a list of booleans, one per channel of the selected row

        """
        return self._getmask(rowno)
477
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the current row in the scantable.

        Parameters:

             spec:   the new spectrum

             rowno:  the row number to set the spectrum for

        """
        # Guard: the new spectrum must match the channel count of the
        # row's IF.  NOTE(review): this is an assert, so it is stripped
        # when Python runs with -O; consider raising ValueError instead.
        assert(len(spec) == self.nchan(self.getif(rowno)))
        return self._setspectrum(spec, rowno)
490
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python-side helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
513
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Returns:

            a selector object wrapping the current selection

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)               # select IF 0
            scan.set_selection(sel)      # apply modified selection

        """
        return selector(self._getselection())
526
[1576]527 def set_selection(self, selection=None, **kw):
[1846]528 """\
[1005]529 Select a subset of the data. All following operations on this scantable
530 are only applied to thi selection.
[1846]531
[1005]532 Parameters:
[1697]533
[1846]534 selection: a selector object (default unset the selection), or
535 any combination of "pols", "ifs", "beams", "scans",
536 "cycles", "name", "query"
[1697]537
[1846]538 Examples::
[1697]539
[1005]540 sel = selector() # create a selection object
[1118]541 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]542 scan.set_selection(sel) # set the selection
543 scan.summary() # will only print summary of scanno 0 an 3
544 scan.set_selection() # unset the selection
[1697]545 # or the equivalent
546 scan.set_selection(scans=[0,3])
547 scan.summary() # will only print summary of scanno 0 an 3
548 scan.set_selection() # unset the selection
[1846]549
[946]550 """
[1576]551 if selection is None:
552 # reset
553 if len(kw) == 0:
554 selection = selector()
555 else:
556 # try keywords
557 for k in kw:
558 if k not in selector.fields:
[2320]559 raise KeyError("Invalid selection key '%s', "
560 "valid keys are %s" % (k,
561 selector.fields))
[1576]562 selection = selector(**kw)
[946]563 self._setselection(selection)
564
[1819]565 def get_row(self, row=0, insitu=None):
[1846]566 """\
[1819]567 Select a row in the scantable.
568 Return a scantable with single row.
[1846]569
[1819]570 Parameters:
[1846]571
572 row: row no of integration, default is 0.
573 insitu: if False a new scantable is returned. Otherwise, the
574 scaling is done in-situ. The default is taken from .asaprc
575 (False)
576
[1819]577 """
[2349]578 if insitu is None:
579 insitu = rcParams['insitu']
[1819]580 if not insitu:
581 workscan = self.copy()
582 else:
583 workscan = self
584 # Select a row
[2349]585 sel = selector()
[1992]586 sel.set_rows([row])
[1819]587 workscan.set_selection(sel)
588 if not workscan.nrow() == 1:
[2349]589 msg = "Could not identify single row. %d rows selected." \
590 % (workscan.nrow())
[1819]591 raise RuntimeError(msg)
592 if insitu:
593 self._assign(workscan)
594 else:
595 return workscan
596
[1862]597 @asaplog_post_dec
[1907]598 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]599 """\
[135]600 Determine the specified statistic of the current beam/if/pol
[102]601 Takes a 'mask' as an optional parameter to specify which
602 channels should be excluded.
[1846]603
[102]604 Parameters:
[1846]605
[1819]606 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
607 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]608
[135]609 mask: an optional mask specifying where the statistic
[102]610 should be determined.
[1855]611
[1819]612 form: format string to print statistic values
[1846]613
[1907]614 row: row number of spectrum to process.
615 (default is None: for all rows)
[1846]616
[1907]617 Example:
[113]618 scan.set_unit('channel')
[1118]619 msk = scan.create_mask([100, 200], [500, 600])
[135]620 scan.stats(stat='mean', mask=m)
[1846]621
[102]622 """
[1593]623 mask = mask or []
[876]624 if not self._check_ifs():
[1118]625 raise ValueError("Cannot apply mask as the IFs have different "
626 "number of channels. Please use setselection() "
627 "to select individual IFs")
[1819]628 rtnabc = False
629 if stat.lower().endswith('_abc'): rtnabc = True
630 getchan = False
631 if stat.lower().startswith('min') or stat.lower().startswith('max'):
632 chan = self._math._minmaxchan(self, mask, stat)
633 getchan = True
634 statvals = []
[1907]635 if not rtnabc:
636 if row == None:
637 statvals = self._math._stats(self, mask, stat)
638 else:
639 statvals = self._math._statsrow(self, mask, stat, int(row))
[256]640
[1819]641 #def cb(i):
642 # return statvals[i]
[256]643
[1819]644 #return self._row_callback(cb, stat)
[102]645
[1819]646 label=stat
647 #callback=cb
648 out = ""
649 #outvec = []
650 sep = '-'*50
[1907]651
652 if row == None:
653 rows = xrange(self.nrow())
654 elif isinstance(row, int):
655 rows = [ row ]
656
657 for i in rows:
[1819]658 refstr = ''
659 statunit= ''
660 if getchan:
661 qx, qy = self.chan2data(rowno=i, chan=chan[i])
662 if rtnabc:
663 statvals.append(qx['value'])
664 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
665 statunit= '['+qx['unit']+']'
666 else:
667 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
668
669 tm = self._gettime(i)
670 src = self._getsourcename(i)
671 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
672 out += 'Time[%s]:\n' % (tm)
[1907]673 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
674 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
675 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[1819]676 #outvec.append(callback(i))
[1907]677 if len(rows) > 1:
678 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
679 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
680 else:
681 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
682 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
[1819]683 out += sep+"\n"
684
[1859]685 import os
686 if os.environ.has_key( 'USER' ):
687 usr = os.environ['USER']
688 else:
689 import commands
690 usr = commands.getoutput( 'whoami' )
691 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
692 f = open(tmpfile,'w')
693 print >> f, sep
694 print >> f, ' %s %s' % (label, statunit)
695 print >> f, sep
696 print >> f, out
697 f.close()
698 f = open(tmpfile,'r')
699 x = f.readlines()
700 f.close()
701 asaplog.push(''.join(x), False)
702
[1819]703 return statvals
704
705 def chan2data(self, rowno=0, chan=0):
[1846]706 """\
[1819]707 Returns channel/frequency/velocity and spectral value
708 at an arbitrary row and channel in the scantable.
[1846]709
[1819]710 Parameters:
[1846]711
[1819]712 rowno: a row number in the scantable. Default is the
713 first row, i.e. rowno=0
[1855]714
[1819]715 chan: a channel in the scantable. Default is the first
716 channel, i.e. pos=0
[1846]717
[1819]718 """
719 if isinstance(rowno, int) and isinstance(chan, int):
720 qx = {'unit': self.get_unit(),
721 'value': self._getabcissa(rowno)[chan]}
722 qy = {'unit': self.get_fluxunit(),
723 'value': self._getspectrum(rowno)[chan]}
724 return qx, qy
725
[1118]726 def stddev(self, mask=None):
[1846]727 """\
[135]728 Determine the standard deviation of the current beam/if/pol
729 Takes a 'mask' as an optional parameter to specify which
730 channels should be excluded.
[1846]731
[135]732 Parameters:
[1846]733
[135]734 mask: an optional mask specifying where the standard
735 deviation should be determined.
736
[1846]737 Example::
738
[135]739 scan.set_unit('channel')
[1118]740 msk = scan.create_mask([100, 200], [500, 600])
[135]741 scan.stddev(mask=m)
[1846]742
[135]743 """
[1118]744 return self.stats(stat='stddev', mask=mask);
[135]745
[1003]746
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.

        Returns:

            a python list copy of the underlying table's column names

        """
        return list(Scantable.get_column_names(self))
[1003]752
[1730]753 def get_tsys(self, row=-1):
[1846]754 """\
[113]755 Return the System temperatures.
[1846]756
757 Parameters:
758
759 row: the rowno to get the information for. (default all rows)
760
[113]761 Returns:
[1846]762
[876]763 a list of Tsys values for the current selection
[1846]764
[113]765 """
[1730]766 if row > -1:
767 return self._get_column(self._gettsys, row)
[876]768 return self._row_callback(self._gettsys, "Tsys")
[256]769
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        return self._get_column( self._gettsysspectrum, row )
784
[1730]785 def get_weather(self, row=-1):
[1846]786 """\
787 Return the weather informations.
788
789 Parameters:
790
791 row: the rowno to get the information for. (default all rows)
792
793 Returns:
794
795 a dict or list of of dicts of values for the current selection
796
797 """
798
[1730]799 values = self._get_column(self._get_weather, row)
800 if row > -1:
801 return {'temperature': values[0],
802 'pressure': values[1], 'humidity' : values[2],
803 'windspeed' : values[3], 'windaz' : values[4]
804 }
805 else:
806 out = []
807 for r in values:
808
809 out.append({'temperature': r[0],
810 'pressure': r[1], 'humidity' : r[2],
811 'windspeed' : r[3], 'windaz' : r[4]
812 })
813 return out
814
    def _row_callback(self, callback, label):
        """Apply *callback* to every row, push a per-row text summary
        (titled *label*) to the log, and return the collected values.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that actually vary in this selection
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]838
    def _get_column(self, callback, row=-1, *args):
        """Fetch the result of *callback* for one row (row >= 0) or for
        every row (row == -1, the default).

        NOTE(review): an out-of-range row silently returns None here —
        callers should be aware.
        """
        if row == -1:
            return [callback(i, *args) for i in range(self.nrow())]
        else:
            if 0 <= row < self.nrow():
                return callback(row, *args)
[256]847
[1070]848
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1 from the
            # shortest integration time in the selection
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # trim the parse format (drop sub-seconds, then seconds, then
        # minutes) so it matches the precision _gettime produced
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]894
[1348]895
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]907
[1573]908
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]919
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float (in radians) for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]931
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float (in radians) for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]942
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]954
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
966
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
978
[1862]979 @asaplog_post_dec
[102]980 def set_unit(self, unit='channel'):
[1846]981 """\
[102]982 Set the unit for all following operations on this scantable
[1846]983
[102]984 Parameters:
[1846]985
986 unit: optional unit, default is 'channel'. Use one of '*Hz',
987 'km/s', 'channel' or equivalent ''
988
[102]989 """
[484]990 varlist = vars()
[1118]991 if unit in ['', 'pixel', 'channel']:
[113]992 unit = ''
993 inf = list(self._getcoordinfo())
994 inf[0] = unit
995 self._setcoordinfo(inf)
[1118]996 self._add_history("set_unit", varlist)
[113]997
[1862]998 @asaplog_post_dec
[484]999 def set_instrument(self, instr):
[1846]1000 """\
[1348]1001 Set the instrument for subsequent processing.
[1846]1002
[358]1003 Parameters:
[1846]1004
[710]1005 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1006 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1007
[358]1008 """
1009 self._setInstrument(instr)
[1118]1010 self._add_history("set_instument", vars())
[358]1011
[1862]1012 @asaplog_post_dec
[1190]1013 def set_feedtype(self, feedtype):
[1846]1014 """\
[1190]1015 Overwrite the feed type, which might not be set correctly.
[1846]1016
[1190]1017 Parameters:
[1846]1018
[1190]1019 feedtype: 'linear' or 'circular'
[1846]1020
[1190]1021 """
1022 self._setfeedtype(feedtype)
1023 self._add_history("set_feedtype", vars())
1024
[1862]1025 @asaplog_post_dec
[276]1026 def set_doppler(self, doppler='RADIO'):
[1846]1027 """\
[276]1028 Set the doppler for all following operations on this scantable.
[1846]1029
[276]1030 Parameters:
[1846]1031
[276]1032 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1033
[276]1034 """
[484]1035 varlist = vars()
[276]1036 inf = list(self._getcoordinfo())
1037 inf[2] = doppler
1038 self._setcoordinfo(inf)
[1118]1039 self._add_history("set_doppler", vars())
[710]1040
[1862]1041 @asaplog_post_dec
[226]1042 def set_freqframe(self, frame=None):
[1846]1043 """\
[113]1044 Set the frame type of the Spectral Axis.
[1846]1045
[113]1046 Parameters:
[1846]1047
[591]1048 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1049 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1050 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1051
1052 Example::
1053
[113]1054 scan.set_freqframe('BARY')
[1846]1055
[113]1056 """
[1593]1057 frame = frame or rcParams['scantable.freqframe']
[484]1058 varlist = vars()
[1819]1059 # "REST" is not implemented in casacore
1060 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1061 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1062 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1063 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1064
[989]1065 if frame in valid:
[113]1066 inf = list(self._getcoordinfo())
1067 inf[1] = frame
1068 self._setcoordinfo(inf)
[1118]1069 self._add_history("set_freqframe", varlist)
[102]1070 else:
[1118]1071 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1072 raise TypeError(msg)
[710]1073
[1862]1074 @asaplog_post_dec
[989]1075 def set_dirframe(self, frame=""):
[1846]1076 """\
[989]1077 Set the frame type of the Direction on the sky.
[1846]1078
[989]1079 Parameters:
[1846]1080
[989]1081 frame: an optional frame type, default ''. Valid frames are:
1082 'J2000', 'B1950', 'GALACTIC'
[1846]1083
1084 Example:
1085
[989]1086 scan.set_dirframe('GALACTIC')
[1846]1087
[989]1088 """
1089 varlist = vars()
[1859]1090 Scantable.set_dirframe(self, frame)
[1118]1091 self._add_history("set_dirframe", varlist)
[989]1092
[113]1093 def get_unit(self):
[1846]1094 """\
[113]1095 Get the default unit set in this scantable
[1846]1096
[113]1097 Returns:
[1846]1098
[113]1099 A unit string
[1846]1100
[113]1101 """
1102 inf = self._getcoordinfo()
1103 unit = inf[0]
1104 if unit == '': unit = 'channel'
1105 return unit
[102]1106
[1862]1107 @asaplog_post_dec
[158]1108 def get_abcissa(self, rowno=0):
[1846]1109 """\
[158]1110 Get the abcissa in the current coordinate setup for the currently
[113]1111 selected Beam/IF/Pol
[1846]1112
[113]1113 Parameters:
[1846]1114
[226]1115 rowno: an optional row number in the scantable. Default is the
1116 first row, i.e. rowno=0
[1846]1117
[113]1118 Returns:
[1846]1119
[1348]1120 The abcissa values and the format string (as a dictionary)
[1846]1121
[113]1122 """
[256]1123 abc = self._getabcissa(rowno)
[710]1124 lbl = self._getabcissalabel(rowno)
[158]1125 return abc, lbl
[113]1126
[1862]1127 @asaplog_post_dec
[2322]1128 def flag(self, mask=None, unflag=False, row=-1):
[1846]1129 """\
[1001]1130 Flag the selected data using an optional channel mask.
[1846]1131
[1001]1132 Parameters:
[1846]1133
[1001]1134 mask: an optional channel mask, created with create_mask. Default
1135 (no mask) is all channels.
[1855]1136
[1819]1137 unflag: if True, unflag the data
[1846]1138
[2322]1139 row: an optional row number in the scantable.
1140 Default -1 flags all rows
1141
[1001]1142 """
1143 varlist = vars()
[1593]1144 mask = mask or []
[1994]1145 self._flag(row, mask, unflag)
[1001]1146 self._add_history("flag", varlist)
1147
[1862]1148 @asaplog_post_dec
[2322]1149 def flag_row(self, rows=None, unflag=False):
[1846]1150 """\
[1819]1151 Flag the selected data in row-based manner.
[1846]1152
[1819]1153 Parameters:
[1846]1154
[1843]1155 rows: list of row numbers to be flagged. Default is no row
[2322]1156 (must be explicitly specified to execute row-based
1157 flagging).
[1855]1158
[1819]1159 unflag: if True, unflag the data.
[1846]1160
[1819]1161 """
1162 varlist = vars()
[2322]1163 if rows is None:
1164 rows = []
[1859]1165 self._flag_row(rows, unflag)
[1819]1166 self._add_history("flag_row", varlist)
1167
[1862]1168 @asaplog_post_dec
[1819]1169 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1170 """\
[1819]1171 Flag the selected data outside a specified range (in channel-base)
[1846]1172
[1819]1173 Parameters:
[1846]1174
[1819]1175 uthres: upper threshold.
[1855]1176
[1819]1177 dthres: lower threshold
[1846]1178
[2322]1179 clipoutside: True for flagging data outside the range
1180 [dthres:uthres].
[1928]1181 False for flagging data inside the range.
[1855]1182
[1846]1183 unflag: if True, unflag the data.
1184
[1819]1185 """
1186 varlist = vars()
[1859]1187 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1188 self._add_history("clip", varlist)
1189
[1862]1190 @asaplog_post_dec
[1584]1191 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1192 """\
[1192]1193 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1194 Flagged data in the scantable get interpolated over the region.
[1192]1195 No taper is applied.
[1846]1196
[1192]1197 Parameters:
[1846]1198
[1579]1199 start: the start frequency (really a period within the
1200 bandwidth) or period to remove
[1855]1201
[1579]1202 end: the end frequency or period to remove
[1855]1203
[1584]1204 unit: the frequency unit (default "MHz") or "" for
[1579]1205 explicit lag channels
[1846]1206
1207 *Notes*:
1208
[1579]1209 It is recommended to flag edges of the band or strong
[1348]1210 signals beforehand.
[1846]1211
[1192]1212 """
1213 if insitu is None: insitu = rcParams['insitu']
1214 self._math._setinsitu(insitu)
1215 varlist = vars()
[1579]1216 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1217 if not (unit == "" or base.has_key(unit)):
[1192]1218 raise ValueError("%s is not a valid unit." % unit)
[1859]1219 if unit == "":
1220 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1221 else:
1222 s = scantable(self._math._lag_flag(self, start*base[unit],
1223 end*base[unit], "frequency"))
[1192]1224 s._add_history("lag_flag", varlist)
1225 if insitu:
1226 self._assign(s)
1227 else:
1228 return s
[1001]1229
[1862]1230 @asaplog_post_dec
[2349]1231 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1232 """\
1233 Apply FFT to the spectra.
1234 Flagged data in the scantable get interpolated over the region.
1235
1236 Parameters:
[2186]1237
1238 rowno: The row number(s) to be processed. int, list
[2349]1239 and tuple are accepted. By default (None), FFT
[2186]1240 is applied to the whole data.
1241
1242 mask: Auxiliary channel mask(s). Given as a boolean
1243 list, it is applied to all specified rows.
1244 A list of boolean lists can also be used to
1245 apply different masks. In the latter case, the
1246 length of 'mask' must be the same as that of
[2349]1247 'rowno'. The default is None.
[2177]1248
1249 getrealimag: If True, returns the real and imaginary part
1250 values of the complex results.
1251 If False (the default), returns the amplitude
1252 (absolute value) normalised with Ndata/2 and
1253 phase (argument, in unit of radian).
1254
1255 Returns:
1256
[2186]1257 A list of dictionaries containing the results for each spectrum.
1258 Each dictionary contains two values, the real and the imaginary
1259 parts when getrealimag = True, or the amplitude(absolute value)
1260 and the phase(argument) when getrealimag = False. The key for
1261 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1262 respectively.
1263 """
[2349]1264 if rowno is None:
1265 rowno = []
[2177]1266 if isinstance(rowno, int):
1267 rowno = [rowno]
1268 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1269 raise TypeError("The row number(s) must be int, list or tuple.")
1270 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1271
[2411]1272 usecommonmask = True
1273
1274 if mask is None:
1275 mask = []
1276 if isinstance(mask, list) or isinstance(mask, tuple):
1277 if len(mask) == 0:
1278 mask = [[]]
1279 else:
1280 if isinstance(mask[0], bool):
1281 if len(mask) != self.nchan(self.getif(rowno[0])):
1282 raise ValueError("The spectra and the mask have "
1283 "different length.")
1284 mask = [mask]
1285 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1286 usecommonmask = False
1287 if len(mask) != len(rowno):
1288 raise ValueError("When specifying masks for each "
1289 "spectrum, the numbers of them "
1290 "must be identical.")
1291 for i in xrange(mask):
1292 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1293 raise ValueError("The spectra and the mask have "
1294 "different length.")
1295 else:
1296 raise TypeError("The mask must be a boolean list or "
1297 "a list of boolean list.")
1298 else:
[2349]1299 raise TypeError("The mask must be a boolean list or a list of "
1300 "boolean list.")
[2186]1301
1302 res = []
1303
1304 imask = 0
1305 for whichrow in rowno:
1306 fspec = self._fft(whichrow, mask[imask], getrealimag)
1307 nspec = len(fspec)
[2177]1308
[2349]1309 i = 0
1310 v1 = []
1311 v2 = []
1312 reselem = {"real":[],"imag":[]} if getrealimag \
1313 else {"ampl":[],"phase":[]}
[2177]1314
[2186]1315 while (i < nspec):
1316 v1.append(fspec[i])
1317 v2.append(fspec[i+1])
[2349]1318 i += 2
[2186]1319
[2177]1320 if getrealimag:
[2186]1321 reselem["real"] += v1
1322 reselem["imag"] += v2
[2177]1323 else:
[2186]1324 reselem["ampl"] += v1
1325 reselem["phase"] += v2
[2177]1326
[2186]1327 res.append(reselem)
1328
[2349]1329 if not usecommonmask:
1330 imask += 1
[2186]1331
[2177]1332 return res
1333
1334 @asaplog_post_dec
[113]1335 def create_mask(self, *args, **kwargs):
[1846]1336 """\
[1118]1337 Compute and return a mask based on [min, max] windows.
[189]1338 The specified windows are to be INCLUDED, when the mask is
[113]1339 applied.
[1846]1340
[102]1341 Parameters:
[1846]1342
[1118]1343 [min, max], [min2, max2], ...
[1024]1344 Pairs of start/end points (inclusive)specifying the regions
[102]1345 to be masked
[1855]1346
[189]1347 invert: optional argument. If specified as True,
1348 return an inverted mask, i.e. the regions
1349 specified are EXCLUDED
[1855]1350
[513]1351 row: create the mask using the specified row for
1352 unit conversions, default is row=0
1353 only necessary if frequency varies over rows.
[1846]1354
1355 Examples::
1356
[113]1357 scan.set_unit('channel')
[1846]1358 # a)
[1118]1359 msk = scan.create_mask([400, 500], [800, 900])
[189]1360 # masks everything outside 400 and 500
[113]1361 # and 800 and 900 in the unit 'channel'
1362
[1846]1363 # b)
[1118]1364 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1365 # masks the regions between 400 and 500
[113]1366 # and 800 and 900 in the unit 'channel'
[1846]1367
1368 # c)
1369 #mask only channel 400
[1554]1370 msk = scan.create_mask([400])
[1846]1371
[102]1372 """
[1554]1373 row = kwargs.get("row", 0)
[513]1374 data = self._getabcissa(row)
[113]1375 u = self._getcoordinfo()[0]
[1859]1376 if u == "":
1377 u = "channel"
1378 msg = "The current mask window unit is %s" % u
1379 i = self._check_ifs()
1380 if not i:
1381 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1382 asaplog.push(msg)
[2348]1383 n = len(data)
[1295]1384 msk = _n_bools(n, False)
[710]1385 # test if args is a 'list' or a 'normal *args - UGLY!!!
1386
[2349]1387 ws = (isinstance(args[-1][-1], int)
1388 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1389 for window in ws:
[1554]1390 if len(window) == 1:
1391 window = [window[0], window[0]]
1392 if len(window) == 0 or len(window) > 2:
[2349]1393 raise ValueError("A window needs to be defined as "
1394 "[start(, end)]")
[1545]1395 if window[0] > window[1]:
1396 tmp = window[0]
1397 window[0] = window[1]
1398 window[1] = tmp
[102]1399 for i in range(n):
[1024]1400 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1401 msk[i] = True
[113]1402 if kwargs.has_key('invert'):
1403 if kwargs.get('invert'):
[1295]1404 msk = mask_not(msk)
[102]1405 return msk
[710]1406
[1931]1407 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1408 """\
[1819]1409 Compute and return a list of mask windows, [min, max].
[1846]1410
[1819]1411 Parameters:
[1846]1412
[1819]1413 mask: channel mask, created with create_mask.
[1855]1414
[1819]1415 row: calcutate the masklist using the specified row
1416 for unit conversions, default is row=0
1417 only necessary if frequency varies over rows.
[1846]1418
[1819]1419 Returns:
[1846]1420
[1819]1421 [min, max], [min2, max2], ...
1422 Pairs of start/end points (inclusive)specifying
1423 the masked regions
[1846]1424
[1819]1425 """
1426 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1427 raise TypeError("The mask should be list or tuple.")
1428 if len(mask) < 2:
1429 raise TypeError("The mask elements should be > 1")
[2348]1430 data = self._getabcissa(row)
1431 if len(data) != len(mask):
[1819]1432 msg = "Number of channels in scantable != number of mask elements"
1433 raise TypeError(msg)
1434 u = self._getcoordinfo()[0]
[1859]1435 if u == "":
1436 u = "channel"
1437 msg = "The current mask window unit is %s" % u
1438 i = self._check_ifs()
1439 if not i:
1440 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1441 if not silent:
1442 asaplog.push(msg)
[2349]1443 masklist = []
[1819]1444 ist, ien = None, None
1445 ist, ien=self.get_mask_indices(mask)
1446 if ist is not None and ien is not None:
1447 for i in xrange(len(ist)):
1448 range=[data[ist[i]],data[ien[i]]]
1449 range.sort()
1450 masklist.append([range[0],range[1]])
1451 return masklist
1452
1453 def get_mask_indices(self, mask=None):
[1846]1454 """\
[1819]1455 Compute and Return lists of mask start indices and mask end indices.
[1855]1456
1457 Parameters:
1458
[1819]1459 mask: channel mask, created with create_mask.
[1846]1460
[1819]1461 Returns:
[1846]1462
[1819]1463 List of mask start indices and that of mask end indices,
1464 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1465
[1819]1466 """
1467 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1468 raise TypeError("The mask should be list or tuple.")
1469 if len(mask) < 2:
1470 raise TypeError("The mask elements should be > 1")
[2349]1471 istart = []
1472 iend = []
1473 if mask[0]:
1474 istart.append(0)
[1819]1475 for i in range(len(mask)-1):
1476 if not mask[i] and mask[i+1]:
1477 istart.append(i+1)
1478 elif mask[i] and not mask[i+1]:
1479 iend.append(i)
[2349]1480 if mask[len(mask)-1]:
1481 iend.append(len(mask)-1)
[1819]1482 if len(istart) != len(iend):
1483 raise RuntimeError("Numbers of mask start != mask end.")
1484 for i in range(len(istart)):
1485 if istart[i] > iend[i]:
1486 raise RuntimeError("Mask start index > mask end index")
1487 break
1488 return istart,iend
1489
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         '' = all IFs (all channels)
                         '<2,4~6,9' = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): on bad input this only logs via asaplog and then
        # continues; asaplog.post("ERROR") presumably aborts — confirm.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels);
        # str(list)[1:-1] turns e.g. [0, 1] into the selection "0, 1"
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # NOTE(review): 'selspws' is re-initialised for every spwlist,
            # so only the IFs of the *last* [min,max] pair survive this
            # loop — looks like the accumulation should span all pairs;
            # confirm against callers.
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                # NOTE(review): this branch is unreachable — when selspws
                # is empty the enclosing for-loop body never runs.
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to obtain its
                        # frequency coordinate and abcissa
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half of one channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half of one channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None when the whole spectrum is wanted
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1623
[2349]1624 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]1625 minval=None, maxval=None):
[2013]1626 """
1627 Parameters:
1628 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1629 typestr : The type of the values in returned list
1630 ('integer' or 'float')
1631 offset : The offset value to subtract from or add to
1632 the boundary value if the selection string
1633 includes '<' or '>'
1634 minval, maxval : The minimum/maximum values to set if the
1635 selection string includes '<' or '>'.
1636 The list element is filled with None by default.
1637 Returns:
1638 A list of min/max pair of selections.
1639 Example:
1640 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1641 returns [[0,2],[5,7],[9,9]]
1642 """
1643 selgroups = selstr.split(';')
1644 sellists = []
1645 if typestr.lower().startswith('int'):
1646 formatfunc = int
1647 else:
1648 formatfunc = float
1649
1650 for currsel in selgroups:
1651 if currsel.find('~') > 0:
1652 minsel = formatfunc(currsel.split('~')[0].strip())
1653 maxsel = formatfunc(currsel.split('~')[1].strip())
1654 elif currsel.strip().startswith('<'):
1655 minsel = minval
1656 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1657 - formatfunc(offset)
1658 elif currsel.strip().startswith('>'):
1659 minsel = formatfunc(currsel.split('>')[1].strip()) \
1660 + formatfunc(offset)
1661 maxsel = maxval
1662 else:
1663 minsel = formatfunc(currsel)
1664 maxsel = formatfunc(currsel)
1665 sellists.append([minsel,maxsel])
1666 return sellists
1667
[1819]1668# def get_restfreqs(self):
1669# """
1670# Get the restfrequency(s) stored in this scantable.
1671# The return value(s) are always of unit 'Hz'
1672# Parameters:
1673# none
1674# Returns:
1675# a list of doubles
1676# """
1677# return list(self._getrestfreqs())
1678
1679 def get_restfreqs(self, ids=None):
[1846]1680 """\
[256]1681 Get the restfrequency(s) stored in this scantable.
1682 The return value(s) are always of unit 'Hz'
[1846]1683
[256]1684 Parameters:
[1846]1685
[1819]1686 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1687 be retrieved
[1846]1688
[256]1689 Returns:
[1846]1690
[1819]1691 dictionary containing ids and a list of doubles for each id
[1846]1692
[256]1693 """
[1819]1694 if ids is None:
[2349]1695 rfreqs = {}
[1819]1696 idlist = self.getmolnos()
1697 for i in idlist:
[2349]1698 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1699 return rfreqs
1700 else:
[2349]1701 if type(ids) == list or type(ids) == tuple:
1702 rfreqs = {}
[1819]1703 for i in ids:
[2349]1704 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1705 return rfreqs
1706 else:
1707 return list(self._getrestfreqs(ids))
[102]1708
[2349]1709 @asaplog_post_dec
[931]1710 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1711 """\
[931]1712 Set or replace the restfrequency specified and
[1938]1713 if the 'freqs' argument holds a scalar,
[931]1714 then that rest frequency will be applied to all the selected
1715 data. If the 'freqs' argument holds
1716 a vector, then it MUST be of equal or smaller length than
1717 the number of IFs (and the available restfrequencies will be
1718 replaced by this vector). In this case, *all* data have
1719 the restfrequency set per IF according
1720 to the corresponding value you give in the 'freqs' vector.
[1118]1721 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1722 IF 1 gets restfreq 2e9.
[1846]1723
[1395]1724 You can also specify the frequencies via a linecatalog.
[1153]1725
[931]1726 Parameters:
[1846]1727
[931]1728 freqs: list of rest frequency values or string idenitfiers
[1855]1729
[931]1730 unit: unit for rest frequency (default 'Hz')
[402]1731
[1846]1732
1733 Example::
1734
[1819]1735 # set the given restfrequency for the all currently selected IFs
[931]1736 scan.set_restfreqs(freqs=1.4e9)
[1845]1737 # set restfrequencies for the n IFs (n > 1) in the order of the
1738 # list, i.e
1739 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1740 # len(list_of_restfreqs) == nIF
1741 # for nIF == 1 the following will set multiple restfrequency for
1742 # that IF
[1819]1743 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1744 # set multiple restfrequencies per IF. as a list of lists where
1745 # the outer list has nIF elements, the inner s arbitrary
1746 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1747
[1846]1748 *Note*:
[1845]1749
[931]1750 To do more sophisticate Restfrequency setting, e.g. on a
1751 source and IF basis, use scantable.set_selection() before using
[1846]1752 this function::
[931]1753
[1846]1754 # provided your scantable is called scan
1755 selection = selector()
1756 selection.set_name("ORION*")
1757 selection.set_ifs([1])
1758 scan.set_selection(selection)
1759 scan.set_restfreqs(freqs=86.6e9)
1760
[931]1761 """
1762 varlist = vars()
[1157]1763 from asap import linecatalog
1764 # simple value
[1118]1765 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1766 self._setrestfreqs([freqs], [""], unit)
[1157]1767 # list of values
[1118]1768 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1769 # list values are scalars
[1118]1770 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1771 if len(freqs) == 1:
1772 self._setrestfreqs(freqs, [""], unit)
1773 else:
1774 # allow the 'old' mode of setting mulitple IFs
1775 sel = selector()
1776 savesel = self._getselection()
1777 iflist = self.getifnos()
1778 if len(freqs)>len(iflist):
1779 raise ValueError("number of elements in list of list "
1780 "exeeds the current IF selections")
1781 iflist = self.getifnos()
1782 for i, fval in enumerate(freqs):
1783 sel.set_ifs(iflist[i])
1784 self._setselection(sel)
1785 self._setrestfreqs([fval], [""], unit)
1786 self._setselection(savesel)
1787
1788 # list values are dict, {'value'=, 'name'=)
[1157]1789 elif isinstance(freqs[-1], dict):
[1845]1790 values = []
1791 names = []
1792 for d in freqs:
1793 values.append(d["value"])
1794 names.append(d["name"])
1795 self._setrestfreqs(values, names, unit)
[1819]1796 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1797 sel = selector()
1798 savesel = self._getselection()
[1322]1799 iflist = self.getifnos()
[1819]1800 if len(freqs)>len(iflist):
[1845]1801 raise ValueError("number of elements in list of list exeeds"
1802 " the current IF selections")
1803 for i, fval in enumerate(freqs):
[1322]1804 sel.set_ifs(iflist[i])
[1259]1805 self._setselection(sel)
[1845]1806 self._setrestfreqs(fval, [""], unit)
[1157]1807 self._setselection(savesel)
1808 # freqs are to be taken from a linecatalog
[1153]1809 elif isinstance(freqs, linecatalog):
1810 sel = selector()
1811 savesel = self._getselection()
1812 for i in xrange(freqs.nrow()):
[1322]1813 sel.set_ifs(iflist[i])
[1153]1814 self._setselection(sel)
[1845]1815 self._setrestfreqs([freqs.get_frequency(i)],
1816 [freqs.get_name(i)], "MHz")
[1153]1817 # ensure that we are not iterating past nIF
1818 if i == self.nif()-1: break
1819 self._setselection(savesel)
[931]1820 else:
1821 return
1822 self._add_history("set_restfreqs", varlist)
1823
[2349]1824 @asaplog_post_dec
[1360]1825 def shift_refpix(self, delta):
[1846]1826 """\
[1589]1827 Shift the reference pixel of the Spectra Coordinate by an
1828 integer amount.
[1846]1829
[1589]1830 Parameters:
[1846]1831
[1589]1832 delta: the amount to shift by
[1846]1833
1834 *Note*:
1835
[1589]1836 Be careful using this with broadband data.
[1846]1837
[1360]1838 """
[2349]1839 varlist = vars()
[1731]1840 Scantable.shift_refpix(self, delta)
[2349]1841 s._add_history("shift_refpix", varlist)
[931]1842
[1862]1843 @asaplog_post_dec
[1259]1844 def history(self, filename=None):
[1846]1845 """\
[1259]1846 Print the history. Optionally to a file.
[1846]1847
[1348]1848 Parameters:
[1846]1849
[1928]1850 filename: The name of the file to save the history to.
[1846]1851
[1259]1852 """
[484]1853 hist = list(self._gethistory())
[794]1854 out = "-"*80
[484]1855 for h in hist:
[489]1856 if h.startswith("---"):
[1857]1857 out = "\n".join([out, h])
[489]1858 else:
1859 items = h.split("##")
1860 date = items[0]
1861 func = items[1]
1862 items = items[2:]
[794]1863 out += "\n"+date+"\n"
1864 out += "Function: %s\n Parameters:" % (func)
[489]1865 for i in items:
[1938]1866 if i == '':
1867 continue
[489]1868 s = i.split("=")
[1118]1869 out += "\n %s = %s" % (s[0], s[1])
[1857]1870 out = "\n".join([out, "-"*80])
[1259]1871 if filename is not None:
1872 if filename is "":
1873 filename = 'scantable_history.txt'
1874 import os
1875 filename = os.path.expandvars(os.path.expanduser(filename))
1876 if not os.path.isdir(filename):
1877 data = open(filename, 'w')
1878 data.write(out)
1879 data.close()
1880 else:
1881 msg = "Illegal file name '%s'." % (filename)
[1859]1882 raise IOError(msg)
1883 return page(out)
[2349]1884
[513]1885 #
1886 # Maths business
1887 #
[1862]1888 @asaplog_post_dec
[931]1889 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1890 """\
[2349]1891 Return the (time) weighted average of a scan. Scans will be averaged
1892 only if the source direction (RA/DEC) is within 1' otherwise
[1846]1893
1894 *Note*:
1895
[1070]1896 in channels only - align if necessary
[1846]1897
[513]1898 Parameters:
[1846]1899
[513]1900 mask: an optional mask (only used for 'var' and 'tsys'
1901 weighting)
[1855]1902
[558]1903 scanav: True averages each scan separately
1904 False (default) averages all scans together,
[1855]1905
[1099]1906 weight: Weighting scheme.
1907 'none' (mean no weight)
1908 'var' (1/var(spec) weighted)
1909 'tsys' (1/Tsys**2 weighted)
1910 'tint' (integration time weighted)
1911 'tintsys' (Tint/Tsys**2)
1912 'median' ( median averaging)
[535]1913 The default is 'tint'
[1855]1914
[931]1915 align: align the spectra in velocity before averaging. It takes
1916 the time of the first spectrum as reference time.
[1846]1917
1918 Example::
1919
[513]1920 # time average the scantable without using a mask
[710]1921 newscan = scan.average_time()
[1846]1922
[513]1923 """
1924 varlist = vars()
[1593]1925 weight = weight or 'TINT'
1926 mask = mask or ()
1927 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1928 scan = (self, )
[1859]1929
1930 if align:
1931 scan = (self.freq_align(insitu=False), )
1932 s = None
1933 if weight.upper() == 'MEDIAN':
1934 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1935 scanav))
1936 else:
1937 s = scantable(self._math._average(scan, mask, weight.upper(),
1938 scanav))
[1099]1939 s._add_history("average_time", varlist)
[513]1940 return s
[710]1941
[1862]1942 @asaplog_post_dec
[876]1943 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1944 """\
[513]1945 Return a scan where all spectra are converted to either
1946 Jansky or Kelvin depending upon the flux units of the scan table.
1947 By default the function tries to look the values up internally.
1948 If it can't find them (or if you want to over-ride), you must
1949 specify EITHER jyperk OR eta (and D which it will try to look up
1950 also if you don't set it). jyperk takes precedence if you set both.
[1846]1951
[513]1952 Parameters:
[1846]1953
[513]1954 jyperk: the Jy / K conversion factor
[1855]1955
[513]1956 eta: the aperture efficiency
[1855]1957
[1928]1958 d: the geometric diameter (metres)
[1855]1959
[513]1960 insitu: if False a new scantable is returned.
1961 Otherwise, the scaling is done in-situ
1962 The default is taken from .asaprc (False)
[1846]1963
[513]1964 """
1965 if insitu is None: insitu = rcParams['insitu']
[876]1966 self._math._setinsitu(insitu)
[513]1967 varlist = vars()
[1593]1968 jyperk = jyperk or -1.0
1969 d = d or -1.0
1970 eta = eta or -1.0
[876]1971 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1972 s._add_history("convert_flux", varlist)
1973 if insitu: self._assign(s)
1974 else: return s
[513]1975
[1862]1976 @asaplog_post_dec
[876]1977 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1978 """\
[513]1979 Return a scan after applying a gain-elevation correction.
1980 The correction can be made via either a polynomial or a
1981 table-based interpolation (and extrapolation if necessary).
1982 You specify polynomial coefficients, an ascii table or neither.
1983 If you specify neither, then a polynomial correction will be made
1984 with built in coefficients known for certain telescopes (an error
1985 will occur if the instrument is not known).
1986 The data and Tsys are *divided* by the scaling factors.
[1846]1987
[513]1988 Parameters:
[1846]1989
[513]1990 poly: Polynomial coefficients (default None) to compute a
1991 gain-elevation correction as a function of
1992 elevation (in degrees).
[1855]1993
[513]1994 filename: The name of an ascii file holding correction factors.
1995 The first row of the ascii file must give the column
1996 names and these MUST include columns
1997 "ELEVATION" (degrees) and "FACTOR" (multiply data
1998 by this) somewhere.
1999 The second row must give the data type of the
2000 column. Use 'R' for Real and 'I' for Integer.
2001 An example file would be
2002 (actual factors are arbitrary) :
2003
2004 TIME ELEVATION FACTOR
2005 R R R
2006 0.1 0 0.8
2007 0.2 20 0.85
2008 0.3 40 0.9
2009 0.4 60 0.85
2010 0.5 80 0.8
2011 0.6 90 0.75
[1855]2012
[513]2013 method: Interpolation method when correcting from a table.
2014 Values are "nearest", "linear" (default), "cubic"
2015 and "spline"
[1855]2016
[513]2017 insitu: if False a new scantable is returned.
2018 Otherwise, the scaling is done in-situ
2019 The default is taken from .asaprc (False)
[1846]2020
[513]2021 """
2022
2023 if insitu is None: insitu = rcParams['insitu']
[876]2024 self._math._setinsitu(insitu)
[513]2025 varlist = vars()
[1593]2026 poly = poly or ()
[513]2027 from os.path import expandvars
2028 filename = expandvars(filename)
[876]2029 s = scantable(self._math._gainel(self, poly, filename, method))
2030 s._add_history("gain_el", varlist)
[1593]2031 if insitu:
2032 self._assign(s)
2033 else:
2034 return s
[710]2035
[1862]2036 @asaplog_post_dec
[931]2037 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2038 """\
[513]2039 Return a scan where all rows have been aligned in frequency/velocity.
2040 The alignment frequency frame (e.g. LSRK) is that set by function
2041 set_freqframe.
[1846]2042
[513]2043 Parameters:
[1855]2044
[513]2045 reftime: reference time to align at. By default, the time of
2046 the first row of data is used.
[1855]2047
[513]2048 method: Interpolation method for regridding the spectra.
2049 Choose from "nearest", "linear", "cubic" (default)
2050 and "spline"
[1855]2051
[513]2052 insitu: if False a new scantable is returned.
2053 Otherwise, the scaling is done in-situ
2054 The default is taken from .asaprc (False)
[1846]2055
[513]2056 """
[931]2057 if insitu is None: insitu = rcParams["insitu"]
[876]2058 self._math._setinsitu(insitu)
[513]2059 varlist = vars()
[1593]2060 reftime = reftime or ""
[931]2061 s = scantable(self._math._freq_align(self, reftime, method))
[876]2062 s._add_history("freq_align", varlist)
[2349]2063 if insitu:
2064 self._assign(s)
2065 else:
2066 return s
[513]2067
[1862]2068 @asaplog_post_dec
[1725]2069 def opacity(self, tau=None, insitu=None):
[1846]2070 """\
[513]2071 Apply an opacity correction. The data
2072 and Tsys are multiplied by the correction factor.
[1846]2073
[513]2074 Parameters:
[1855]2075
[1689]2076 tau: (list of) opacity from which the correction factor is
[513]2077 exp(tau*ZD)
[1689]2078 where ZD is the zenith-distance.
2079 If a list is provided, it has to be of length nIF,
2080 nIF*nPol or 1 and in order of IF/POL, e.g.
2081 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2082 if tau is `None` the opacities are determined from a
2083 model.
[1855]2084
[513]2085 insitu: if False a new scantable is returned.
2086 Otherwise, the scaling is done in-situ
2087 The default is taken from .asaprc (False)
[1846]2088
[513]2089 """
[2349]2090 if insitu is None:
2091 insitu = rcParams['insitu']
[876]2092 self._math._setinsitu(insitu)
[513]2093 varlist = vars()
[1689]2094 if not hasattr(tau, "__len__"):
2095 tau = [tau]
[876]2096 s = scantable(self._math._opacity(self, tau))
2097 s._add_history("opacity", varlist)
[2349]2098 if insitu:
2099 self._assign(s)
2100 else:
2101 return s
[513]2102
[1862]2103 @asaplog_post_dec
[513]2104 def bin(self, width=5, insitu=None):
[1846]2105 """\
[513]2106 Return a scan where all spectra have been binned up.
[1846]2107
[1348]2108 Parameters:
[1846]2109
[513]2110 width: The bin width (default=5) in pixels
[1855]2111
[513]2112 insitu: if False a new scantable is returned.
2113 Otherwise, the scaling is done in-situ
2114 The default is taken from .asaprc (False)
[1846]2115
[513]2116 """
[2349]2117 if insitu is None:
2118 insitu = rcParams['insitu']
[876]2119 self._math._setinsitu(insitu)
[513]2120 varlist = vars()
[876]2121 s = scantable(self._math._bin(self, width))
[1118]2122 s._add_history("bin", varlist)
[1589]2123 if insitu:
2124 self._assign(s)
2125 else:
2126 return s
[513]2127
[1862]2128 @asaplog_post_dec
[513]2129 def resample(self, width=5, method='cubic', insitu=None):
[1846]2130 """\
[1348]2131 Return a scan where all spectra have been binned up.
[1573]2132
[1348]2133 Parameters:
[1846]2134
[513]2135 width: The bin width (default=5) in pixels
[1855]2136
[513]2137 method: Interpolation method when correcting from a table.
2138 Values are "nearest", "linear", "cubic" (default)
2139 and "spline"
[1855]2140
[513]2141 insitu: if False a new scantable is returned.
2142 Otherwise, the scaling is done in-situ
2143 The default is taken from .asaprc (False)
[1846]2144
[513]2145 """
[2349]2146 if insitu is None:
2147 insitu = rcParams['insitu']
[876]2148 self._math._setinsitu(insitu)
[513]2149 varlist = vars()
[876]2150 s = scantable(self._math._resample(self, method, width))
[1118]2151 s._add_history("resample", varlist)
[2349]2152 if insitu:
2153 self._assign(s)
2154 else:
2155 return s
[513]2156
[1862]2157 @asaplog_post_dec
[946]2158 def average_pol(self, mask=None, weight='none'):
[1846]2159 """\
[946]2160 Average the Polarisations together.
[1846]2161
[946]2162 Parameters:
[1846]2163
[946]2164 mask: An optional mask defining the region, where the
2165 averaging will be applied. The output will have all
2166 specified points masked.
[1855]2167
[946]2168 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2169 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2170
[946]2171 """
2172 varlist = vars()
[1593]2173 mask = mask or ()
[1010]2174 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2175 s._add_history("average_pol", varlist)
[992]2176 return s
[513]2177
[1862]2178 @asaplog_post_dec
[1145]2179 def average_beam(self, mask=None, weight='none'):
[1846]2180 """\
[1145]2181 Average the Beams together.
[1846]2182
[1145]2183 Parameters:
2184 mask: An optional mask defining the region, where the
2185 averaging will be applied. The output will have all
2186 specified points masked.
[1855]2187
[1145]2188 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2189 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2190
[1145]2191 """
2192 varlist = vars()
[1593]2193 mask = mask or ()
[1145]2194 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2195 s._add_history("average_beam", varlist)
2196 return s
2197
[1586]2198 def parallactify(self, pflag):
[1846]2199 """\
[1843]2200 Set a flag to indicate whether this data should be treated as having
[1617]2201 been 'parallactified' (total phase == 0.0)
[1846]2202
[1617]2203 Parameters:
[1855]2204
[1843]2205 pflag: Bool indicating whether to turn this on (True) or
[1617]2206 off (False)
[1846]2207
[1617]2208 """
[1586]2209 varlist = vars()
2210 self._parallactify(pflag)
2211 self._add_history("parallactify", varlist)
2212
[1862]2213 @asaplog_post_dec
[992]2214 def convert_pol(self, poltype=None):
[1846]2215 """\
[992]2216 Convert the data to a different polarisation type.
[1565]2217 Note that you will need cross-polarisation terms for most conversions.
[1846]2218
[992]2219 Parameters:
[1855]2220
[992]2221 poltype: The new polarisation type. Valid types are:
[1565]2222 "linear", "circular", "stokes" and "linpol"
[1846]2223
[992]2224 """
2225 varlist = vars()
[1859]2226 s = scantable(self._math._convertpol(self, poltype))
[1118]2227 s._add_history("convert_pol", varlist)
[992]2228 return s
2229
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive review: show original vs smoothed spectrum for
            # every row and let the user accept or reject each result
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # re-register the two drawn lines so the plotter's legend
                # machinery picks them up (it is reset again below)
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                    )
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2307
[2186]2308 @asaplog_post_dec
2309 def _parse_wn(self, wn):
2310 if isinstance(wn, list) or isinstance(wn, tuple):
2311 return wn
2312 elif isinstance(wn, int):
2313 return [ wn ]
2314 elif isinstance(wn, str):
[2277]2315 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2316 val = wn.split('-')
2317 val = [int(val[0]), int(val[1])]
2318 val.sort()
2319 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2320 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2321 val = int(wn[2:])+1
2322 res = [i for i in xrange(val)]
[2277]2323 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2324 val = int(wn[:-2])+1
2325 res = [i for i in xrange(val)]
[2277]2326 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2327 val = int(wn[1:])
2328 res = [i for i in xrange(val)]
[2277]2329 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2330 val = int(wn[:-1])
2331 res = [i for i in xrange(val)]
[2411]2332 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2333 # then interpreted in C++
2334 # side as [a,a+1,...,a_nyq]
2335 # (CAS-3759)
[2186]2336 val = int(wn[2:])
[2411]2337 res = [val, -999]
2338 #res = [i for i in xrange(val, self.nchan()/2+1)]
2339 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2340 # then interpreted in C++
2341 # side as [a,a+1,...,a_nyq]
2342 # (CAS-3759)
[2186]2343 val = int(wn[:-2])
[2411]2344 res = [val, -999]
2345 #res = [i for i in xrange(val, self.nchan()/2+1)]
2346 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2347 # then interpreted in C++
2348 # side as [a+1,a+2,...,a_nyq]
2349 # (CAS-3759)
[2186]2350 val = int(wn[1:])+1
[2411]2351 res = [val, -999]
2352 #res = [i for i in xrange(val, self.nchan()/2+1)]
2353 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2354 # then interpreted in C++
2355 # side as [a+1,a+2,...,a_nyq]
2356 # (CAS-3759)
[2186]2357 val = int(wn[:-1])+1
[2411]2358 res = [val, -999]
2359 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2360
[2186]2361 return res
2362 else:
2363 msg = 'wrong value given for addwn/rejwn'
2364 raise RuntimeError(msg)
2365
2366
[1862]2367 @asaplog_post_dec
[2277]2368 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
[2269]2369 fftmethod=None, fftthresh=None,
2370 addwn=None, rejwn=None, clipthresh=None,
2371 clipniter=None, plot=None,
2372 getresidual=None, showprogress=None,
2373 minnrow=None, outlog=None, blfile=None):
[2047]2374 """\
[2349]2375 Return a scan which has been baselined (all rows) with sinusoidal
2376 functions.
2377
[2047]2378 Parameters:
[2186]2379 insitu: if False a new scantable is returned.
[2081]2380 Otherwise, the scaling is done in-situ
2381 The default is taken from .asaprc (False)
[2186]2382 mask: an optional mask
2383 applyfft: if True use some method, such as FFT, to find
2384 strongest sinusoidal components in the wavenumber
2385 domain to be used for baseline fitting.
2386 default is True.
2387 fftmethod: method to find the strong sinusoidal components.
2388 now only 'fft' is available and it is the default.
2389 fftthresh: the threshold to select wave numbers to be used for
2390 fitting from the distribution of amplitudes in the
2391 wavenumber domain.
2392 both float and string values accepted.
2393 given a float value, the unit is set to sigma.
2394 for string values, allowed formats include:
[2349]2395 'xsigma' or 'x' (= x-sigma level. e.g.,
2396 '3sigma'), or
[2186]2397 'topx' (= the x strongest ones, e.g. 'top5').
2398 default is 3.0 (unit: sigma).
2399 addwn: the additional wave numbers to be used for fitting.
2400 list or integer value is accepted to specify every
2401 wave numbers. also string value can be used in case
2402 you need to specify wave numbers in a certain range,
2403 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2404 '<a' (= 0,1,...,a-2,a-1),
2405 '>=a' (= a, a+1, ... up to the maximum wave
2406 number corresponding to the Nyquist
2407 frequency for the case of FFT).
[2411]2408 default is [0].
[2186]2409 rejwn: the wave numbers NOT to be used for fitting.
2410 can be set just as addwn but has higher priority:
2411 wave numbers which are specified both in addwn
2412 and rejwn will NOT be used. default is [].
[2081]2413 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]2414 clipniter: maximum number of iteration of 'clipthresh'-sigma
2415 clipping (default is 0)
[2081]2416 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2417 plot the fit and the residual. In this each
2418 indivual fit has to be approved, by typing 'y'
2419 or 'n'
2420 getresidual: if False, returns best-fit values instead of
2421 residual. (default is True)
[2189]2422 showprogress: show progress status for large data.
2423 default is True.
2424 minnrow: minimum number of input spectra to show.
2425 default is 1000.
[2081]2426 outlog: Output the coefficients of the best-fit
2427 function to logger (default is False)
2428 blfile: Name of a text file in which the best-fit
2429 parameter values to be written
[2186]2430 (default is '': no file/logger output)
[2047]2431
2432 Example:
[2349]2433 # return a scan baselined by a combination of sinusoidal curves
2434 # having wave numbers in spectral window up to 10,
[2047]2435 # also with 3-sigma clipping, iteration up to 4 times
[2186]2436 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]2437
2438 Note:
2439 The best-fit parameter values output in logger and/or blfile are now
2440 based on specunit of 'channel'.
[2047]2441 """
2442
[2186]2443 try:
2444 varlist = vars()
[2047]2445
[2186]2446 if insitu is None: insitu = rcParams['insitu']
2447 if insitu:
2448 workscan = self
2449 else:
2450 workscan = self.copy()
2451
[2410]2452 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2453 if mask is None: mask = []
[2186]2454 if applyfft is None: applyfft = True
2455 if fftmethod is None: fftmethod = 'fft'
2456 if fftthresh is None: fftthresh = 3.0
[2411]2457 if addwn is None: addwn = [0]
[2186]2458 if rejwn is None: rejwn = []
2459 if clipthresh is None: clipthresh = 3.0
2460 if clipniter is None: clipniter = 0
2461 if plot is None: plot = False
2462 if getresidual is None: getresidual = True
[2189]2463 if showprogress is None: showprogress = True
2464 if minnrow is None: minnrow = 1000
[2186]2465 if outlog is None: outlog = False
2466 if blfile is None: blfile = ''
[2047]2467
[2081]2468 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2349]2469 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2470 str(fftthresh).lower(),
2471 workscan._parse_wn(addwn),
2472 workscan._parse_wn(rejwn), clipthresh,
2473 clipniter, getresidual,
2474 pack_progress_params(showprogress,
2475 minnrow), outlog,
2476 blfile)
[2186]2477 workscan._add_history('sinusoid_baseline', varlist)
[2047]2478
2479 if insitu:
2480 self._assign(workscan)
2481 else:
2482 return workscan
2483
2484 except RuntimeError, e:
[2186]2485 raise_fitting_failure_exception(e)
[2047]2486
2487
[2186]2488 @asaplog_post_dec
[2349]2489 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2490 fftmethod=None, fftthresh=None,
2491 addwn=None, rejwn=None, clipthresh=None,
2492 clipniter=None, edge=None, threshold=None,
2493 chan_avg_limit=None, plot=None,
2494 getresidual=None, showprogress=None,
2495 minnrow=None, outlog=None, blfile=None):
[2047]2496 """\
[2349]2497 Return a scan which has been baselined (all rows) with sinusoidal
2498 functions.
[2047]2499 Spectral lines are detected first using linefinder and masked out
2500 to avoid them affecting the baseline solution.
2501
2502 Parameters:
[2189]2503 insitu: if False a new scantable is returned.
2504 Otherwise, the scaling is done in-situ
2505 The default is taken from .asaprc (False)
2506 mask: an optional mask retreived from scantable
2507 applyfft: if True use some method, such as FFT, to find
2508 strongest sinusoidal components in the wavenumber
2509 domain to be used for baseline fitting.
2510 default is True.
2511 fftmethod: method to find the strong sinusoidal components.
2512 now only 'fft' is available and it is the default.
2513 fftthresh: the threshold to select wave numbers to be used for
2514 fitting from the distribution of amplitudes in the
2515 wavenumber domain.
2516 both float and string values accepted.
2517 given a float value, the unit is set to sigma.
2518 for string values, allowed formats include:
[2349]2519 'xsigma' or 'x' (= x-sigma level. e.g.,
2520 '3sigma'), or
[2189]2521 'topx' (= the x strongest ones, e.g. 'top5').
2522 default is 3.0 (unit: sigma).
2523 addwn: the additional wave numbers to be used for fitting.
2524 list or integer value is accepted to specify every
2525 wave numbers. also string value can be used in case
2526 you need to specify wave numbers in a certain range,
2527 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2528 '<a' (= 0,1,...,a-2,a-1),
2529 '>=a' (= a, a+1, ... up to the maximum wave
2530 number corresponding to the Nyquist
2531 frequency for the case of FFT).
[2411]2532 default is [0].
[2189]2533 rejwn: the wave numbers NOT to be used for fitting.
2534 can be set just as addwn but has higher priority:
2535 wave numbers which are specified both in addwn
2536 and rejwn will NOT be used. default is [].
2537 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]2538 clipniter: maximum number of iteration of 'clipthresh'-sigma
2539 clipping (default is 0)
[2189]2540 edge: an optional number of channel to drop at
2541 the edge of spectrum. If only one value is
2542 specified, the same number will be dropped
2543 from both sides of the spectrum. Default
2544 is to keep all channels. Nested tuples
2545 represent individual edge selection for
2546 different IFs (a number of spectral channels
2547 can be different)
2548 threshold: the threshold used by line finder. It is
2549 better to keep it large as only strong lines
2550 affect the baseline solution.
2551 chan_avg_limit: a maximum number of consequtive spectral
2552 channels to average during the search of
2553 weak and broad lines. The default is no
2554 averaging (and no search for weak lines).
2555 If such lines can affect the fitted baseline
2556 (e.g. a high order polynomial is fitted),
2557 increase this parameter (usually values up
2558 to 8 are reasonable). Most users of this
2559 method should find the default value sufficient.
2560 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2561 plot the fit and the residual. In this each
2562 indivual fit has to be approved, by typing 'y'
2563 or 'n'
2564 getresidual: if False, returns best-fit values instead of
2565 residual. (default is True)
2566 showprogress: show progress status for large data.
2567 default is True.
2568 minnrow: minimum number of input spectra to show.
2569 default is 1000.
2570 outlog: Output the coefficients of the best-fit
2571 function to logger (default is False)
2572 blfile: Name of a text file in which the best-fit
2573 parameter values to be written
2574 (default is "": no file/logger output)
[2047]2575
2576 Example:
[2186]2577 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2578
2579 Note:
2580 The best-fit parameter values output in logger and/or blfile are now
2581 based on specunit of 'channel'.
[2047]2582 """
2583
[2186]2584 try:
2585 varlist = vars()
[2047]2586
[2186]2587 if insitu is None: insitu = rcParams['insitu']
2588 if insitu:
2589 workscan = self
[2047]2590 else:
[2186]2591 workscan = self.copy()
2592
[2410]2593 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2594 if mask is None: mask = []
[2186]2595 if applyfft is None: applyfft = True
2596 if fftmethod is None: fftmethod = 'fft'
2597 if fftthresh is None: fftthresh = 3.0
[2411]2598 if addwn is None: addwn = [0]
[2186]2599 if rejwn is None: rejwn = []
2600 if clipthresh is None: clipthresh = 3.0
2601 if clipniter is None: clipniter = 0
2602 if edge is None: edge = (0,0)
2603 if threshold is None: threshold = 3
2604 if chan_avg_limit is None: chan_avg_limit = 1
2605 if plot is None: plot = False
2606 if getresidual is None: getresidual = True
[2189]2607 if showprogress is None: showprogress = True
2608 if minnrow is None: minnrow = 1000
[2186]2609 if outlog is None: outlog = False
2610 if blfile is None: blfile = ''
[2047]2611
[2277]2612 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2349]2613 workscan._auto_sinusoid_baseline(mask, applyfft,
2614 fftmethod.lower(),
2615 str(fftthresh).lower(),
2616 workscan._parse_wn(addwn),
2617 workscan._parse_wn(rejwn),
2618 clipthresh, clipniter,
2619 normalise_edge_param(edge),
2620 threshold, chan_avg_limit,
2621 getresidual,
2622 pack_progress_params(showprogress,
2623 minnrow),
2624 outlog, blfile)
[2047]2625 workscan._add_history("auto_sinusoid_baseline", varlist)
2626
2627 if insitu:
2628 self._assign(workscan)
2629 else:
2630 return workscan
2631
2632 except RuntimeError, e:
[2186]2633 raise_fitting_failure_exception(e)
[2047]2634
2635 @asaplog_post_dec
[2349]2636 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2637 clipthresh=None, clipniter=None, plot=None,
2638 getresidual=None, showprogress=None, minnrow=None,
2639 outlog=None, blfile=None):
[1846]2640 """\
[2349]2641 Return a scan which has been baselined (all rows) by cubic spline
2642 function (piecewise cubic polynomial).
2643
[513]2644 Parameters:
[2189]2645 insitu: If False a new scantable is returned.
2646 Otherwise, the scaling is done in-situ
2647 The default is taken from .asaprc (False)
2648 mask: An optional mask
2649 npiece: Number of pieces. (default is 2)
2650 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]2651 clipniter: maximum number of iteration of 'clipthresh'-sigma
2652 clipping (default is 0)
[2189]2653 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2654 plot the fit and the residual. In this each
2655 indivual fit has to be approved, by typing 'y'
2656 or 'n'
2657 getresidual: if False, returns best-fit values instead of
2658 residual. (default is True)
2659 showprogress: show progress status for large data.
2660 default is True.
2661 minnrow: minimum number of input spectra to show.
2662 default is 1000.
2663 outlog: Output the coefficients of the best-fit
2664 function to logger (default is False)
2665 blfile: Name of a text file in which the best-fit
2666 parameter values to be written
2667 (default is "": no file/logger output)
[1846]2668
[2012]2669 Example:
[2349]2670 # return a scan baselined by a cubic spline consisting of 2 pieces
2671 # (i.e., 1 internal knot),
[2012]2672 # also with 3-sigma clipping, iteration up to 4 times
2673 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2674
2675 Note:
2676 The best-fit parameter values output in logger and/or blfile are now
2677 based on specunit of 'channel'.
[2012]2678 """
2679
[2186]2680 try:
2681 varlist = vars()
2682
2683 if insitu is None: insitu = rcParams['insitu']
2684 if insitu:
2685 workscan = self
2686 else:
2687 workscan = self.copy()
[1855]2688
[2410]2689 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2690 if mask is None: mask = []
[2189]2691 if npiece is None: npiece = 2
2692 if clipthresh is None: clipthresh = 3.0
2693 if clipniter is None: clipniter = 0
2694 if plot is None: plot = False
2695 if getresidual is None: getresidual = True
2696 if showprogress is None: showprogress = True
2697 if minnrow is None: minnrow = 1000
2698 if outlog is None: outlog = False
2699 if blfile is None: blfile = ''
[1855]2700
[2012]2701 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2349]2702 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2703 getresidual,
2704 pack_progress_params(showprogress,
2705 minnrow), outlog,
2706 blfile)
[2012]2707 workscan._add_history("cspline_baseline", varlist)
2708
2709 if insitu:
2710 self._assign(workscan)
2711 else:
2712 return workscan
2713
2714 except RuntimeError, e:
[2186]2715 raise_fitting_failure_exception(e)
[1855]2716
[2186]2717 @asaplog_post_dec
[2349]2718 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2719 clipthresh=None, clipniter=None,
2720 edge=None, threshold=None, chan_avg_limit=None,
2721 getresidual=None, plot=None,
2722 showprogress=None, minnrow=None, outlog=None,
2723 blfile=None):
[2012]2724 """\
2725 Return a scan which has been baselined (all rows) by cubic spline
2726 function (piecewise cubic polynomial).
2727 Spectral lines are detected first using linefinder and masked out
2728 to avoid them affecting the baseline solution.
2729
2730 Parameters:
[2189]2731 insitu: if False a new scantable is returned.
2732 Otherwise, the scaling is done in-situ
2733 The default is taken from .asaprc (False)
2734 mask: an optional mask retreived from scantable
2735 npiece: Number of pieces. (default is 2)
2736 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]2737 clipniter: maximum number of iteration of 'clipthresh'-sigma
2738 clipping (default is 0)
[2189]2739 edge: an optional number of channel to drop at
2740 the edge of spectrum. If only one value is
2741 specified, the same number will be dropped
2742 from both sides of the spectrum. Default
2743 is to keep all channels. Nested tuples
2744 represent individual edge selection for
2745 different IFs (a number of spectral channels
2746 can be different)
2747 threshold: the threshold used by line finder. It is
2748 better to keep it large as only strong lines
2749 affect the baseline solution.
2750 chan_avg_limit: a maximum number of consequtive spectral
2751 channels to average during the search of
2752 weak and broad lines. The default is no
2753 averaging (and no search for weak lines).
2754 If such lines can affect the fitted baseline
2755 (e.g. a high order polynomial is fitted),
2756 increase this parameter (usually values up
2757 to 8 are reasonable). Most users of this
2758 method should find the default value sufficient.
2759 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2760 plot the fit and the residual. In this each
2761 indivual fit has to be approved, by typing 'y'
2762 or 'n'
2763 getresidual: if False, returns best-fit values instead of
2764 residual. (default is True)
2765 showprogress: show progress status for large data.
2766 default is True.
2767 minnrow: minimum number of input spectra to show.
2768 default is 1000.
2769 outlog: Output the coefficients of the best-fit
2770 function to logger (default is False)
2771 blfile: Name of a text file in which the best-fit
2772 parameter values to be written
2773 (default is "": no file/logger output)
[1846]2774
[1907]2775 Example:
[2012]2776 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2777
2778 Note:
2779 The best-fit parameter values output in logger and/or blfile are now
2780 based on specunit of 'channel'.
[2012]2781 """
[1846]2782
[2186]2783 try:
2784 varlist = vars()
[2012]2785
[2186]2786 if insitu is None: insitu = rcParams['insitu']
2787 if insitu:
2788 workscan = self
[1391]2789 else:
[2186]2790 workscan = self.copy()
2791
[2410]2792 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2793 if mask is None: mask = []
[2186]2794 if npiece is None: npiece = 2
2795 if clipthresh is None: clipthresh = 3.0
2796 if clipniter is None: clipniter = 0
2797 if edge is None: edge = (0, 0)
2798 if threshold is None: threshold = 3
2799 if chan_avg_limit is None: chan_avg_limit = 1
2800 if plot is None: plot = False
2801 if getresidual is None: getresidual = True
[2189]2802 if showprogress is None: showprogress = True
2803 if minnrow is None: minnrow = 1000
[2186]2804 if outlog is None: outlog = False
2805 if blfile is None: blfile = ''
[1819]2806
[2277]2807 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2269]2808 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2809 clipniter,
2810 normalise_edge_param(edge),
2811 threshold,
2812 chan_avg_limit, getresidual,
2813 pack_progress_params(showprogress,
2814 minnrow),
2815 outlog, blfile)
[2012]2816 workscan._add_history("auto_cspline_baseline", varlist)
[1907]2817
[1856]2818 if insitu:
2819 self._assign(workscan)
2820 else:
2821 return workscan
[2012]2822
2823 except RuntimeError, e:
[2186]2824 raise_fitting_failure_exception(e)
[513]2825
[1931]2826 @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # snapshot of the call arguments for the history table
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # an empty mask list means "use all channels" on the C++ side
            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive mode: fit row-by-row with the Python fitter
                # so each fit can be inspected and accepted or rejected.
                # NOTE(review): parameters are appended to blfile only when
                # the file already exists — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's flag mask
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected fits leave the spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive: delegate the whole fit to the C++ side
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]2942
[2186]2943 @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # snapshot of the call arguments for the history table
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # an empty mask list means "use all channels" on the C++ side
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # expand the edge spec into one (left, right) pair per IF
            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: run the line finder and fit row-by-row
                # so each fit can be inspected and accepted or rejected.
                # NOTE(review): parameters are appended to blfile only when
                # the file already exists — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # edge holds flattened (left, right) pairs indexed by IF
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2]) # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only channels the line finder left unmasked
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected fits leave the spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                    # release the plotter resources
                    f._p.unmap()
                    f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: delegate the whole fit to the C++ side
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]3094
3095 def _init_blinfo(self):
3096 """\
3097 Initialise the following three auxiliary members:
3098 blpars : parameters of the best-fit baseline,
3099 masklists : mask data (edge positions of masked channels) and
3100 actualmask : mask data (in boolean list),
3101 to keep for use later (including output to logger/text files).
3102 Used by poly_baseline() and auto_poly_baseline() in case of
3103 'plot=True'.
3104 """
3105 self.blpars = []
3106 self.masklists = []
3107 self.actualmask = []
3108 return
[880]3109
[2012]3110 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3111 """\
3112 Append baseline-fitting related info to blpars, masklist and
3113 actualmask.
3114 """
3115 self.blpars.append(data_blpars)
3116 self.masklists.append(data_masklists)
3117 self.actualmask.append(data_actualmask)
3118 return
3119
[1862]3120 @asaplog_post_dec
[914]3121 def rotate_linpolphase(self, angle):
[1846]3122 """\
[914]3123 Rotate the phase of the complex polarization O=Q+iU correlation.
3124 This is always done in situ in the raw data. So if you call this
3125 function more than once then each call rotates the phase further.
[1846]3126
[914]3127 Parameters:
[1846]3128
[914]3129 angle: The angle (degrees) to rotate (add) by.
[1846]3130
3131 Example::
3132
[914]3133 scan.rotate_linpolphase(2.3)
[1846]3134
[914]3135 """
3136 varlist = vars()
[936]3137 self._math._rotate_linpolphase(self, angle)
[914]3138 self._add_history("rotate_linpolphase", varlist)
3139 return
[710]3140
[1862]3141 @asaplog_post_dec
[914]3142 def rotate_xyphase(self, angle):
[1846]3143 """\
[914]3144 Rotate the phase of the XY correlation. This is always done in situ
3145 in the data. So if you call this function more than once
3146 then each call rotates the phase further.
[1846]3147
[914]3148 Parameters:
[1846]3149
[914]3150 angle: The angle (degrees) to rotate (add) by.
[1846]3151
3152 Example::
3153
[914]3154 scan.rotate_xyphase(2.3)
[1846]3155
[914]3156 """
3157 varlist = vars()
[936]3158 self._math._rotate_xyphase(self, angle)
[914]3159 self._add_history("rotate_xyphase", varlist)
3160 return
3161
[1862]3162 @asaplog_post_dec
[914]3163 def swap_linears(self):
[1846]3164 """\
[1573]3165 Swap the linear polarisations XX and YY, or better the first two
[1348]3166 polarisations as this also works for ciculars.
[914]3167 """
3168 varlist = vars()
[936]3169 self._math._swap_linears(self)
[914]3170 self._add_history("swap_linears", varlist)
3171 return
3172
[1862]3173 @asaplog_post_dec
[914]3174 def invert_phase(self):
[1846]3175 """\
[914]3176 Invert the phase of the complex polarisation
3177 """
3178 varlist = vars()
[936]3179 self._math._invert_phase(self)
[914]3180 self._add_history("invert_phase", varlist)
3181 return
3182
[1862]3183 @asaplog_post_dec
[876]3184 def add(self, offset, insitu=None):
[1846]3185 """\
[513]3186 Return a scan where all spectra have the offset added
[1846]3187
[513]3188 Parameters:
[1846]3189
[513]3190 offset: the offset
[1855]3191
[513]3192 insitu: if False a new scantable is returned.
3193 Otherwise, the scaling is done in-situ
3194 The default is taken from .asaprc (False)
[1846]3195
[513]3196 """
3197 if insitu is None: insitu = rcParams['insitu']
[876]3198 self._math._setinsitu(insitu)
[513]3199 varlist = vars()
[876]3200 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3201 s._add_history("add", varlist)
[876]3202 if insitu:
3203 self._assign(s)
3204 else:
[513]3205 return s
3206
[1862]3207 @asaplog_post_dec
[1308]3208 def scale(self, factor, tsys=True, insitu=None):
[1846]3209 """\
3210
[1938]3211 Return a scan where all spectra are scaled by the given 'factor'
[1846]3212
[513]3213 Parameters:
[1846]3214
[1819]3215 factor: the scaling factor (float or 1D float list)
[1855]3216
[513]3217 insitu: if False a new scantable is returned.
3218 Otherwise, the scaling is done in-situ
3219 The default is taken from .asaprc (False)
[1855]3220
[513]3221 tsys: if True (default) then apply the operation to Tsys
3222 as well as the data
[1846]3223
[513]3224 """
3225 if insitu is None: insitu = rcParams['insitu']
[876]3226 self._math._setinsitu(insitu)
[513]3227 varlist = vars()
[1819]3228 s = None
3229 import numpy
3230 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]3231 if isinstance(factor[0], list) or isinstance(factor[0],
3232 numpy.ndarray):
[1819]3233 from asapmath import _array2dOp
[2320]3234 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]3235 else:
[2320]3236 s = scantable( self._math._arrayop( self, factor,
3237 "MUL", tsys ) )
[1819]3238 else:
[2320]3239 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]3240 s._add_history("scale", varlist)
[876]3241 if insitu:
3242 self._assign(s)
3243 else:
[513]3244 return s
3245
[2349]3246 @preserve_selection
3247 def set_sourcetype(self, match, matchtype="pattern",
[1504]3248 sourcetype="reference"):
[1846]3249 """\
[1502]3250 Set the type of the source to be an source or reference scan
[1846]3251 using the provided pattern.
3252
[1502]3253 Parameters:
[1846]3254
[1504]3255 match: a Unix style pattern, regular expression or selector
[1855]3256
[1504]3257 matchtype: 'pattern' (default) UNIX style pattern or
3258 'regex' regular expression
[1855]3259
[1502]3260 sourcetype: the type of the source to use (source/reference)
[1846]3261
[1502]3262 """
3263 varlist = vars()
3264 basesel = self.get_selection()
3265 stype = -1
3266 if sourcetype.lower().startswith("r"):
3267 stype = 1
3268 elif sourcetype.lower().startswith("s"):
3269 stype = 0
[1504]3270 else:
[1502]3271 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3272 if matchtype.lower().startswith("p"):
3273 matchtype = "pattern"
3274 elif matchtype.lower().startswith("r"):
3275 matchtype = "regex"
3276 else:
3277 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3278 sel = selector()
3279 if isinstance(match, selector):
3280 sel = match
3281 else:
[1504]3282 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3283 self._setsourcetype(stype)
[1573]3284 self._add_history("set_sourcetype", varlist)
[1502]3285
[1862]3286 @asaplog_post_dec
[1857]3287 @preserve_selection
[1819]3288 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]3289 """\
[670]3290 This function allows to build quotients automatically.
[1819]3291 It assumes the observation to have the same number of
[670]3292 "ons" and "offs"
[1846]3293
[670]3294 Parameters:
[1846]3295
[710]3296 preserve: you can preserve (default) the continuum or
3297 remove it. The equations used are
[1857]3298
[670]3299 preserve: Output = Toff * (on/off) - Toff
[1857]3300
[1070]3301 remove: Output = Toff * (on/off) - Ton
[1855]3302
[1573]3303 mode: the on/off detection mode
[1348]3304 'paired' (default)
3305 identifies 'off' scans by the
3306 trailing '_R' (Mopra/Parkes) or
3307 '_e'/'_w' (Tid) and matches
3308 on/off pairs from the observing pattern
[1502]3309 'time'
3310 finds the closest off in time
[1348]3311
[1857]3312 .. todo:: verify argument is not implemented
3313
[670]3314 """
[1857]3315 varlist = vars()
[1348]3316 modes = ["time", "paired"]
[670]3317 if not mode in modes:
[876]3318 msg = "please provide valid mode. Valid modes are %s" % (modes)
3319 raise ValueError(msg)
[1348]3320 s = None
3321 if mode.lower() == "paired":
[1857]3322 sel = self.get_selection()
[1875]3323 sel.set_query("SRCTYPE==psoff")
[1356]3324 self.set_selection(sel)
[1348]3325 offs = self.copy()
[1875]3326 sel.set_query("SRCTYPE==pson")
[1356]3327 self.set_selection(sel)
[1348]3328 ons = self.copy()
3329 s = scantable(self._math._quotient(ons, offs, preserve))
3330 elif mode.lower() == "time":
3331 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]3332 s._add_history("auto_quotient", varlist)
[876]3333 return s
[710]3334
[1862]3335 @asaplog_post_dec
[1145]3336 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3337 """\
[1143]3338 Form a quotient using "off" beams when observing in "MX" mode.
[1846]3339
[1143]3340 Parameters:
[1846]3341
[1145]3342 mask: an optional mask to be used when weight == 'stddev'
[1855]3343
[1143]3344 weight: How to average the off beams. Default is 'median'.
[1855]3345
[1145]3346 preserve: you can preserve (default) the continuum or
[1855]3347 remove it. The equations used are:
[1846]3348
[1855]3349 preserve: Output = Toff * (on/off) - Toff
3350
3351 remove: Output = Toff * (on/off) - Ton
3352
[1217]3353 """
[1593]3354 mask = mask or ()
[1141]3355 varlist = vars()
3356 on = scantable(self._math._mx_extract(self, 'on'))
[1143]3357 preoff = scantable(self._math._mx_extract(self, 'off'))
3358 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3359 from asapmath import quotient
[1145]3360 q = quotient(on, off, preserve)
[1143]3361 q._add_history("mx_quotient", varlist)
[1217]3362 return q
[513]3363
[1862]3364 @asaplog_post_dec
[718]3365 def freq_switch(self, insitu=None):
[1846]3366 """\
[718]3367 Apply frequency switching to the data.
[1846]3368
[718]3369 Parameters:
[1846]3370
[718]3371 insitu: if False a new scantable is returned.
3372 Otherwise, the swictching is done in-situ
3373 The default is taken from .asaprc (False)
[1846]3374
[718]3375 """
3376 if insitu is None: insitu = rcParams['insitu']
[876]3377 self._math._setinsitu(insitu)
[718]3378 varlist = vars()
[876]3379 s = scantable(self._math._freqswitch(self))
[1118]3380 s._add_history("freq_switch", varlist)
[1856]3381 if insitu:
3382 self._assign(s)
3383 else:
3384 return s
[718]3385
[1862]3386 @asaplog_post_dec
[780]3387 def recalc_azel(self):
[1846]3388 """Recalculate the azimuth and elevation for each position."""
[780]3389 varlist = vars()
[876]3390 self._recalcazel()
[780]3391 self._add_history("recalc_azel", varlist)
3392 return
3393
[1862]3394 @asaplog_post_dec
[513]3395 def __add__(self, other):
3396 varlist = vars()
3397 s = None
3398 if isinstance(other, scantable):
[1573]3399 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3400 elif isinstance(other, float):
[876]3401 s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3402 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3403 if isinstance(other[0], list) \
3404 or isinstance(other[0], numpy.ndarray):
[2144]3405 from asapmath import _array2dOp
3406 s = _array2dOp( self.copy(), other, "ADD", False )
3407 else:
[2349]3408 s = scantable( self._math._arrayop( self.copy(), other,
3409 "ADD", False ) )
[513]3410 else:
[718]3411 raise TypeError("Other input is not a scantable or float value")
[513]3412 s._add_history("operator +", varlist)
3413 return s
3414
[1862]3415 @asaplog_post_dec
[513]3416 def __sub__(self, other):
3417 """
3418 implicit on all axes and on Tsys
3419 """
3420 varlist = vars()
3421 s = None
3422 if isinstance(other, scantable):
[1588]3423 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3424 elif isinstance(other, float):
[876]3425 s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3426 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3427 if isinstance(other[0], list) \
3428 or isinstance(other[0], numpy.ndarray):
[2144]3429 from asapmath import _array2dOp
3430 s = _array2dOp( self.copy(), other, "SUB", False )
3431 else:
[2349]3432 s = scantable( self._math._arrayop( self.copy(), other,
3433 "SUB", False ) )
[513]3434 else:
[718]3435 raise TypeError("Other input is not a scantable or float value")
[513]3436 s._add_history("operator -", varlist)
3437 return s
[710]3438
[1862]3439 @asaplog_post_dec
[513]3440 def __mul__(self, other):
3441 """
3442 implicit on all axes and on Tsys
3443 """
3444 varlist = vars()
3445 s = None
3446 if isinstance(other, scantable):
[1588]3447 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3448 elif isinstance(other, float):
[876]3449 s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3450 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3451 if isinstance(other[0], list) \
3452 or isinstance(other[0], numpy.ndarray):
[2144]3453 from asapmath import _array2dOp
3454 s = _array2dOp( self.copy(), other, "MUL", False )
3455 else:
[2349]3456 s = scantable( self._math._arrayop( self.copy(), other,
3457 "MUL", False ) )
[513]3458 else:
[718]3459 raise TypeError("Other input is not a scantable or float value")
[513]3460 s._add_history("operator *", varlist)
3461 return s
3462
[710]3463
[1862]3464 @asaplog_post_dec
[513]3465 def __div__(self, other):
3466 """
3467 implicit on all axes and on Tsys
3468 """
3469 varlist = vars()
3470 s = None
3471 if isinstance(other, scantable):
[1589]3472 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3473 elif isinstance(other, float):
3474 if other == 0.0:
[718]3475 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3476 s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3477 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3478 if isinstance(other[0], list) \
3479 or isinstance(other[0], numpy.ndarray):
[2144]3480 from asapmath import _array2dOp
3481 s = _array2dOp( self.copy(), other, "DIV", False )
3482 else:
[2349]3483 s = scantable( self._math._arrayop( self.copy(), other,
3484 "DIV", False ) )
[513]3485 else:
[718]3486 raise TypeError("Other input is not a scantable or float value")
[513]3487 s._add_history("operator /", varlist)
3488 return s
3489
[1862]3490 @asaplog_post_dec
[530]3491 def get_fit(self, row=0):
[1846]3492 """\
[530]3493 Print or return the stored fits for a row in the scantable
[1846]3494
[530]3495 Parameters:
[1846]3496
[530]3497 row: the row which the fit has been applied to.
[1846]3498
[530]3499 """
3500 if row > self.nrow():
3501 return
[976]3502 from asap.asapfit import asapfit
[530]3503 fit = asapfit(self._getfit(row))
[1859]3504 asaplog.push( '%s' %(fit) )
3505 return fit.as_dict()
[530]3506
[2349]3507 @preserve_selection
[1483]3508 def flag_nans(self):
[1846]3509 """\
[1483]3510 Utility function to flag NaN values in the scantable.
3511 """
3512 import numpy
3513 basesel = self.get_selection()
3514 for i in range(self.nrow()):
[1589]3515 sel = self.get_row_selector(i)
3516 self.set_selection(basesel+sel)
[1483]3517 nans = numpy.isnan(self._getspectrum(0))
3518 if numpy.any(nans):
3519 bnans = [ bool(v) for v in nans]
3520 self.flag(bnans)
3521
[1588]3522 def get_row_selector(self, rowno):
[1992]3523 return selector(rows=[rowno])
[1573]3524
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable's history recording the
        operation *funcname* and its *parameters* (normally the vars()
        snapshot taken at the top of the calling method).  Does nothing
        when rcParams['scantable.history'] is disabled.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # the bound instance carries no useful history information;
        # note this mutates the caller-supplied dict
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # dict-valued parameter: record each item as key=value
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid dumping a whole table representation
                        hist += 'scantable'
                    elif k2 == 'mask':
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            # store masks compactly as [start, end) runs
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    # avoid dumping a whole table representation
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        # store masks compactly as [start, end) runs
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3565
[710]3566
[484]3567 def _zip_mask(self, mask):
3568 mask = list(mask)
3569 i = 0
3570 segments = []
3571 while mask[i:].count(1):
3572 i += mask[i:].index(1)
3573 if mask[i:].count(0):
3574 j = i + mask[i:].index(0)
3575 else:
[710]3576 j = len(mask)
[1118]3577 segments.append([i, j])
[710]3578 i = j
[484]3579 return segments
[714]3580
[626]3581 def _get_ordinate_label(self):
3582 fu = "("+self.get_fluxunit()+")"
3583 import re
3584 lbl = "Intensity"
[1118]3585 if re.match(".K.", fu):
[626]3586 lbl = "Brightness Temperature "+ fu
[1118]3587 elif re.match(".Jy.", fu):
[626]3588 lbl = "Flux density "+ fu
3589 return lbl
[710]3590
[876]3591 def _check_ifs(self):
[2349]3592# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]3593 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3594 nchans = filter(lambda t: t > 0, nchans)
[876]3595 return (sum(nchans)/len(nchans) == nchans[0])
[976]3596
[1862]3597 @asaplog_post_dec
[1916]3598 def _fill(self, names, unit, average, opts={}):
[976]3599 first = True
3600 fullnames = []
3601 for name in names:
3602 name = os.path.expandvars(name)
3603 name = os.path.expanduser(name)
3604 if not os.path.exists(name):
3605 msg = "File '%s' does not exists" % (name)
3606 raise IOError(msg)
3607 fullnames.append(name)
3608 if average:
3609 asaplog.push('Auto averaging integrations')
[1079]3610 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3611 for name in fullnames:
[1073]3612 tbl = Scantable(stype)
[2004]3613 if is_ms( name ):
3614 r = msfiller( tbl )
3615 else:
3616 r = filler( tbl )
3617 rx = rcParams['scantable.reference']
3618 r.setreferenceexpr(rx)
[976]3619 msg = "Importing %s..." % (name)
[1118]3620 asaplog.push(msg, False)
[2349]3621 r.open(name, opts)
[1843]3622 r.fill()
[976]3623 if average:
[1118]3624 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3625 if not first:
3626 tbl = self._math._merge([self, tbl])
3627 Scantable.__init__(self, tbl)
[1843]3628 r.close()
[1118]3629 del r, tbl
[976]3630 first = False
[1861]3631 #flush log
3632 asaplog.post()
[976]3633 if unit is not None:
3634 self.set_fluxunit(unit)
[1824]3635 if not is_casapy():
3636 self.set_freqframe(rcParams['scantable.freqframe'])
[976]3637
[2012]3638
[1402]3639 def __getitem__(self, key):
3640 if key < 0:
3641 key += self.nrow()
3642 if key >= self.nrow():
3643 raise IndexError("Row index out of range.")
3644 return self._getspectrum(key)
3645
3646 def __setitem__(self, key, value):
3647 if key < 0:
3648 key += self.nrow()
3649 if key >= self.nrow():
3650 raise IndexError("Row index out of range.")
3651 if not hasattr(value, "__len__") or \
3652 len(value) > self.nchan(self.getif(key)):
3653 raise ValueError("Spectrum length doesn't match.")
3654 return self._setspectrum(value, key)
3655
3656 def __len__(self):
3657 return self.nrow()
3658
3659 def __iter__(self):
3660 for i in range(len(self)):
3661 yield self[i]
Note: See TracBrowser for help on using the repository browser.