source: trunk/python/scantable.py@ 2493

Last change on this file since 2493 was 2480, checked in by Malte Marquarding, 13 years ago

Ticket #269: fix for custom reference scan expressions not being honoured.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 141.8 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2315]4import tempfile
[1948]5import numpy
[1691]6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
[1824]11from asap.env import is_casapy
[876]12from asap._asap import Scantable
[2004]13from asap._asap import filler, msfiller
[1824]14from asap.parameters import rcParams
[1862]15from asap.logging import asaplog, asaplog_post_dec
[1824]16from asap.selector import selector
17from asap.linecatalog import linecatalog
[1600]18from asap.coordinate import coordinate
[1859]19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]20from asap.asapfitter import fitter
[102]21
[2411]22###############################################################
23### WK temporarily added these lines for testing 2011/11/28 ###
24###############################################################
25#from asap._asap import TestClass
[1689]26
[2411]27#class testclass(TestClass):
28# def __init__(self, nelem):
29# TestClass.__init__(self, nelem)
30
31###############################################################
32
33
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The selection active on entry is re-applied in a ``finally`` clause,
    so it is restored even when the wrapped call raises.
    """
    @wraps_dec(func)
    def wrapper(scan, *args, **kwargs):
        saved = scan.get_selection()
        try:
            result = func(scan, *args, **kwargs)
        finally:
            scan.set_selection(saved)
        return result
    return wrapper
44
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    dat = filename + '/table.dat'
    if not (os.path.isdir(filename) and os.path.exists(info)
            and os.path.exists(dat)):
        return False
    f = open(info)
    first = f.readline()
    f.close()
    # any casa table that is not a MeasurementSet is treated
    # as a scantable here
    return first.find('Measurement Set') == -1
[1697]69
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    dat = filename + '/table.dat'
    if not (os.path.isdir(filename) and os.path.exists(info)
            and os.path.exists(dat)):
        return False
    f = open(info)
    first = f.readline()
    f.close()
    # the table.info header names the table type on its first line
    return first.find('Measurement Set') != -1
[2186]90
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15]
                      (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                           or [[5,5],10], but can NOT be simplified to [5,10].
                           when [5,10] given, it is interpreted as
                           [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validate the input shape -------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                # was 'raise ValueError, msg' (removed Python 2-only
                # comma syntax); message also carried stray whitespace
                # from backslash continuations inside the literal
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- canonicalise scalars and short sequences ---------------------
    if isinstance(edge, int):
        edge = [edge, edge]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [edge[0], edge[0]]        # e.g. [1] => [1,1]

    # a 'common' edge applies to every IF; any nested pair (or more than
    # two entries) switches to per-IF mode
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # expand each per-IF entry to an explicit [left, right] pair
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
175
def raise_fitting_failure_exception(e):
    """Report a fitting failure.

    In verbose mode the original error and a hint are pushed to the log;
    otherwise a RuntimeError combining both is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
183
def pack_progress_params(showprogress, minnrow):
    """Encode progress options as the 'true|false,<minnrow>' string the
    C++ layer expects."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
186
[876]187class scantable(Scantable):
[1846]188 """\
189 The ASAP container for scans (single-dish data).
[102]190 """
[1819]191
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # rc default used when parallactify is None/False
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of call arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set
                    # forward only the ms-specific keyword arguments
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # anything else readable by the filler (rpfits/sdfits)
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
            # NOTE(review): placement of the two trailing calls is
            # reconstructed from a mangled listing - confirm against trunk
            self.parallactify(parallactify)
            self._add_history("scantable", varlist)
[102]268
[1862]269 @asaplog_post_dec
[876]270 def save(self, name=None, format=None, overwrite=False):
[1846]271 """\
[1280]272 Store the scantable on disk. This can be an asap (aips++) Table,
273 SDFITS or MS2 format.
[1846]274
[116]275 Parameters:
[1846]276
[2431]277 name: the name of the outputfile. For format 'ASCII'
[1093]278 this is the root file name (data in 'name'.txt
[497]279 and header in 'name'_header.txt)
[1855]280
[116]281 format: an optional file format. Default is ASAP.
[1855]282 Allowed are:
283
284 * 'ASAP' (save as ASAP [aips++] Table),
285 * 'SDFITS' (save as SDFITS file)
286 * 'ASCII' (saves as ascii text file)
287 * 'MS2' (saves as an casacore MeasurementSet V2)
[2315]288 * 'FITS' (save as image FITS - not readable by
289 class)
[1855]290 * 'CLASS' (save as FITS readable by CLASS)
291
[411]292 overwrite: If the file should be overwritten if it exists.
[256]293 The default False is to return with warning
[411]294 without writing the output. USE WITH CARE.
[1855]295
[1846]296 Example::
297
[116]298 scan.save('myscan.asap')
[1118]299 scan.save('myscan.sdfits', 'SDFITS')
[1846]300
[116]301 """
[411]302 from os import path
[1593]303 format = format or rcParams['scantable.save']
[256]304 suffix = '.'+format.lower()
[1118]305 if name is None or name == "":
[256]306 name = 'scantable'+suffix
[718]307 msg = "No filename given. Using default name %s..." % name
308 asaplog.push(msg)
[411]309 name = path.expandvars(name)
[256]310 if path.isfile(name) or path.isdir(name):
311 if not overwrite:
[718]312 msg = "File %s exists." % name
[1859]313 raise IOError(msg)
[451]314 format2 = format.upper()
315 if format2 == 'ASAP':
[116]316 self._save(name)
[2029]317 elif format2 == 'MS2':
318 msopt = {'ms': {'overwrite': overwrite } }
319 from asap._asap import mswriter
320 writer = mswriter( self )
321 writer.write( name, msopt )
[116]322 else:
[989]323 from asap._asap import stwriter as stw
[1118]324 writer = stw(format2)
325 writer.write(self, name)
[116]326 return
327
[102]328 def copy(self):
[1846]329 """Return a copy of this scantable.
330
331 *Note*:
332
[1348]333 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]334
335 Example::
336
[102]337 copiedscan = scan.copy()
[1846]338
[102]339 """
[876]340 sd = scantable(Scantable._copy(self))
[113]341 return sd
342
[1093]343 def drop_scan(self, scanid=None):
[1846]344 """\
[1093]345 Return a new scantable where the specified scan number(s) has(have)
346 been dropped.
[1846]347
[1093]348 Parameters:
[1846]349
[1093]350 scanid: a (list of) scan number(s)
[1846]351
[1093]352 """
353 from asap import _is_sequence_or_number as _is_valid
354 from asap import _to_list
355 from asap import unique
356 if not _is_valid(scanid):
[2315]357 raise RuntimeError( 'Please specify a scanno to drop from the'
358 ' scantable' )
[1859]359 scanid = _to_list(scanid)
360 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
361 for sid in scanid: allscans.remove(sid)
362 if len(allscans) == 0:
363 raise ValueError("Can't remove all scans")
364 sel = selector(scans=allscans)
365 return self._select_copy(sel)
[1093]366
[1594]367 def _select_copy(self, selection):
368 orig = self.get_selection()
369 self.set_selection(orig+selection)
370 cp = self.copy()
371 self.set_selection(orig)
372 return cp
373
[102]374 def get_scan(self, scanid=None):
[1855]375 """\
[102]376 Return a specific scan (by scanno) or collection of scans (by
377 source name) in a new scantable.
[1846]378
379 *Note*:
380
[1348]381 See scantable.drop_scan() for the inverse operation.
[1846]382
[102]383 Parameters:
[1846]384
[513]385 scanid: a (list of) scanno or a source name, unix-style
386 patterns are accepted for source name matching, e.g.
387 '*_R' gets all 'ref scans
[1846]388
389 Example::
390
[513]391 # get all scans containing the source '323p459'
392 newscan = scan.get_scan('323p459')
393 # get all 'off' scans
394 refscans = scan.get_scan('*_R')
395 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]396 newscan = scan.get_scan([0, 2, 7, 10])
[1846]397
[102]398 """
399 if scanid is None:
[1859]400 raise RuntimeError( 'Please specify a scan no or name to '
401 'retrieve from the scantable' )
[102]402 try:
[946]403 bsel = self.get_selection()
404 sel = selector()
[102]405 if type(scanid) is str:
[946]406 sel.set_name(scanid)
[1594]407 return self._select_copy(sel)
[102]408 elif type(scanid) is int:
[946]409 sel.set_scans([scanid])
[1594]410 return self._select_copy(sel)
[381]411 elif type(scanid) is list:
[946]412 sel.set_scans(scanid)
[1594]413 return self._select_copy(sel)
[381]414 else:
[718]415 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]416 raise TypeError(msg)
[102]417 except RuntimeError:
[1859]418 raise
[102]419
    def __str__(self):
        # The C++ _summary call writes its report to a file, so render it
        # into a temporary file and return the contents; the file is
        # removed automatically when tempFile is garbage collected.
        tempFile = tempfile.NamedTemporaryFile()
        Scantable._summary(self, tempFile.name)
        tempFile.seek(0)
        # discard log messages produced as a side effect of _summary
        asaplog.clear()
        return tempFile.file.read()
[102]426
[2315]427 @asaplog_post_dec
[976]428 def summary(self, filename=None):
[1846]429 """\
[102]430 Print a summary of the contents of this scantable.
[1846]431
[102]432 Parameters:
[1846]433
[1931]434 filename: the name of a file to write the putput to
[102]435 Default - no file output
[1846]436
[102]437 """
438 if filename is not None:
[256]439 if filename is "":
440 filename = 'scantable_summary.txt'
[415]441 from os.path import expandvars, isdir
[411]442 filename = expandvars(filename)
[2286]443 if isdir(filename):
[718]444 msg = "Illegal file name '%s'." % (filename)
[1859]445 raise IOError(msg)
[2286]446 else:
447 filename = ""
448 Scantable._summary(self, filename)
[710]449
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        Returns:

             the spectrum of the given row as a list of values

        """
        return self._getspectrum(rowno)
[946]459
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        Returns:

             the channel mask of the given row as a list

        """
        return self._getmask(rowno)
469
470 def set_spectrum(self, spec, rowno):
[1938]471 """Set the spectrum for the current row in the scantable.
[1846]472
[1471]473 Parameters:
[1846]474
[1855]475 spec: the new spectrum
[1846]476
[1855]477 rowno: the row number to set the spectrum for
478
[1471]479 """
[2348]480 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]481 return self._setspectrum(spec, rowno)
482
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
505
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)   # select IF 0
            scan.set_selection(sel)  # apply modified selection

        """
        # wrap the raw selection so callers get the python selector API
        return selector(self._getselection())
518
[1576]519 def set_selection(self, selection=None, **kw):
[1846]520 """\
[1005]521 Select a subset of the data. All following operations on this scantable
522 are only applied to thi selection.
[1846]523
[1005]524 Parameters:
[1697]525
[1846]526 selection: a selector object (default unset the selection), or
[2431]527 any combination of 'pols', 'ifs', 'beams', 'scans',
528 'cycles', 'name', 'query'
[1697]529
[1846]530 Examples::
[1697]531
[1005]532 sel = selector() # create a selection object
[1118]533 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]534 scan.set_selection(sel) # set the selection
535 scan.summary() # will only print summary of scanno 0 an 3
536 scan.set_selection() # unset the selection
[1697]537 # or the equivalent
538 scan.set_selection(scans=[0,3])
539 scan.summary() # will only print summary of scanno 0 an 3
540 scan.set_selection() # unset the selection
[1846]541
[946]542 """
[1576]543 if selection is None:
544 # reset
545 if len(kw) == 0:
546 selection = selector()
547 else:
548 # try keywords
549 for k in kw:
550 if k not in selector.fields:
[2320]551 raise KeyError("Invalid selection key '%s', "
552 "valid keys are %s" % (k,
553 selector.fields))
[1576]554 selection = selector(**kw)
[946]555 self._setselection(selection)
556
[1819]557 def get_row(self, row=0, insitu=None):
[1846]558 """\
[1819]559 Select a row in the scantable.
560 Return a scantable with single row.
[1846]561
[1819]562 Parameters:
[1846]563
564 row: row no of integration, default is 0.
565 insitu: if False a new scantable is returned. Otherwise, the
566 scaling is done in-situ. The default is taken from .asaprc
567 (False)
568
[1819]569 """
[2349]570 if insitu is None:
571 insitu = rcParams['insitu']
[1819]572 if not insitu:
573 workscan = self.copy()
574 else:
575 workscan = self
576 # Select a row
[2349]577 sel = selector()
[1992]578 sel.set_rows([row])
[1819]579 workscan.set_selection(sel)
580 if not workscan.nrow() == 1:
[2349]581 msg = "Could not identify single row. %d rows selected." \
582 % (workscan.nrow())
[1819]583 raise RuntimeError(msg)
584 if insitu:
585 self._assign(workscan)
586 else:
587 return workscan
588
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants report the abscissa of the extremum rather
        # than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel where the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # NOTE(review): 'statunit' is only assigned inside the loop; an
        # empty selection would leave it undefined below - confirm rows
        # can never be empty here
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa variant: collect the x-value as the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only label the axes that actually vary in this dataset
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # route the report through a per-user temp file so it reaches the
        # log in one piece
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
696
697 def chan2data(self, rowno=0, chan=0):
[1846]698 """\
[1819]699 Returns channel/frequency/velocity and spectral value
700 at an arbitrary row and channel in the scantable.
[1846]701
[1819]702 Parameters:
[1846]703
[1819]704 rowno: a row number in the scantable. Default is the
705 first row, i.e. rowno=0
[1855]706
[1819]707 chan: a channel in the scantable. Default is the first
708 channel, i.e. pos=0
[1846]709
[1819]710 """
711 if isinstance(rowno, int) and isinstance(chan, int):
712 qx = {'unit': self.get_unit(),
713 'value': self._getabcissa(rowno)[chan]}
714 qy = {'unit': self.get_fluxunit(),
715 'value': self._getspectrum(rowno)[chan]}
716 return qx, qy
717
[1118]718 def stddev(self, mask=None):
[1846]719 """\
[135]720 Determine the standard deviation of the current beam/if/pol
721 Takes a 'mask' as an optional parameter to specify which
722 channels should be excluded.
[1846]723
[135]724 Parameters:
[1846]725
[135]726 mask: an optional mask specifying where the standard
727 deviation should be determined.
728
[1846]729 Example::
730
[135]731 scan.set_unit('channel')
[1118]732 msk = scan.create_mask([100, 200], [500, 600])
[135]733 scan.stddev(mask=m)
[1846]734
[135]735 """
[1118]736 return self.stats(stat='stddev', mask=mask);
[135]737
[1003]738
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the raw sequence from the base class into a plain list
        return list(Scantable.get_column_names(self))
[1003]744
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # no specific row: report per-row values via the logging callback
        return self._row_callback(self._gettsys, "Tsys")
[256]761
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection
            (presumably one spectrum per row rather than scalars - the
            docstring mirrors get_tsys; confirm against _gettsysspectrum)

        """
        return self._get_column( self._gettsysspectrum, row )
776
[1730]777 def get_weather(self, row=-1):
[1846]778 """\
779 Return the weather informations.
780
781 Parameters:
782
783 row: the rowno to get the information for. (default all rows)
784
785 Returns:
786
787 a dict or list of of dicts of values for the current selection
788
789 """
790
[1730]791 values = self._get_column(self._get_weather, row)
792 if row > -1:
793 return {'temperature': values[0],
794 'pressure': values[1], 'humidity' : values[2],
795 'windspeed' : values[3], 'windaz' : values[4]
796 }
797 else:
798 out = []
799 for r in values:
800
801 out.append({'temperature': r[0],
802 'pressure': r[1], 'humidity' : r[2],
803 'windspeed' : r[3], 'windaz' : r[4]
804 })
805 return out
806
    def _row_callback(self, callback, label):
        # Apply 'callback' to every row of the table, push a formatted
        # per-row report to the log under 'label', and return the list of
        # per-row values.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only label the axes that actually vary in this dataset
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]830
[1947]831 def _get_column(self, callback, row=-1, *args):
[1070]832 """
833 """
834 if row == -1:
[1947]835 return [callback(i, *args) for i in range(self.nrow())]
[1070]836 else:
[1819]837 if 0 <= row < self.nrow():
[1947]838 return callback(row, *args)
[256]839
[1070]840
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the sub-fields (fraction, seconds, minutes) that the
            # reduced precision cannot carry, innermost first
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]886
[1348]887
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
[1348]899
[1573]900
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]911
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]923
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]934
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]946
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
958
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
970
[1862]971 @asaplog_post_dec
[102]972 def set_unit(self, unit='channel'):
[1846]973 """\
[102]974 Set the unit for all following operations on this scantable
[1846]975
[102]976 Parameters:
[1846]977
978 unit: optional unit, default is 'channel'. Use one of '*Hz',
979 'km/s', 'channel' or equivalent ''
980
[102]981 """
[484]982 varlist = vars()
[1118]983 if unit in ['', 'pixel', 'channel']:
[113]984 unit = ''
985 inf = list(self._getcoordinfo())
986 inf[0] = unit
987 self._setcoordinfo(inf)
[1118]988 self._add_history("set_unit", varlist)
[113]989
    @asaplog_post_dec
    def set_instrument(self, instr):
        """\
        Set the instrument for subsequent processing.

        Parameters:

            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'

        """
        self._setInstrument(instr)
        # NOTE(review): the history label is misspelled ('set_instument');
        # left unchanged because external tools may match the existing string
        self._add_history("set_instument", vars())
[358]1003
[1862]1004 @asaplog_post_dec
[1190]1005 def set_feedtype(self, feedtype):
[1846]1006 """\
[1190]1007 Overwrite the feed type, which might not be set correctly.
[1846]1008
[1190]1009 Parameters:
[1846]1010
[1190]1011 feedtype: 'linear' or 'circular'
[1846]1012
[1190]1013 """
1014 self._setfeedtype(feedtype)
1015 self._add_history("set_feedtype", vars())
1016
[1862]1017 @asaplog_post_dec
[276]1018 def set_doppler(self, doppler='RADIO'):
[1846]1019 """\
[276]1020 Set the doppler for all following operations on this scantable.
[1846]1021
[276]1022 Parameters:
[1846]1023
[276]1024 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1025
[276]1026 """
[484]1027 varlist = vars()
[276]1028 inf = list(self._getcoordinfo())
1029 inf[2] = doppler
1030 self._setcoordinfo(inf)
[1118]1031 self._add_history("set_doppler", vars())
[710]1032
[1862]1033 @asaplog_post_dec
[226]1034 def set_freqframe(self, frame=None):
[1846]1035 """\
[113]1036 Set the frame type of the Spectral Axis.
[1846]1037
[113]1038 Parameters:
[1846]1039
[591]1040 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1041 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1042 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1043
1044 Example::
1045
[113]1046 scan.set_freqframe('BARY')
[1846]1047
[113]1048 """
[1593]1049 frame = frame or rcParams['scantable.freqframe']
[484]1050 varlist = vars()
[1819]1051 # "REST" is not implemented in casacore
1052 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1053 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1054 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1055 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1056
[989]1057 if frame in valid:
[113]1058 inf = list(self._getcoordinfo())
1059 inf[1] = frame
1060 self._setcoordinfo(inf)
[1118]1061 self._add_history("set_freqframe", varlist)
[102]1062 else:
[1118]1063 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1064 raise TypeError(msg)
[710]1065
[1862]1066 @asaplog_post_dec
[989]1067 def set_dirframe(self, frame=""):
[1846]1068 """\
[989]1069 Set the frame type of the Direction on the sky.
[1846]1070
[989]1071 Parameters:
[1846]1072
[989]1073 frame: an optional frame type, default ''. Valid frames are:
1074 'J2000', 'B1950', 'GALACTIC'
[1846]1075
1076 Example:
1077
[989]1078 scan.set_dirframe('GALACTIC')
[1846]1079
[989]1080 """
1081 varlist = vars()
[1859]1082 Scantable.set_dirframe(self, frame)
[1118]1083 self._add_history("set_dirframe", varlist)
[989]1084
[113]1085 def get_unit(self):
[1846]1086 """\
[113]1087 Get the default unit set in this scantable
[1846]1088
[113]1089 Returns:
[1846]1090
[113]1091 A unit string
[1846]1092
[113]1093 """
1094 inf = self._getcoordinfo()
1095 unit = inf[0]
1096 if unit == '': unit = 'channel'
1097 return unit
[102]1098
[1862]1099 @asaplog_post_dec
[158]1100 def get_abcissa(self, rowno=0):
[1846]1101 """\
[158]1102 Get the abcissa in the current coordinate setup for the currently
[113]1103 selected Beam/IF/Pol
[1846]1104
[113]1105 Parameters:
[1846]1106
[226]1107 rowno: an optional row number in the scantable. Default is the
1108 first row, i.e. rowno=0
[1846]1109
[113]1110 Returns:
[1846]1111
[1348]1112 The abcissa values and the format string (as a dictionary)
[1846]1113
[113]1114 """
[256]1115 abc = self._getabcissa(rowno)
[710]1116 lbl = self._getabcissalabel(rowno)
[158]1117 return abc, lbl
[113]1118
[1862]1119 @asaplog_post_dec
[2322]1120 def flag(self, mask=None, unflag=False, row=-1):
[1846]1121 """\
[1001]1122 Flag the selected data using an optional channel mask.
[1846]1123
[1001]1124 Parameters:
[1846]1125
[1001]1126 mask: an optional channel mask, created with create_mask. Default
1127 (no mask) is all channels.
[1855]1128
[1819]1129 unflag: if True, unflag the data
[1846]1130
[2322]1131 row: an optional row number in the scantable.
1132 Default -1 flags all rows
1133
[1001]1134 """
1135 varlist = vars()
[1593]1136 mask = mask or []
[1994]1137 self._flag(row, mask, unflag)
[1001]1138 self._add_history("flag", varlist)
1139
[1862]1140 @asaplog_post_dec
[2322]1141 def flag_row(self, rows=None, unflag=False):
[1846]1142 """\
[1819]1143 Flag the selected data in row-based manner.
[1846]1144
[1819]1145 Parameters:
[1846]1146
[1843]1147 rows: list of row numbers to be flagged. Default is no row
[2322]1148 (must be explicitly specified to execute row-based
1149 flagging).
[1855]1150
[1819]1151 unflag: if True, unflag the data.
[1846]1152
[1819]1153 """
1154 varlist = vars()
[2322]1155 if rows is None:
1156 rows = []
[1859]1157 self._flag_row(rows, unflag)
[1819]1158 self._add_history("flag_row", varlist)
1159
[1862]1160 @asaplog_post_dec
[1819]1161 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1162 """\
[1819]1163 Flag the selected data outside a specified range (in channel-base)
[1846]1164
[1819]1165 Parameters:
[1846]1166
[1819]1167 uthres: upper threshold.
[1855]1168
[1819]1169 dthres: lower threshold
[1846]1170
[2322]1171 clipoutside: True for flagging data outside the range
1172 [dthres:uthres].
[1928]1173 False for flagging data inside the range.
[1855]1174
[1846]1175 unflag: if True, unflag the data.
1176
[1819]1177 """
1178 varlist = vars()
[1859]1179 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1180 self._add_history("clip", varlist)
1181
[1862]1182 @asaplog_post_dec
[1584]1183 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1184 """\
[1192]1185 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1186 Flagged data in the scantable get interpolated over the region.
[1192]1187 No taper is applied.
[1846]1188
[1192]1189 Parameters:
[1846]1190
[1579]1191 start: the start frequency (really a period within the
1192 bandwidth) or period to remove
[1855]1193
[1579]1194 end: the end frequency or period to remove
[1855]1195
[2431]1196 unit: the frequency unit (default 'MHz') or '' for
[1579]1197 explicit lag channels
[1846]1198
1199 *Notes*:
1200
[1579]1201 It is recommended to flag edges of the band or strong
[1348]1202 signals beforehand.
[1846]1203
[1192]1204 """
1205 if insitu is None: insitu = rcParams['insitu']
1206 self._math._setinsitu(insitu)
1207 varlist = vars()
[1579]1208 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1209 if not (unit == "" or base.has_key(unit)):
[1192]1210 raise ValueError("%s is not a valid unit." % unit)
[1859]1211 if unit == "":
1212 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1213 else:
1214 s = scantable(self._math._lag_flag(self, start*base[unit],
1215 end*base[unit], "frequency"))
[1192]1216 s._add_history("lag_flag", varlist)
1217 if insitu:
1218 self._assign(s)
1219 else:
1220 return s
[1001]1221
[1862]1222 @asaplog_post_dec
[2349]1223 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1224 """\
1225 Apply FFT to the spectra.
1226 Flagged data in the scantable get interpolated over the region.
1227
1228 Parameters:
[2186]1229
1230 rowno: The row number(s) to be processed. int, list
[2349]1231 and tuple are accepted. By default (None), FFT
[2186]1232 is applied to the whole data.
1233
1234 mask: Auxiliary channel mask(s). Given as a boolean
1235 list, it is applied to all specified rows.
1236 A list of boolean lists can also be used to
1237 apply different masks. In the latter case, the
1238 length of 'mask' must be the same as that of
[2349]1239 'rowno'. The default is None.
[2177]1240
1241 getrealimag: If True, returns the real and imaginary part
1242 values of the complex results.
1243 If False (the default), returns the amplitude
1244 (absolute value) normalised with Ndata/2 and
1245 phase (argument, in unit of radian).
1246
1247 Returns:
1248
[2186]1249 A list of dictionaries containing the results for each spectrum.
1250 Each dictionary contains two values, the real and the imaginary
1251 parts when getrealimag = True, or the amplitude(absolute value)
1252 and the phase(argument) when getrealimag = False. The key for
1253 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1254 respectively.
1255 """
[2349]1256 if rowno is None:
1257 rowno = []
[2177]1258 if isinstance(rowno, int):
1259 rowno = [rowno]
1260 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1261 raise TypeError("The row number(s) must be int, list or tuple.")
1262 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1263
[2411]1264 usecommonmask = True
1265
1266 if mask is None:
1267 mask = []
1268 if isinstance(mask, list) or isinstance(mask, tuple):
1269 if len(mask) == 0:
1270 mask = [[]]
1271 else:
1272 if isinstance(mask[0], bool):
1273 if len(mask) != self.nchan(self.getif(rowno[0])):
1274 raise ValueError("The spectra and the mask have "
1275 "different length.")
1276 mask = [mask]
1277 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1278 usecommonmask = False
1279 if len(mask) != len(rowno):
1280 raise ValueError("When specifying masks for each "
1281 "spectrum, the numbers of them "
1282 "must be identical.")
1283 for i in xrange(mask):
1284 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1285 raise ValueError("The spectra and the mask have "
1286 "different length.")
1287 else:
1288 raise TypeError("The mask must be a boolean list or "
1289 "a list of boolean list.")
1290 else:
[2349]1291 raise TypeError("The mask must be a boolean list or a list of "
1292 "boolean list.")
[2186]1293
1294 res = []
1295
1296 imask = 0
1297 for whichrow in rowno:
1298 fspec = self._fft(whichrow, mask[imask], getrealimag)
1299 nspec = len(fspec)
[2177]1300
[2349]1301 i = 0
1302 v1 = []
1303 v2 = []
1304 reselem = {"real":[],"imag":[]} if getrealimag \
1305 else {"ampl":[],"phase":[]}
[2177]1306
[2186]1307 while (i < nspec):
1308 v1.append(fspec[i])
1309 v2.append(fspec[i+1])
[2349]1310 i += 2
[2186]1311
[2177]1312 if getrealimag:
[2186]1313 reselem["real"] += v1
1314 reselem["imag"] += v2
[2177]1315 else:
[2186]1316 reselem["ampl"] += v1
1317 reselem["phase"] += v2
[2177]1318
[2186]1319 res.append(reselem)
1320
[2349]1321 if not usecommonmask:
1322 imask += 1
[2186]1323
[2177]1324 return res
1325
1326 @asaplog_post_dec
[113]1327 def create_mask(self, *args, **kwargs):
[1846]1328 """\
[1118]1329 Compute and return a mask based on [min, max] windows.
[189]1330 The specified windows are to be INCLUDED, when the mask is
[113]1331 applied.
[1846]1332
[102]1333 Parameters:
[1846]1334
[1118]1335 [min, max], [min2, max2], ...
[1024]1336 Pairs of start/end points (inclusive)specifying the regions
[102]1337 to be masked
[1855]1338
[189]1339 invert: optional argument. If specified as True,
1340 return an inverted mask, i.e. the regions
1341 specified are EXCLUDED
[1855]1342
[513]1343 row: create the mask using the specified row for
1344 unit conversions, default is row=0
1345 only necessary if frequency varies over rows.
[1846]1346
1347 Examples::
1348
[113]1349 scan.set_unit('channel')
[1846]1350 # a)
[1118]1351 msk = scan.create_mask([400, 500], [800, 900])
[189]1352 # masks everything outside 400 and 500
[113]1353 # and 800 and 900 in the unit 'channel'
1354
[1846]1355 # b)
[1118]1356 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1357 # masks the regions between 400 and 500
[113]1358 # and 800 and 900 in the unit 'channel'
[1846]1359
1360 # c)
1361 #mask only channel 400
[1554]1362 msk = scan.create_mask([400])
[1846]1363
[102]1364 """
[1554]1365 row = kwargs.get("row", 0)
[513]1366 data = self._getabcissa(row)
[113]1367 u = self._getcoordinfo()[0]
[1859]1368 if u == "":
1369 u = "channel"
1370 msg = "The current mask window unit is %s" % u
1371 i = self._check_ifs()
1372 if not i:
1373 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1374 asaplog.push(msg)
[2348]1375 n = len(data)
[1295]1376 msk = _n_bools(n, False)
[710]1377 # test if args is a 'list' or a 'normal *args - UGLY!!!
1378
[2349]1379 ws = (isinstance(args[-1][-1], int)
1380 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1381 for window in ws:
[1554]1382 if len(window) == 1:
1383 window = [window[0], window[0]]
1384 if len(window) == 0 or len(window) > 2:
[2349]1385 raise ValueError("A window needs to be defined as "
1386 "[start(, end)]")
[1545]1387 if window[0] > window[1]:
1388 tmp = window[0]
1389 window[0] = window[1]
1390 window[1] = tmp
[102]1391 for i in range(n):
[1024]1392 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1393 msk[i] = True
[113]1394 if kwargs.has_key('invert'):
1395 if kwargs.get('invert'):
[1295]1396 msk = mask_not(msk)
[102]1397 return msk
[710]1398
[1931]1399 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1400 """\
[1819]1401 Compute and return a list of mask windows, [min, max].
[1846]1402
[1819]1403 Parameters:
[1846]1404
[1819]1405 mask: channel mask, created with create_mask.
[1855]1406
[1819]1407 row: calcutate the masklist using the specified row
1408 for unit conversions, default is row=0
1409 only necessary if frequency varies over rows.
[1846]1410
[1819]1411 Returns:
[1846]1412
[1819]1413 [min, max], [min2, max2], ...
1414 Pairs of start/end points (inclusive)specifying
1415 the masked regions
[1846]1416
[1819]1417 """
1418 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1419 raise TypeError("The mask should be list or tuple.")
[2427]1420 if len(mask) <= 0:
1421 raise TypeError("The mask elements should be > 0")
[2348]1422 data = self._getabcissa(row)
1423 if len(data) != len(mask):
[1819]1424 msg = "Number of channels in scantable != number of mask elements"
1425 raise TypeError(msg)
1426 u = self._getcoordinfo()[0]
[1859]1427 if u == "":
1428 u = "channel"
1429 msg = "The current mask window unit is %s" % u
1430 i = self._check_ifs()
1431 if not i:
1432 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1433 if not silent:
1434 asaplog.push(msg)
[2349]1435 masklist = []
[1819]1436 ist, ien = None, None
1437 ist, ien=self.get_mask_indices(mask)
1438 if ist is not None and ien is not None:
1439 for i in xrange(len(ist)):
1440 range=[data[ist[i]],data[ien[i]]]
1441 range.sort()
1442 masklist.append([range[0],range[1]])
1443 return masklist
1444
1445 def get_mask_indices(self, mask=None):
[1846]1446 """\
[1819]1447 Compute and Return lists of mask start indices and mask end indices.
[1855]1448
1449 Parameters:
1450
[1819]1451 mask: channel mask, created with create_mask.
[1846]1452
[1819]1453 Returns:
[1846]1454
[1819]1455 List of mask start indices and that of mask end indices,
1456 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1457
[1819]1458 """
1459 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1460 raise TypeError("The mask should be list or tuple.")
[2427]1461 if len(mask) <= 0:
1462 raise TypeError("The mask elements should be > 0")
[2349]1463 istart = []
1464 iend = []
1465 if mask[0]:
1466 istart.append(0)
[1819]1467 for i in range(len(mask)-1):
1468 if not mask[i] and mask[i+1]:
1469 istart.append(i+1)
1470 elif mask[i] and not mask[i+1]:
1471 iend.append(i)
[2349]1472 if mask[len(mask)-1]:
1473 iend.append(len(mask)-1)
[1819]1474 if len(istart) != len(iend):
1475 raise RuntimeError("Numbers of mask start != mask end.")
1476 for i in range(len(istart)):
1477 if istart[i] > iend[i]:
1478 raise RuntimeError("Mask start index > mask end index")
1479 break
1480 return istart,iend
1481
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to obtain its
                        # frequency coordinate and abcissa range
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1615
[2349]1616 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]1617 minval=None, maxval=None):
[2013]1618 """
1619 Parameters:
1620 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1621 typestr : The type of the values in returned list
1622 ('integer' or 'float')
1623 offset : The offset value to subtract from or add to
1624 the boundary value if the selection string
1625 includes '<' or '>'
1626 minval, maxval : The minimum/maximum values to set if the
1627 selection string includes '<' or '>'.
1628 The list element is filled with None by default.
1629 Returns:
1630 A list of min/max pair of selections.
1631 Example:
1632 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1633 returns [[0,2],[5,7],[9,9]]
1634 """
1635 selgroups = selstr.split(';')
1636 sellists = []
1637 if typestr.lower().startswith('int'):
1638 formatfunc = int
1639 else:
1640 formatfunc = float
1641
1642 for currsel in selgroups:
1643 if currsel.find('~') > 0:
1644 minsel = formatfunc(currsel.split('~')[0].strip())
1645 maxsel = formatfunc(currsel.split('~')[1].strip())
1646 elif currsel.strip().startswith('<'):
1647 minsel = minval
1648 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1649 - formatfunc(offset)
1650 elif currsel.strip().startswith('>'):
1651 minsel = formatfunc(currsel.split('>')[1].strip()) \
1652 + formatfunc(offset)
1653 maxsel = maxval
1654 else:
1655 minsel = formatfunc(currsel)
1656 maxsel = formatfunc(currsel)
1657 sellists.append([minsel,maxsel])
1658 return sellists
1659
[1819]1660# def get_restfreqs(self):
1661# """
1662# Get the restfrequency(s) stored in this scantable.
1663# The return value(s) are always of unit 'Hz'
1664# Parameters:
1665# none
1666# Returns:
1667# a list of doubles
1668# """
1669# return list(self._getrestfreqs())
1670
1671 def get_restfreqs(self, ids=None):
[1846]1672 """\
[256]1673 Get the restfrequency(s) stored in this scantable.
1674 The return value(s) are always of unit 'Hz'
[1846]1675
[256]1676 Parameters:
[1846]1677
[1819]1678 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1679 be retrieved
[1846]1680
[256]1681 Returns:
[1846]1682
[1819]1683 dictionary containing ids and a list of doubles for each id
[1846]1684
[256]1685 """
[1819]1686 if ids is None:
[2349]1687 rfreqs = {}
[1819]1688 idlist = self.getmolnos()
1689 for i in idlist:
[2349]1690 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1691 return rfreqs
1692 else:
[2349]1693 if type(ids) == list or type(ids) == tuple:
1694 rfreqs = {}
[1819]1695 for i in ids:
[2349]1696 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1697 return rfreqs
1698 else:
1699 return list(self._getrestfreqs(ids))
[102]1700
[2349]1701 @asaplog_post_dec
[931]1702 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1703 """\
[931]1704 Set or replace the restfrequency specified and
[1938]1705 if the 'freqs' argument holds a scalar,
[931]1706 then that rest frequency will be applied to all the selected
1707 data. If the 'freqs' argument holds
1708 a vector, then it MUST be of equal or smaller length than
1709 the number of IFs (and the available restfrequencies will be
1710 replaced by this vector). In this case, *all* data have
1711 the restfrequency set per IF according
1712 to the corresponding value you give in the 'freqs' vector.
[1118]1713 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1714 IF 1 gets restfreq 2e9.
[1846]1715
[1395]1716 You can also specify the frequencies via a linecatalog.
[1153]1717
[931]1718 Parameters:
[1846]1719
[931]1720 freqs: list of rest frequency values or string idenitfiers
[1855]1721
[931]1722 unit: unit for rest frequency (default 'Hz')
[402]1723
[1846]1724
1725 Example::
1726
[1819]1727 # set the given restfrequency for the all currently selected IFs
[931]1728 scan.set_restfreqs(freqs=1.4e9)
[1845]1729 # set restfrequencies for the n IFs (n > 1) in the order of the
1730 # list, i.e
1731 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1732 # len(list_of_restfreqs) == nIF
1733 # for nIF == 1 the following will set multiple restfrequency for
1734 # that IF
[1819]1735 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1736 # set multiple restfrequencies per IF. as a list of lists where
1737 # the outer list has nIF elements, the inner s arbitrary
1738 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1739
[1846]1740 *Note*:
[1845]1741
[931]1742 To do more sophisticate Restfrequency setting, e.g. on a
1743 source and IF basis, use scantable.set_selection() before using
[1846]1744 this function::
[931]1745
[1846]1746 # provided your scantable is called scan
1747 selection = selector()
[2431]1748 selection.set_name('ORION*')
[1846]1749 selection.set_ifs([1])
1750 scan.set_selection(selection)
1751 scan.set_restfreqs(freqs=86.6e9)
1752
[931]1753 """
1754 varlist = vars()
[1157]1755 from asap import linecatalog
1756 # simple value
[1118]1757 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1758 self._setrestfreqs([freqs], [""], unit)
[1157]1759 # list of values
[1118]1760 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1761 # list values are scalars
[1118]1762 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1763 if len(freqs) == 1:
1764 self._setrestfreqs(freqs, [""], unit)
1765 else:
1766 # allow the 'old' mode of setting mulitple IFs
1767 sel = selector()
1768 savesel = self._getselection()
1769 iflist = self.getifnos()
1770 if len(freqs)>len(iflist):
1771 raise ValueError("number of elements in list of list "
1772 "exeeds the current IF selections")
1773 iflist = self.getifnos()
1774 for i, fval in enumerate(freqs):
1775 sel.set_ifs(iflist[i])
1776 self._setselection(sel)
1777 self._setrestfreqs([fval], [""], unit)
1778 self._setselection(savesel)
1779
1780 # list values are dict, {'value'=, 'name'=)
[1157]1781 elif isinstance(freqs[-1], dict):
[1845]1782 values = []
1783 names = []
1784 for d in freqs:
1785 values.append(d["value"])
1786 names.append(d["name"])
1787 self._setrestfreqs(values, names, unit)
[1819]1788 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1789 sel = selector()
1790 savesel = self._getselection()
[1322]1791 iflist = self.getifnos()
[1819]1792 if len(freqs)>len(iflist):
[1845]1793 raise ValueError("number of elements in list of list exeeds"
1794 " the current IF selections")
1795 for i, fval in enumerate(freqs):
[1322]1796 sel.set_ifs(iflist[i])
[1259]1797 self._setselection(sel)
[1845]1798 self._setrestfreqs(fval, [""], unit)
[1157]1799 self._setselection(savesel)
1800 # freqs are to be taken from a linecatalog
[1153]1801 elif isinstance(freqs, linecatalog):
1802 sel = selector()
1803 savesel = self._getselection()
1804 for i in xrange(freqs.nrow()):
[1322]1805 sel.set_ifs(iflist[i])
[1153]1806 self._setselection(sel)
[1845]1807 self._setrestfreqs([freqs.get_frequency(i)],
1808 [freqs.get_name(i)], "MHz")
[1153]1809 # ensure that we are not iterating past nIF
1810 if i == self.nif()-1: break
1811 self._setselection(savesel)
[931]1812 else:
1813 return
1814 self._add_history("set_restfreqs", varlist)
1815
[2349]1816 @asaplog_post_dec
[1360]1817 def shift_refpix(self, delta):
[1846]1818 """\
[1589]1819 Shift the reference pixel of the Spectra Coordinate by an
1820 integer amount.
[1846]1821
[1589]1822 Parameters:
[1846]1823
[1589]1824 delta: the amount to shift by
[1846]1825
1826 *Note*:
1827
[1589]1828 Be careful using this with broadband data.
[1846]1829
[1360]1830 """
[2349]1831 varlist = vars()
[1731]1832 Scantable.shift_refpix(self, delta)
[2349]1833 s._add_history("shift_refpix", varlist)
[931]1834
[1862]1835 @asaplog_post_dec
[1259]1836 def history(self, filename=None):
[1846]1837 """\
[1259]1838 Print the history. Optionally to a file.
[1846]1839
[1348]1840 Parameters:
[1846]1841
[1928]1842 filename: The name of the file to save the history to.
[1846]1843
[1259]1844 """
[484]1845 hist = list(self._gethistory())
[794]1846 out = "-"*80
[484]1847 for h in hist:
[489]1848 if h.startswith("---"):
[1857]1849 out = "\n".join([out, h])
[489]1850 else:
1851 items = h.split("##")
1852 date = items[0]
1853 func = items[1]
1854 items = items[2:]
[794]1855 out += "\n"+date+"\n"
1856 out += "Function: %s\n Parameters:" % (func)
[489]1857 for i in items:
[1938]1858 if i == '':
1859 continue
[489]1860 s = i.split("=")
[1118]1861 out += "\n %s = %s" % (s[0], s[1])
[1857]1862 out = "\n".join([out, "-"*80])
[1259]1863 if filename is not None:
1864 if filename is "":
1865 filename = 'scantable_history.txt'
1866 import os
1867 filename = os.path.expandvars(os.path.expanduser(filename))
1868 if not os.path.isdir(filename):
1869 data = open(filename, 'w')
1870 data.write(out)
1871 data.close()
1872 else:
1873 msg = "Illegal file name '%s'." % (filename)
[1859]1874 raise IOError(msg)
1875 return page(out)
[2349]1876
[513]1877 #
1878 # Maths business
1879 #
[1862]1880 @asaplog_post_dec
[931]1881 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1882 """\
[2349]1883 Return the (time) weighted average of a scan. Scans will be averaged
1884 only if the source direction (RA/DEC) is within 1' otherwise
[1846]1885
1886 *Note*:
1887
[1070]1888 in channels only - align if necessary
[1846]1889
[513]1890 Parameters:
[1846]1891
[513]1892 mask: an optional mask (only used for 'var' and 'tsys'
1893 weighting)
[1855]1894
[558]1895 scanav: True averages each scan separately
1896 False (default) averages all scans together,
[1855]1897
[1099]1898 weight: Weighting scheme.
1899 'none' (mean no weight)
1900 'var' (1/var(spec) weighted)
1901 'tsys' (1/Tsys**2 weighted)
1902 'tint' (integration time weighted)
1903 'tintsys' (Tint/Tsys**2)
1904 'median' ( median averaging)
[535]1905 The default is 'tint'
[1855]1906
[931]1907 align: align the spectra in velocity before averaging. It takes
1908 the time of the first spectrum as reference time.
[1846]1909
1910 Example::
1911
[513]1912 # time average the scantable without using a mask
[710]1913 newscan = scan.average_time()
[1846]1914
[513]1915 """
1916 varlist = vars()
[1593]1917 weight = weight or 'TINT'
1918 mask = mask or ()
1919 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1920 scan = (self, )
[1859]1921
1922 if align:
1923 scan = (self.freq_align(insitu=False), )
1924 s = None
1925 if weight.upper() == 'MEDIAN':
1926 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1927 scanav))
1928 else:
1929 s = scantable(self._math._average(scan, mask, weight.upper(),
1930 scanav))
[1099]1931 s._add_history("average_time", varlist)
[513]1932 return s
[710]1933
[1862]1934 @asaplog_post_dec
[876]1935 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1936 """\
[513]1937 Return a scan where all spectra are converted to either
1938 Jansky or Kelvin depending upon the flux units of the scan table.
1939 By default the function tries to look the values up internally.
1940 If it can't find them (or if you want to over-ride), you must
1941 specify EITHER jyperk OR eta (and D which it will try to look up
1942 also if you don't set it). jyperk takes precedence if you set both.
[1846]1943
[513]1944 Parameters:
[1846]1945
[513]1946 jyperk: the Jy / K conversion factor
[1855]1947
[513]1948 eta: the aperture efficiency
[1855]1949
[1928]1950 d: the geometric diameter (metres)
[1855]1951
[513]1952 insitu: if False a new scantable is returned.
1953 Otherwise, the scaling is done in-situ
1954 The default is taken from .asaprc (False)
[1846]1955
[513]1956 """
1957 if insitu is None: insitu = rcParams['insitu']
[876]1958 self._math._setinsitu(insitu)
[513]1959 varlist = vars()
[1593]1960 jyperk = jyperk or -1.0
1961 d = d or -1.0
1962 eta = eta or -1.0
[876]1963 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1964 s._add_history("convert_flux", varlist)
1965 if insitu: self._assign(s)
1966 else: return s
[513]1967
[1862]1968 @asaplog_post_dec
[876]1969 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1970 """\
[513]1971 Return a scan after applying a gain-elevation correction.
1972 The correction can be made via either a polynomial or a
1973 table-based interpolation (and extrapolation if necessary).
1974 You specify polynomial coefficients, an ascii table or neither.
1975 If you specify neither, then a polynomial correction will be made
1976 with built in coefficients known for certain telescopes (an error
1977 will occur if the instrument is not known).
1978 The data and Tsys are *divided* by the scaling factors.
[1846]1979
[513]1980 Parameters:
[1846]1981
[513]1982 poly: Polynomial coefficients (default None) to compute a
1983 gain-elevation correction as a function of
1984 elevation (in degrees).
[1855]1985
[513]1986 filename: The name of an ascii file holding correction factors.
1987 The first row of the ascii file must give the column
1988 names and these MUST include columns
[2431]1989 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]1990 by this) somewhere.
1991 The second row must give the data type of the
1992 column. Use 'R' for Real and 'I' for Integer.
1993 An example file would be
1994 (actual factors are arbitrary) :
1995
1996 TIME ELEVATION FACTOR
1997 R R R
1998 0.1 0 0.8
1999 0.2 20 0.85
2000 0.3 40 0.9
2001 0.4 60 0.85
2002 0.5 80 0.8
2003 0.6 90 0.75
[1855]2004
[513]2005 method: Interpolation method when correcting from a table.
[2431]2006 Values are 'nearest', 'linear' (default), 'cubic'
2007 and 'spline'
[1855]2008
[513]2009 insitu: if False a new scantable is returned.
2010 Otherwise, the scaling is done in-situ
2011 The default is taken from .asaprc (False)
[1846]2012
[513]2013 """
2014
2015 if insitu is None: insitu = rcParams['insitu']
[876]2016 self._math._setinsitu(insitu)
[513]2017 varlist = vars()
[1593]2018 poly = poly or ()
[513]2019 from os.path import expandvars
2020 filename = expandvars(filename)
[876]2021 s = scantable(self._math._gainel(self, poly, filename, method))
2022 s._add_history("gain_el", varlist)
[1593]2023 if insitu:
2024 self._assign(s)
2025 else:
2026 return s
[710]2027
[1862]2028 @asaplog_post_dec
[931]2029 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2030 """\
[513]2031 Return a scan where all rows have been aligned in frequency/velocity.
2032 The alignment frequency frame (e.g. LSRK) is that set by function
2033 set_freqframe.
[1846]2034
[513]2035 Parameters:
[1855]2036
[513]2037 reftime: reference time to align at. By default, the time of
2038 the first row of data is used.
[1855]2039
[513]2040 method: Interpolation method for regridding the spectra.
[2431]2041 Choose from 'nearest', 'linear', 'cubic' (default)
2042 and 'spline'
[1855]2043
[513]2044 insitu: if False a new scantable is returned.
2045 Otherwise, the scaling is done in-situ
2046 The default is taken from .asaprc (False)
[1846]2047
[513]2048 """
[931]2049 if insitu is None: insitu = rcParams["insitu"]
[2429]2050 oldInsitu = self._math._insitu()
[876]2051 self._math._setinsitu(insitu)
[513]2052 varlist = vars()
[1593]2053 reftime = reftime or ""
[931]2054 s = scantable(self._math._freq_align(self, reftime, method))
[876]2055 s._add_history("freq_align", varlist)
[2429]2056 self._math._setinsitu(oldInsitu)
[2349]2057 if insitu:
2058 self._assign(s)
2059 else:
2060 return s
[513]2061
[1862]2062 @asaplog_post_dec
[1725]2063 def opacity(self, tau=None, insitu=None):
[1846]2064 """\
[513]2065 Apply an opacity correction. The data
2066 and Tsys are multiplied by the correction factor.
[1846]2067
[513]2068 Parameters:
[1855]2069
[1689]2070 tau: (list of) opacity from which the correction factor is
[513]2071 exp(tau*ZD)
[1689]2072 where ZD is the zenith-distance.
2073 If a list is provided, it has to be of length nIF,
2074 nIF*nPol or 1 and in order of IF/POL, e.g.
2075 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2076 if tau is `None` the opacities are determined from a
2077 model.
[1855]2078
[513]2079 insitu: if False a new scantable is returned.
2080 Otherwise, the scaling is done in-situ
2081 The default is taken from .asaprc (False)
[1846]2082
[513]2083 """
[2349]2084 if insitu is None:
2085 insitu = rcParams['insitu']
[876]2086 self._math._setinsitu(insitu)
[513]2087 varlist = vars()
[1689]2088 if not hasattr(tau, "__len__"):
2089 tau = [tau]
[876]2090 s = scantable(self._math._opacity(self, tau))
2091 s._add_history("opacity", varlist)
[2349]2092 if insitu:
2093 self._assign(s)
2094 else:
2095 return s
[513]2096
[1862]2097 @asaplog_post_dec
[513]2098 def bin(self, width=5, insitu=None):
[1846]2099 """\
[513]2100 Return a scan where all spectra have been binned up.
[1846]2101
[1348]2102 Parameters:
[1846]2103
[513]2104 width: The bin width (default=5) in pixels
[1855]2105
[513]2106 insitu: if False a new scantable is returned.
2107 Otherwise, the scaling is done in-situ
2108 The default is taken from .asaprc (False)
[1846]2109
[513]2110 """
[2349]2111 if insitu is None:
2112 insitu = rcParams['insitu']
[876]2113 self._math._setinsitu(insitu)
[513]2114 varlist = vars()
[876]2115 s = scantable(self._math._bin(self, width))
[1118]2116 s._add_history("bin", varlist)
[1589]2117 if insitu:
2118 self._assign(s)
2119 else:
2120 return s
[513]2121
[1862]2122 @asaplog_post_dec
[513]2123 def resample(self, width=5, method='cubic', insitu=None):
[1846]2124 """\
[1348]2125 Return a scan where all spectra have been binned up.
[1573]2126
[1348]2127 Parameters:
[1846]2128
[513]2129 width: The bin width (default=5) in pixels
[1855]2130
[513]2131 method: Interpolation method when correcting from a table.
[2431]2132 Values are 'nearest', 'linear', 'cubic' (default)
2133 and 'spline'
[1855]2134
[513]2135 insitu: if False a new scantable is returned.
2136 Otherwise, the scaling is done in-situ
2137 The default is taken from .asaprc (False)
[1846]2138
[513]2139 """
[2349]2140 if insitu is None:
2141 insitu = rcParams['insitu']
[876]2142 self._math._setinsitu(insitu)
[513]2143 varlist = vars()
[876]2144 s = scantable(self._math._resample(self, method, width))
[1118]2145 s._add_history("resample", varlist)
[2349]2146 if insitu:
2147 self._assign(s)
2148 else:
2149 return s
[513]2150
[1862]2151 @asaplog_post_dec
[946]2152 def average_pol(self, mask=None, weight='none'):
[1846]2153 """\
[946]2154 Average the Polarisations together.
[1846]2155
[946]2156 Parameters:
[1846]2157
[946]2158 mask: An optional mask defining the region, where the
2159 averaging will be applied. The output will have all
2160 specified points masked.
[1855]2161
[946]2162 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2163 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2164
[946]2165 """
2166 varlist = vars()
[1593]2167 mask = mask or ()
[1010]2168 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2169 s._add_history("average_pol", varlist)
[992]2170 return s
[513]2171
[1862]2172 @asaplog_post_dec
[1145]2173 def average_beam(self, mask=None, weight='none'):
[1846]2174 """\
[1145]2175 Average the Beams together.
[1846]2176
[1145]2177 Parameters:
2178 mask: An optional mask defining the region, where the
2179 averaging will be applied. The output will have all
2180 specified points masked.
[1855]2181
[1145]2182 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2183 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2184
[1145]2185 """
2186 varlist = vars()
[1593]2187 mask = mask or ()
[1145]2188 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2189 s._add_history("average_beam", varlist)
2190 return s
2191
[1586]2192 def parallactify(self, pflag):
[1846]2193 """\
[1843]2194 Set a flag to indicate whether this data should be treated as having
[1617]2195 been 'parallactified' (total phase == 0.0)
[1846]2196
[1617]2197 Parameters:
[1855]2198
[1843]2199 pflag: Bool indicating whether to turn this on (True) or
[1617]2200 off (False)
[1846]2201
[1617]2202 """
[1586]2203 varlist = vars()
2204 self._parallactify(pflag)
2205 self._add_history("parallactify", varlist)
2206
[1862]2207 @asaplog_post_dec
[992]2208 def convert_pol(self, poltype=None):
[1846]2209 """\
[992]2210 Convert the data to a different polarisation type.
[1565]2211 Note that you will need cross-polarisation terms for most conversions.
[1846]2212
[992]2213 Parameters:
[1855]2214
[992]2215 poltype: The new polarisation type. Valid types are:
[2431]2216 'linear', 'circular', 'stokes' and 'linpol'
[1846]2217
[992]2218 """
2219 varlist = vars()
[1859]2220 s = scantable(self._math._convertpol(self, poltype))
[1118]2221 s._add_history("convert_pol", varlist)
[992]2222 return s
2223
[1862]2224 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a pristine copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive verification: show original vs. smoothed spectrum
            # row by row and let the user accept or reject each one
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                    )
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2301
[2186]2302 @asaplog_post_dec
[2435]2303 def regrid_channel(self, width=5, plot=False, insitu=None):
2304 """\
2305 Regrid the spectra by the specified channel width
2306
2307 Parameters:
2308
2309 width: The channel width (float) of regridded spectra
2310 in the current spectral unit.
2311
2312 plot: [NOT IMPLEMENTED YET]
2313 plot the original and the regridded spectra.
2314 In this each indivual fit has to be approved, by
2315 typing 'y' or 'n'
2316
2317 insitu: if False a new scantable is returned.
2318 Otherwise, the scaling is done in-situ
2319 The default is taken from .asaprc (False)
2320
2321 """
2322 if insitu is None: insitu = rcParams['insitu']
2323 varlist = vars()
2324
2325 if plot:
2326 asaplog.post()
2327 asaplog.push("Verification plot is not implemtnetd yet.")
2328 asaplog.post("WARN")
2329
2330 s = self.copy()
2331 s._regrid_specchan(width)
2332
2333 s._add_history("regrid_channel", varlist)
2334
2335# if plot:
2336# from asap.asapplotter import new_asaplot
2337# theplot = new_asaplot(rcParams['plotter.gui'])
2338# theplot.set_panels()
2339# ylab=s._get_ordinate_label()
2340# #theplot.palette(0,["#777777","red"])
2341# for r in xrange(s.nrow()):
2342# xsm=s._getabcissa(r)
2343# ysm=s._getspectrum(r)
2344# xorg=orgscan._getabcissa(r)
2345# yorg=orgscan._getspectrum(r)
2346# theplot.clear()
2347# theplot.hold()
2348# theplot.set_axes('ylabel',ylab)
2349# theplot.set_axes('xlabel',s._getabcissalabel(r))
2350# theplot.set_axes('title',s._getsourcename(r))
2351# theplot.set_line(label='Original',color="#777777")
2352# theplot.plot(xorg,yorg)
2353# theplot.set_line(label='Smoothed',color="red")
2354# theplot.plot(xsm,ysm)
2355# ### Ugly part for legend
2356# for i in [0,1]:
2357# theplot.subplots[0]['lines'].append(
2358# [theplot.subplots[0]['axes'].lines[i]]
2359# )
2360# theplot.release()
2361# ### Ugly part for legend
2362# theplot.subplots[0]['lines']=[]
2363# res = raw_input("Accept smoothing ([y]/n): ")
2364# if res.upper() == 'N':
2365# s._setspectrum(yorg, r)
2366# theplot.quit()
2367# del theplot
2368# del orgscan
2369
2370 if insitu: self._assign(s)
2371 else: return s
2372
2373 @asaplog_post_dec
[2186]2374 def _parse_wn(self, wn):
2375 if isinstance(wn, list) or isinstance(wn, tuple):
2376 return wn
2377 elif isinstance(wn, int):
2378 return [ wn ]
2379 elif isinstance(wn, str):
[2277]2380 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2381 val = wn.split('-')
2382 val = [int(val[0]), int(val[1])]
2383 val.sort()
2384 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2385 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2386 val = int(wn[2:])+1
2387 res = [i for i in xrange(val)]
[2277]2388 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2389 val = int(wn[:-2])+1
2390 res = [i for i in xrange(val)]
[2277]2391 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2392 val = int(wn[1:])
2393 res = [i for i in xrange(val)]
[2277]2394 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2395 val = int(wn[:-1])
2396 res = [i for i in xrange(val)]
[2411]2397 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2398 # then interpreted in C++
2399 # side as [a,a+1,...,a_nyq]
2400 # (CAS-3759)
[2186]2401 val = int(wn[2:])
[2411]2402 res = [val, -999]
2403 #res = [i for i in xrange(val, self.nchan()/2+1)]
2404 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2405 # then interpreted in C++
2406 # side as [a,a+1,...,a_nyq]
2407 # (CAS-3759)
[2186]2408 val = int(wn[:-2])
[2411]2409 res = [val, -999]
2410 #res = [i for i in xrange(val, self.nchan()/2+1)]
2411 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2412 # then interpreted in C++
2413 # side as [a+1,a+2,...,a_nyq]
2414 # (CAS-3759)
[2186]2415 val = int(wn[1:])+1
[2411]2416 res = [val, -999]
2417 #res = [i for i in xrange(val, self.nchan()/2+1)]
2418 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2419 # then interpreted in C++
2420 # side as [a+1,a+2,...,a_nyq]
2421 # (CAS-3759)
[2186]2422 val = int(wn[:-1])+1
[2411]2423 res = [val, -999]
2424 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2425
[2186]2426 return res
2427 else:
2428 msg = 'wrong value given for addwn/rejwn'
2429 raise RuntimeError(msg)
2430
2431
[1862]2432 @asaplog_post_dec
    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None, clipthresh=None,
                          clipniter=None, plot=None,
                          getresidual=None, showprogress=None,
                          minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the call arguments for the history entry,
            # taken before the None defaults are substituted below
            varlist = vars()

            # operate in place or on a copy depending on insitu
            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            # delegate the actual fitting to the C++ layer; wave-number
            # specifications are expanded by _parse_wn first
            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                                        str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn), clipthresh,
                                        clipniter, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow), outlog,
                                        blfile)
            workscan._add_history('sinusoid_baseline', varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # re-raise low-level fitting failures via the common handler
            raise_fitting_failure_exception(e)
[2047]2551
2552
[2186]2553 @asaplog_post_dec
    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None, clipthresh=None,
                               clipniter=None, edge=None, threshold=None,
                               chan_avg_limit=None, plot=None,
                               getresidual=None, showprogress=None,
                               minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [0].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the call arguments for the history entry,
            # taken before the None defaults are substituted below
            varlist = vars()

            # operate in place or on a copy depending on insitu
            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            # delegate line finding plus fitting to the C++ layer
            workscan._auto_sinusoid_baseline(mask, applyfft,
                                             fftmethod.lower(),
                                             str(fftthresh).lower(),
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # re-raise low-level fitting failures via the common handler
            raise_fitting_failure_exception(e)
[2047]2699
2700 @asaplog_post_dec
    def cspline_baseline(self, insitu=None, mask=None, npiece=None,
                         clipthresh=None, clipniter=None, plot=None,
                         getresidual=None, showprogress=None, minnrow=None,
                         outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).

        Parameters:
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         An optional mask
            npiece:       Number of pieces. (default is 2)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a cubic spline consisting of 2 pieces
            # (i.e., 1 internal knot),
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the call arguments for the history entry,
            # taken before the None defaults are substituted below
            varlist = vars()

            # operate in place or on a copy depending on insitu
            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            # delegate the actual spline fitting to the C++ layer
            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
                                       getresidual,
                                       pack_progress_params(showprogress,
                                                            minnrow), outlog,
                                       blfile)
            workscan._add_history("cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # re-raise low-level fitting failures via the common handler
            raise_fitting_failure_exception(e)
[1855]2781
[2186]2782 @asaplog_post_dec
    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            npiece:         Number of pieces. (default is 2)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the call arguments for the history entry,
            # taken before the None defaults are substituted below
            varlist = vars()

            # operate in place or on a copy depending on insitu
            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            # delegate line finding plus spline fitting to the C++ layer
            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
                                            clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog, blfile)
            workscan._add_history("auto_cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # re-raise low-level fitting failures via the common handler
            raise_fitting_failure_exception(e)
[513]2890
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            order:         the order of the polynomial (default is 0)
            plot:          plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # Capture the call arguments first so the history entry
            # records them before any defaults are substituted below.
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # An empty mask means "use all channels" on the C++ side.
            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # Interactive path: fit row by row in Python with the
                # asap fitter so each fit can be inspected and accepted.
                # NOTE(review): the output file is only appended to if it
                # already exists at this point -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # Combine the user mask with the row's own flags.
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # Rejected fits leave the spectrum untouched.
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # Store either the residual or the fitted baseline.
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # Release plotter resources.
                # NOTE(review): presumably f._p is set by f.plot(); verify
                # behaviour when the scantable has zero rows.
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # Non-interactive path: delegate the whole loop to C++.
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # Re-raise fitting problems as a uniform asap error.
            raise_fitting_failure_exception(e)
[1907]3007
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # Capture the call arguments before defaults are filled in,
            # so the history entry records what the caller passed.
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # An empty mask means "use all channels" on the C++ side.
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # Flatten the edge spec into one (left, right) pair per IF.
            edge = normalise_edge_param(edge)

            if plot:
                # Interactive path: line-find and fit row by row in
                # Python so each fit can be inspected and accepted.
                # NOTE(review): the output file is only appended to if it
                # already exists at this point -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # Two edge values (left, right) are stored per IF.
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2]) # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # Use the line-free mask produced by the linefinder.
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # Rejected fits leave the spectrum untouched.
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # Store either the residual or the fitted baseline.
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # Release plotter resources.
                # NOTE(review): presumably f._p is set by f.plot(); verify
                # behaviour when the scantable has zero rows.
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # Non-interactive path: delegate the whole loop to C++.
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # Re-raise fitting problems as a uniform asap error.
            raise_fitting_failure_exception(e)
[2012]3159
3160 def _init_blinfo(self):
3161 """\
3162 Initialise the following three auxiliary members:
3163 blpars : parameters of the best-fit baseline,
3164 masklists : mask data (edge positions of masked channels) and
3165 actualmask : mask data (in boolean list),
3166 to keep for use later (including output to logger/text files).
3167 Used by poly_baseline() and auto_poly_baseline() in case of
3168 'plot=True'.
3169 """
3170 self.blpars = []
3171 self.masklists = []
3172 self.actualmask = []
3173 return
[880]3174
[2012]3175 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3176 """\
3177 Append baseline-fitting related info to blpars, masklist and
3178 actualmask.
3179 """
3180 self.blpars.append(data_blpars)
3181 self.masklists.append(data_masklists)
3182 self.actualmask.append(data_actualmask)
3183 return
3184
[1862]3185 @asaplog_post_dec
[914]3186 def rotate_linpolphase(self, angle):
[1846]3187 """\
[914]3188 Rotate the phase of the complex polarization O=Q+iU correlation.
3189 This is always done in situ in the raw data. So if you call this
3190 function more than once then each call rotates the phase further.
[1846]3191
[914]3192 Parameters:
[1846]3193
[914]3194 angle: The angle (degrees) to rotate (add) by.
[1846]3195
3196 Example::
3197
[914]3198 scan.rotate_linpolphase(2.3)
[1846]3199
[914]3200 """
3201 varlist = vars()
[936]3202 self._math._rotate_linpolphase(self, angle)
[914]3203 self._add_history("rotate_linpolphase", varlist)
3204 return
[710]3205
[1862]3206 @asaplog_post_dec
[914]3207 def rotate_xyphase(self, angle):
[1846]3208 """\
[914]3209 Rotate the phase of the XY correlation. This is always done in situ
3210 in the data. So if you call this function more than once
3211 then each call rotates the phase further.
[1846]3212
[914]3213 Parameters:
[1846]3214
[914]3215 angle: The angle (degrees) to rotate (add) by.
[1846]3216
3217 Example::
3218
[914]3219 scan.rotate_xyphase(2.3)
[1846]3220
[914]3221 """
3222 varlist = vars()
[936]3223 self._math._rotate_xyphase(self, angle)
[914]3224 self._add_history("rotate_xyphase", varlist)
3225 return
3226
[1862]3227 @asaplog_post_dec
[914]3228 def swap_linears(self):
[1846]3229 """\
[1573]3230 Swap the linear polarisations XX and YY, or better the first two
[1348]3231 polarisations as this also works for ciculars.
[914]3232 """
3233 varlist = vars()
[936]3234 self._math._swap_linears(self)
[914]3235 self._add_history("swap_linears", varlist)
3236 return
3237
[1862]3238 @asaplog_post_dec
[914]3239 def invert_phase(self):
[1846]3240 """\
[914]3241 Invert the phase of the complex polarisation
3242 """
3243 varlist = vars()
[936]3244 self._math._invert_phase(self)
[914]3245 self._add_history("invert_phase", varlist)
3246 return
3247
[1862]3248 @asaplog_post_dec
[876]3249 def add(self, offset, insitu=None):
[1846]3250 """\
[513]3251 Return a scan where all spectra have the offset added
[1846]3252
[513]3253 Parameters:
[1846]3254
[513]3255 offset: the offset
[1855]3256
[513]3257 insitu: if False a new scantable is returned.
3258 Otherwise, the scaling is done in-situ
3259 The default is taken from .asaprc (False)
[1846]3260
[513]3261 """
3262 if insitu is None: insitu = rcParams['insitu']
[876]3263 self._math._setinsitu(insitu)
[513]3264 varlist = vars()
[876]3265 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3266 s._add_history("add", varlist)
[876]3267 if insitu:
3268 self._assign(s)
3269 else:
[513]3270 return s
3271
[1862]3272 @asaplog_post_dec
[1308]3273 def scale(self, factor, tsys=True, insitu=None):
[1846]3274 """\
3275
[1938]3276 Return a scan where all spectra are scaled by the given 'factor'
[1846]3277
[513]3278 Parameters:
[1846]3279
[1819]3280 factor: the scaling factor (float or 1D float list)
[1855]3281
[513]3282 insitu: if False a new scantable is returned.
3283 Otherwise, the scaling is done in-situ
3284 The default is taken from .asaprc (False)
[1855]3285
[513]3286 tsys: if True (default) then apply the operation to Tsys
3287 as well as the data
[1846]3288
[513]3289 """
3290 if insitu is None: insitu = rcParams['insitu']
[876]3291 self._math._setinsitu(insitu)
[513]3292 varlist = vars()
[1819]3293 s = None
3294 import numpy
3295 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]3296 if isinstance(factor[0], list) or isinstance(factor[0],
3297 numpy.ndarray):
[1819]3298 from asapmath import _array2dOp
[2320]3299 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]3300 else:
[2320]3301 s = scantable( self._math._arrayop( self, factor,
3302 "MUL", tsys ) )
[1819]3303 else:
[2320]3304 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]3305 s._add_history("scale", varlist)
[876]3306 if insitu:
3307 self._assign(s)
3308 else:
[513]3309 return s
3310
[2349]3311 @preserve_selection
3312 def set_sourcetype(self, match, matchtype="pattern",
[1504]3313 sourcetype="reference"):
[1846]3314 """\
[1502]3315 Set the type of the source to be an source or reference scan
[1846]3316 using the provided pattern.
3317
[1502]3318 Parameters:
[1846]3319
[1504]3320 match: a Unix style pattern, regular expression or selector
[1855]3321
[1504]3322 matchtype: 'pattern' (default) UNIX style pattern or
3323 'regex' regular expression
[1855]3324
[1502]3325 sourcetype: the type of the source to use (source/reference)
[1846]3326
[1502]3327 """
3328 varlist = vars()
3329 stype = -1
[2480]3330 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
[1502]3331 stype = 1
[2480]3332 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
[1502]3333 stype = 0
[1504]3334 else:
[2480]3335 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
[1504]3336 if matchtype.lower().startswith("p"):
3337 matchtype = "pattern"
3338 elif matchtype.lower().startswith("r"):
3339 matchtype = "regex"
3340 else:
3341 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3342 sel = selector()
3343 if isinstance(match, selector):
3344 sel = match
3345 else:
[2480]3346 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
3347 self.set_selection(sel)
[1502]3348 self._setsourcetype(stype)
[1573]3349 self._add_history("set_sourcetype", varlist)
[1502]3350
[1862]3351 @asaplog_post_dec
[1857]3352 @preserve_selection
[1819]3353 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]3354 """\
[670]3355 This function allows to build quotients automatically.
[1819]3356 It assumes the observation to have the same number of
[670]3357 "ons" and "offs"
[1846]3358
[670]3359 Parameters:
[1846]3360
[710]3361 preserve: you can preserve (default) the continuum or
3362 remove it. The equations used are
[1857]3363
[670]3364 preserve: Output = Toff * (on/off) - Toff
[1857]3365
[1070]3366 remove: Output = Toff * (on/off) - Ton
[1855]3367
[1573]3368 mode: the on/off detection mode
[1348]3369 'paired' (default)
3370 identifies 'off' scans by the
3371 trailing '_R' (Mopra/Parkes) or
3372 '_e'/'_w' (Tid) and matches
3373 on/off pairs from the observing pattern
[1502]3374 'time'
3375 finds the closest off in time
[1348]3376
[1857]3377 .. todo:: verify argument is not implemented
3378
[670]3379 """
[1857]3380 varlist = vars()
[1348]3381 modes = ["time", "paired"]
[670]3382 if not mode in modes:
[876]3383 msg = "please provide valid mode. Valid modes are %s" % (modes)
3384 raise ValueError(msg)
[1348]3385 s = None
3386 if mode.lower() == "paired":
[1857]3387 sel = self.get_selection()
[1875]3388 sel.set_query("SRCTYPE==psoff")
[1356]3389 self.set_selection(sel)
[1348]3390 offs = self.copy()
[1875]3391 sel.set_query("SRCTYPE==pson")
[1356]3392 self.set_selection(sel)
[1348]3393 ons = self.copy()
3394 s = scantable(self._math._quotient(ons, offs, preserve))
3395 elif mode.lower() == "time":
3396 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]3397 s._add_history("auto_quotient", varlist)
[876]3398 return s
[710]3399
[1862]3400 @asaplog_post_dec
[1145]3401 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3402 """\
[1143]3403 Form a quotient using "off" beams when observing in "MX" mode.
[1846]3404
[1143]3405 Parameters:
[1846]3406
[1145]3407 mask: an optional mask to be used when weight == 'stddev'
[1855]3408
[1143]3409 weight: How to average the off beams. Default is 'median'.
[1855]3410
[1145]3411 preserve: you can preserve (default) the continuum or
[1855]3412 remove it. The equations used are:
[1846]3413
[1855]3414 preserve: Output = Toff * (on/off) - Toff
3415
3416 remove: Output = Toff * (on/off) - Ton
3417
[1217]3418 """
[1593]3419 mask = mask or ()
[1141]3420 varlist = vars()
3421 on = scantable(self._math._mx_extract(self, 'on'))
[1143]3422 preoff = scantable(self._math._mx_extract(self, 'off'))
3423 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3424 from asapmath import quotient
[1145]3425 q = quotient(on, off, preserve)
[1143]3426 q._add_history("mx_quotient", varlist)
[1217]3427 return q
[513]3428
[1862]3429 @asaplog_post_dec
[718]3430 def freq_switch(self, insitu=None):
[1846]3431 """\
[718]3432 Apply frequency switching to the data.
[1846]3433
[718]3434 Parameters:
[1846]3435
[718]3436 insitu: if False a new scantable is returned.
3437 Otherwise, the swictching is done in-situ
3438 The default is taken from .asaprc (False)
[1846]3439
[718]3440 """
3441 if insitu is None: insitu = rcParams['insitu']
[876]3442 self._math._setinsitu(insitu)
[718]3443 varlist = vars()
[876]3444 s = scantable(self._math._freqswitch(self))
[1118]3445 s._add_history("freq_switch", varlist)
[1856]3446 if insitu:
3447 self._assign(s)
3448 else:
3449 return s
[718]3450
[1862]3451 @asaplog_post_dec
[780]3452 def recalc_azel(self):
[1846]3453 """Recalculate the azimuth and elevation for each position."""
[780]3454 varlist = vars()
[876]3455 self._recalcazel()
[780]3456 self._add_history("recalc_azel", varlist)
3457 return
3458
[1862]3459 @asaplog_post_dec
[513]3460 def __add__(self, other):
3461 varlist = vars()
3462 s = None
3463 if isinstance(other, scantable):
[1573]3464 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3465 elif isinstance(other, float):
[876]3466 s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3467 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3468 if isinstance(other[0], list) \
3469 or isinstance(other[0], numpy.ndarray):
[2144]3470 from asapmath import _array2dOp
3471 s = _array2dOp( self.copy(), other, "ADD", False )
3472 else:
[2349]3473 s = scantable( self._math._arrayop( self.copy(), other,
3474 "ADD", False ) )
[513]3475 else:
[718]3476 raise TypeError("Other input is not a scantable or float value")
[513]3477 s._add_history("operator +", varlist)
3478 return s
3479
[1862]3480 @asaplog_post_dec
[513]3481 def __sub__(self, other):
3482 """
3483 implicit on all axes and on Tsys
3484 """
3485 varlist = vars()
3486 s = None
3487 if isinstance(other, scantable):
[1588]3488 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3489 elif isinstance(other, float):
[876]3490 s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3491 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3492 if isinstance(other[0], list) \
3493 or isinstance(other[0], numpy.ndarray):
[2144]3494 from asapmath import _array2dOp
3495 s = _array2dOp( self.copy(), other, "SUB", False )
3496 else:
[2349]3497 s = scantable( self._math._arrayop( self.copy(), other,
3498 "SUB", False ) )
[513]3499 else:
[718]3500 raise TypeError("Other input is not a scantable or float value")
[513]3501 s._add_history("operator -", varlist)
3502 return s
[710]3503
[1862]3504 @asaplog_post_dec
[513]3505 def __mul__(self, other):
3506 """
3507 implicit on all axes and on Tsys
3508 """
3509 varlist = vars()
3510 s = None
3511 if isinstance(other, scantable):
[1588]3512 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3513 elif isinstance(other, float):
[876]3514 s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3515 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3516 if isinstance(other[0], list) \
3517 or isinstance(other[0], numpy.ndarray):
[2144]3518 from asapmath import _array2dOp
3519 s = _array2dOp( self.copy(), other, "MUL", False )
3520 else:
[2349]3521 s = scantable( self._math._arrayop( self.copy(), other,
3522 "MUL", False ) )
[513]3523 else:
[718]3524 raise TypeError("Other input is not a scantable or float value")
[513]3525 s._add_history("operator *", varlist)
3526 return s
3527
[710]3528
[1862]3529 @asaplog_post_dec
[513]3530 def __div__(self, other):
3531 """
3532 implicit on all axes and on Tsys
3533 """
3534 varlist = vars()
3535 s = None
3536 if isinstance(other, scantable):
[1589]3537 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3538 elif isinstance(other, float):
3539 if other == 0.0:
[718]3540 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3541 s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3542 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3543 if isinstance(other[0], list) \
3544 or isinstance(other[0], numpy.ndarray):
[2144]3545 from asapmath import _array2dOp
3546 s = _array2dOp( self.copy(), other, "DIV", False )
3547 else:
[2349]3548 s = scantable( self._math._arrayop( self.copy(), other,
3549 "DIV", False ) )
[513]3550 else:
[718]3551 raise TypeError("Other input is not a scantable or float value")
[513]3552 s._add_history("operator /", varlist)
3553 return s
3554
[1862]3555 @asaplog_post_dec
[530]3556 def get_fit(self, row=0):
[1846]3557 """\
[530]3558 Print or return the stored fits for a row in the scantable
[1846]3559
[530]3560 Parameters:
[1846]3561
[530]3562 row: the row which the fit has been applied to.
[1846]3563
[530]3564 """
3565 if row > self.nrow():
3566 return
[976]3567 from asap.asapfit import asapfit
[530]3568 fit = asapfit(self._getfit(row))
[1859]3569 asaplog.push( '%s' %(fit) )
3570 return fit.as_dict()
[530]3571
[2349]3572 @preserve_selection
[1483]3573 def flag_nans(self):
[1846]3574 """\
[1483]3575 Utility function to flag NaN values in the scantable.
3576 """
3577 import numpy
3578 basesel = self.get_selection()
3579 for i in range(self.nrow()):
[1589]3580 sel = self.get_row_selector(i)
3581 self.set_selection(basesel+sel)
[1483]3582 nans = numpy.isnan(self._getspectrum(0))
3583 if numpy.any(nans):
3584 bnans = [ bool(v) for v in nans]
3585 self.flag(bnans)
3586
[1588]3587 def get_row_selector(self, rowno):
[1992]3588 return selector(rows=[rowno])
[1573]3589
[484]3590 def _add_history(self, funcname, parameters):
[1435]3591 if not rcParams['scantable.history']:
3592 return
[484]3593 # create date
3594 sep = "##"
3595 from datetime import datetime
3596 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3597 hist = dstr+sep
3598 hist += funcname+sep#cdate+sep
[2349]3599 if parameters.has_key('self'):
3600 del parameters['self']
[1118]3601 for k, v in parameters.iteritems():
[484]3602 if type(v) is dict:
[1118]3603 for k2, v2 in v.iteritems():
[484]3604 hist += k2
3605 hist += "="
[1118]3606 if isinstance(v2, scantable):
[484]3607 hist += 'scantable'
3608 elif k2 == 'mask':
[1118]3609 if isinstance(v2, list) or isinstance(v2, tuple):
[513]3610 hist += str(self._zip_mask(v2))
3611 else:
3612 hist += str(v2)
[484]3613 else:
[513]3614 hist += str(v2)
[484]3615 else:
3616 hist += k
3617 hist += "="
[1118]3618 if isinstance(v, scantable):
[484]3619 hist += 'scantable'
3620 elif k == 'mask':
[1118]3621 if isinstance(v, list) or isinstance(v, tuple):
[513]3622 hist += str(self._zip_mask(v))
3623 else:
3624 hist += str(v)
[484]3625 else:
3626 hist += str(v)
3627 hist += sep
3628 hist = hist[:-2] # remove trailing '##'
3629 self._addhistory(hist)
3630
[710]3631
[484]3632 def _zip_mask(self, mask):
3633 mask = list(mask)
3634 i = 0
3635 segments = []
3636 while mask[i:].count(1):
3637 i += mask[i:].index(1)
3638 if mask[i:].count(0):
3639 j = i + mask[i:].index(0)
3640 else:
[710]3641 j = len(mask)
[1118]3642 segments.append([i, j])
[710]3643 i = j
[484]3644 return segments
[714]3645
[626]3646 def _get_ordinate_label(self):
3647 fu = "("+self.get_fluxunit()+")"
3648 import re
3649 lbl = "Intensity"
[1118]3650 if re.match(".K.", fu):
[626]3651 lbl = "Brightness Temperature "+ fu
[1118]3652 elif re.match(".Jy.", fu):
[626]3653 lbl = "Flux density "+ fu
3654 return lbl
[710]3655
[876]3656 def _check_ifs(self):
[2349]3657# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]3658 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3659 nchans = filter(lambda t: t > 0, nchans)
[876]3660 return (sum(nchans)/len(nchans) == nchans[0])
[976]3661
[1862]3662 @asaplog_post_dec
[1916]3663 def _fill(self, names, unit, average, opts={}):
[976]3664 first = True
3665 fullnames = []
3666 for name in names:
3667 name = os.path.expandvars(name)
3668 name = os.path.expanduser(name)
3669 if not os.path.exists(name):
3670 msg = "File '%s' does not exists" % (name)
3671 raise IOError(msg)
3672 fullnames.append(name)
3673 if average:
3674 asaplog.push('Auto averaging integrations')
[1079]3675 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3676 for name in fullnames:
[1073]3677 tbl = Scantable(stype)
[2004]3678 if is_ms( name ):
3679 r = msfiller( tbl )
3680 else:
3681 r = filler( tbl )
[976]3682 msg = "Importing %s..." % (name)
[1118]3683 asaplog.push(msg, False)
[2349]3684 r.open(name, opts)
[2480]3685 rx = rcParams['scantable.reference']
3686 r.setreferenceexpr(rx)
[1843]3687 r.fill()
[976]3688 if average:
[1118]3689 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3690 if not first:
3691 tbl = self._math._merge([self, tbl])
3692 Scantable.__init__(self, tbl)
[1843]3693 r.close()
[1118]3694 del r, tbl
[976]3695 first = False
[1861]3696 #flush log
3697 asaplog.post()
[976]3698 if unit is not None:
3699 self.set_fluxunit(unit)
[1824]3700 if not is_casapy():
3701 self.set_freqframe(rcParams['scantable.freqframe'])
[976]3702
[2012]3703
[1402]3704 def __getitem__(self, key):
3705 if key < 0:
3706 key += self.nrow()
3707 if key >= self.nrow():
3708 raise IndexError("Row index out of range.")
3709 return self._getspectrum(key)
3710
3711 def __setitem__(self, key, value):
3712 if key < 0:
3713 key += self.nrow()
3714 if key >= self.nrow():
3715 raise IndexError("Row index out of range.")
3716 if not hasattr(value, "__len__") or \
3717 len(value) > self.nchan(self.getif(key)):
3718 raise ValueError("Spectrum length doesn't match.")
3719 return self._setspectrum(value, key)
3720
3721 def __len__(self):
3722 return self.nrow()
3723
3724 def __iter__(self):
3725 for i in range(len(self)):
3726 yield self[i]
Note: See TracBrowser for help on using the repository browser.