source: trunk/python/scantable.py@ 2317

Last change on this file since 2317 was 2315, checked in by Malte Marquarding, 13 years ago

Ticket #247: Make new summary work in standard asap by handling logger i/o

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.4 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2315]4import tempfile
[1948]5import numpy
[1691]6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
[1824]11from asap.env import is_casapy
[876]12from asap._asap import Scantable
[2004]13from asap._asap import filler, msfiller
[1824]14from asap.parameters import rcParams
[1862]15from asap.logging import asaplog, asaplog_post_dec
[1824]16from asap.selector import selector
17from asap.linecatalog import linecatalog
[1600]18from asap.coordinate import coordinate
[1859]19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]20from asap.asapfitter import fitter
[102]21
[1689]22
def preserve_selection(func):
    """Decorator that restores a scantable's selection after *func* runs.

    The selection active before the call is captured and re-applied in a
    ``finally`` block, so it is restored even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # fix: release the handle even if readline() raises
            f.close()
        # any casacore table that is not a Measurement Set is treated
        # as a scantable (see is_ms for the complementary test)
        return l.find('Measurement Set') == -1
    return False
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # fix: release the handle even if readline() raises
            f.close()
        return l.find('Measurement Set') != -1
    return False
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15]
                      (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                      or [[5,5],10], but can NOT be simplified to [5,10].
                      when [5,10] given, it is interpreted as
                      [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation --------------------------------------------------
    # fix: use exception-call syntax (valid on Python 2 and 3) instead of
    # the Python-2-only 'raise E, msg' statement form
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    elif not _is_valid(edge, int):
        raise ValueError("Parameter 'edge' has to be an integer or a "
                         "pair of integers specified as a tuple. "
                         "Nested tuples are allowed "
                         "to make individual selection for different IFs.")

    # --- normalisation -----------------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [ edge[0], edge[0] ]       # e.g. [1] => [1,1]

    # 'edge' applies to all IFs only when it is a flat pair of integers
    commonedge = (len(edge) <= 2 and
                  not any(isinstance(e, (list, tuple)) for e in edge))

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fit failure: log it in verbose mode, re-raise otherwise."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack the progress-display options into the 'true/false,N' string
    form expected by the C++ layer."""
    flag = str(showprogress).lower()
    return '%s,%s' % (flag, minnrow)
175
[876]176class scantable(Scantable):
[1846]177 """\
178 The ASAP container for scans (single-dish data).
[102]179 """
[1819]180
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the constructor arguments before locals accumulate,
        # for the history record at the end
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # [advanced] reference to an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward the optional 'getpt' and
                    # 'antenna' keyword arguments to the MS filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the filler
                    # work out the format
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # list/tuple of file names
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]286
[1862]287 @asaplog_post_dec
[876]288 def save(self, name=None, format=None, overwrite=False):
[1846]289 """\
[1280]290 Store the scantable on disk. This can be an asap (aips++) Table,
291 SDFITS or MS2 format.
[1846]292
[116]293 Parameters:
[1846]294
[1093]295 name: the name of the outputfile. For format "ASCII"
296 this is the root file name (data in 'name'.txt
[497]297 and header in 'name'_header.txt)
[1855]298
[116]299 format: an optional file format. Default is ASAP.
[1855]300 Allowed are:
301
302 * 'ASAP' (save as ASAP [aips++] Table),
303 * 'SDFITS' (save as SDFITS file)
304 * 'ASCII' (saves as ascii text file)
305 * 'MS2' (saves as an casacore MeasurementSet V2)
[2315]306 * 'FITS' (save as image FITS - not readable by
307 class)
[1855]308 * 'CLASS' (save as FITS readable by CLASS)
309
[411]310 overwrite: If the file should be overwritten if it exists.
[256]311 The default False is to return with warning
[411]312 without writing the output. USE WITH CARE.
[1855]313
[1846]314 Example::
315
[116]316 scan.save('myscan.asap')
[1118]317 scan.save('myscan.sdfits', 'SDFITS')
[1846]318
[116]319 """
[411]320 from os import path
[1593]321 format = format or rcParams['scantable.save']
[256]322 suffix = '.'+format.lower()
[1118]323 if name is None or name == "":
[256]324 name = 'scantable'+suffix
[718]325 msg = "No filename given. Using default name %s..." % name
326 asaplog.push(msg)
[411]327 name = path.expandvars(name)
[256]328 if path.isfile(name) or path.isdir(name):
329 if not overwrite:
[718]330 msg = "File %s exists." % name
[1859]331 raise IOError(msg)
[451]332 format2 = format.upper()
333 if format2 == 'ASAP':
[116]334 self._save(name)
[2029]335 elif format2 == 'MS2':
336 msopt = {'ms': {'overwrite': overwrite } }
337 from asap._asap import mswriter
338 writer = mswriter( self )
339 writer.write( name, msopt )
[116]340 else:
[989]341 from asap._asap import stwriter as stw
[1118]342 writer = stw(format2)
343 writer.write(self, name)
[116]344 return
345
[102]346 def copy(self):
[1846]347 """Return a copy of this scantable.
348
349 *Note*:
350
[1348]351 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]352
353 Example::
354
[102]355 copiedscan = scan.copy()
[1846]356
[102]357 """
[876]358 sd = scantable(Scantable._copy(self))
[113]359 return sd
360
[1093]361 def drop_scan(self, scanid=None):
[1846]362 """\
[1093]363 Return a new scantable where the specified scan number(s) has(have)
364 been dropped.
[1846]365
[1093]366 Parameters:
[1846]367
[1093]368 scanid: a (list of) scan number(s)
[1846]369
[1093]370 """
371 from asap import _is_sequence_or_number as _is_valid
372 from asap import _to_list
373 from asap import unique
374 if not _is_valid(scanid):
[2315]375 raise RuntimeError( 'Please specify a scanno to drop from the'
376 ' scantable' )
[1859]377 scanid = _to_list(scanid)
378 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
379 for sid in scanid: allscans.remove(sid)
380 if len(allscans) == 0:
381 raise ValueError("Can't remove all scans")
382 sel = selector(scans=allscans)
383 return self._select_copy(sel)
[1093]384
[1594]385 def _select_copy(self, selection):
386 orig = self.get_selection()
387 self.set_selection(orig+selection)
388 cp = self.copy()
389 self.set_selection(orig)
390 return cp
391
[102]392 def get_scan(self, scanid=None):
[1855]393 """\
[102]394 Return a specific scan (by scanno) or collection of scans (by
395 source name) in a new scantable.
[1846]396
397 *Note*:
398
[1348]399 See scantable.drop_scan() for the inverse operation.
[1846]400
[102]401 Parameters:
[1846]402
[513]403 scanid: a (list of) scanno or a source name, unix-style
404 patterns are accepted for source name matching, e.g.
405 '*_R' gets all 'ref scans
[1846]406
407 Example::
408
[513]409 # get all scans containing the source '323p459'
410 newscan = scan.get_scan('323p459')
411 # get all 'off' scans
412 refscans = scan.get_scan('*_R')
413 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]414 newscan = scan.get_scan([0, 2, 7, 10])
[1846]415
[102]416 """
417 if scanid is None:
[1859]418 raise RuntimeError( 'Please specify a scan no or name to '
419 'retrieve from the scantable' )
[102]420 try:
[946]421 bsel = self.get_selection()
422 sel = selector()
[102]423 if type(scanid) is str:
[946]424 sel.set_name(scanid)
[1594]425 return self._select_copy(sel)
[102]426 elif type(scanid) is int:
[946]427 sel.set_scans([scanid])
[1594]428 return self._select_copy(sel)
[381]429 elif type(scanid) is list:
[946]430 sel.set_scans(scanid)
[1594]431 return self._select_copy(sel)
[381]432 else:
[718]433 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]434 raise TypeError(msg)
[102]435 except RuntimeError:
[1859]436 raise
[102]437
438 def __str__(self):
[2315]439 tempFile = tempfile.NamedTemporaryFile()
440 Scantable._summary(self, tempFile.name)
441 tempFile.seek(0)
442 asaplog.clear()
443 return tempFile.file.read()
[102]444
[2315]445 @asaplog_post_dec
[976]446 def summary(self, filename=None):
[1846]447 """\
[102]448 Print a summary of the contents of this scantable.
[1846]449
[102]450 Parameters:
[1846]451
[1931]452 filename: the name of a file to write the putput to
[102]453 Default - no file output
[1846]454
[102]455 """
[2286]456# info = Scantable._summary(self)
[102]457 if filename is not None:
[256]458 if filename is "":
459 filename = 'scantable_summary.txt'
[415]460 from os.path import expandvars, isdir
[411]461 filename = expandvars(filename)
[2286]462# if not isdir(filename):
463# data = open(filename, 'w')
464# data.write(info)
465# data.close()
466# else:
467 if isdir(filename):
[718]468 msg = "Illegal file name '%s'." % (filename)
[1859]469 raise IOError(msg)
[2286]470 else:
471 filename = ""
472 Scantable._summary(self, filename)
[2290]473# info = Scantable._summary(self, filename)
[2286]474# return page(info)
[710]475
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the spectrum from

        """
        # thin delegator to the C++ accessor
        return self._getspectrum(rowno)
[946]485
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the mask from

        """
        # thin delegator to the C++ accessor
        return self._getmask(rowno)
495
496 def set_spectrum(self, spec, rowno):
[1938]497 """Set the spectrum for the current row in the scantable.
[1846]498
[1471]499 Parameters:
[1846]500
[1855]501 spec: the new spectrum
[1846]502
[1855]503 rowno: the row number to set the spectrum for
504
[1471]505 """
506 assert(len(spec) == self.nchan())
507 return self._setspectrum(spec, rowno)
508
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate object in the python-side helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
531
[946]532 def get_selection(self):
[1846]533 """\
[1005]534 Get the selection object currently set on this scantable.
[1846]535
536 Example::
537
[1005]538 sel = scan.get_selection()
539 sel.set_ifs(0) # select IF 0
540 scan.set_selection(sel) # apply modified selection
[1846]541
[946]542 """
543 return selector(self._getselection())
544
[1576]545 def set_selection(self, selection=None, **kw):
[1846]546 """\
[1005]547 Select a subset of the data. All following operations on this scantable
548 are only applied to thi selection.
[1846]549
[1005]550 Parameters:
[1697]551
[1846]552 selection: a selector object (default unset the selection), or
553 any combination of "pols", "ifs", "beams", "scans",
554 "cycles", "name", "query"
[1697]555
[1846]556 Examples::
[1697]557
[1005]558 sel = selector() # create a selection object
[1118]559 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]560 scan.set_selection(sel) # set the selection
561 scan.summary() # will only print summary of scanno 0 an 3
562 scan.set_selection() # unset the selection
[1697]563 # or the equivalent
564 scan.set_selection(scans=[0,3])
565 scan.summary() # will only print summary of scanno 0 an 3
566 scan.set_selection() # unset the selection
[1846]567
[946]568 """
[1576]569 if selection is None:
570 # reset
571 if len(kw) == 0:
572 selection = selector()
573 else:
574 # try keywords
575 for k in kw:
576 if k not in selector.fields:
577 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
578 selection = selector(**kw)
[946]579 self._setselection(selection)
580
[1819]581 def get_row(self, row=0, insitu=None):
[1846]582 """\
[1819]583 Select a row in the scantable.
584 Return a scantable with single row.
[1846]585
[1819]586 Parameters:
[1846]587
588 row: row no of integration, default is 0.
589 insitu: if False a new scantable is returned. Otherwise, the
590 scaling is done in-situ. The default is taken from .asaprc
591 (False)
592
[1819]593 """
594 if insitu is None: insitu = rcParams['insitu']
595 if not insitu:
596 workscan = self.copy()
597 else:
598 workscan = self
599 # Select a row
600 sel=selector()
[1992]601 sel.set_rows([row])
602 #sel.set_scans([workscan.getscan(row)])
603 #sel.set_cycles([workscan.getcycle(row)])
604 #sel.set_beams([workscan.getbeam(row)])
605 #sel.set_ifs([workscan.getif(row)])
606 #sel.set_polarisations([workscan.getpol(row)])
607 #sel.set_name(workscan._getsourcename(row))
[1819]608 workscan.set_selection(sel)
609 if not workscan.nrow() == 1:
610 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
611 raise RuntimeError(msg)
612 del sel
613 if insitu:
614 self._assign(workscan)
615 else:
616 return workscan
617
[1862]618 @asaplog_post_dec
[1907]619 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]620 """\
[135]621 Determine the specified statistic of the current beam/if/pol
[102]622 Takes a 'mask' as an optional parameter to specify which
623 channels should be excluded.
[1846]624
[102]625 Parameters:
[1846]626
[1819]627 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
628 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]629
[135]630 mask: an optional mask specifying where the statistic
[102]631 should be determined.
[1855]632
[1819]633 form: format string to print statistic values
[1846]634
[1907]635 row: row number of spectrum to process.
636 (default is None: for all rows)
[1846]637
[1907]638 Example:
[113]639 scan.set_unit('channel')
[1118]640 msk = scan.create_mask([100, 200], [500, 600])
[135]641 scan.stats(stat='mean', mask=m)
[1846]642
[102]643 """
[1593]644 mask = mask or []
[876]645 if not self._check_ifs():
[1118]646 raise ValueError("Cannot apply mask as the IFs have different "
647 "number of channels. Please use setselection() "
648 "to select individual IFs")
[1819]649 rtnabc = False
650 if stat.lower().endswith('_abc'): rtnabc = True
651 getchan = False
652 if stat.lower().startswith('min') or stat.lower().startswith('max'):
653 chan = self._math._minmaxchan(self, mask, stat)
654 getchan = True
655 statvals = []
[1907]656 if not rtnabc:
657 if row == None:
658 statvals = self._math._stats(self, mask, stat)
659 else:
660 statvals = self._math._statsrow(self, mask, stat, int(row))
[256]661
[1819]662 #def cb(i):
663 # return statvals[i]
[256]664
[1819]665 #return self._row_callback(cb, stat)
[102]666
[1819]667 label=stat
668 #callback=cb
669 out = ""
670 #outvec = []
671 sep = '-'*50
[1907]672
673 if row == None:
674 rows = xrange(self.nrow())
675 elif isinstance(row, int):
676 rows = [ row ]
677
678 for i in rows:
[1819]679 refstr = ''
680 statunit= ''
681 if getchan:
682 qx, qy = self.chan2data(rowno=i, chan=chan[i])
683 if rtnabc:
684 statvals.append(qx['value'])
685 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
686 statunit= '['+qx['unit']+']'
687 else:
688 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
689
690 tm = self._gettime(i)
691 src = self._getsourcename(i)
692 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
693 out += 'Time[%s]:\n' % (tm)
[1907]694 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
695 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
696 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[1819]697 #outvec.append(callback(i))
[1907]698 if len(rows) > 1:
699 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
700 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
701 else:
702 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
703 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
[1819]704 out += sep+"\n"
705
[1859]706 import os
707 if os.environ.has_key( 'USER' ):
708 usr = os.environ['USER']
709 else:
710 import commands
711 usr = commands.getoutput( 'whoami' )
712 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
713 f = open(tmpfile,'w')
714 print >> f, sep
715 print >> f, ' %s %s' % (label, statunit)
716 print >> f, sep
717 print >> f, out
718 f.close()
719 f = open(tmpfile,'r')
720 x = f.readlines()
721 f.close()
722 asaplog.push(''.join(x), False)
723
[1819]724 return statvals
725
726 def chan2data(self, rowno=0, chan=0):
[1846]727 """\
[1819]728 Returns channel/frequency/velocity and spectral value
729 at an arbitrary row and channel in the scantable.
[1846]730
[1819]731 Parameters:
[1846]732
[1819]733 rowno: a row number in the scantable. Default is the
734 first row, i.e. rowno=0
[1855]735
[1819]736 chan: a channel in the scantable. Default is the first
737 channel, i.e. pos=0
[1846]738
[1819]739 """
740 if isinstance(rowno, int) and isinstance(chan, int):
741 qx = {'unit': self.get_unit(),
742 'value': self._getabcissa(rowno)[chan]}
743 qy = {'unit': self.get_fluxunit(),
744 'value': self._getspectrum(rowno)[chan]}
745 return qx, qy
746
[1118]747 def stddev(self, mask=None):
[1846]748 """\
[135]749 Determine the standard deviation of the current beam/if/pol
750 Takes a 'mask' as an optional parameter to specify which
751 channels should be excluded.
[1846]752
[135]753 Parameters:
[1846]754
[135]755 mask: an optional mask specifying where the standard
756 deviation should be determined.
757
[1846]758 Example::
759
[135]760 scan.set_unit('channel')
[1118]761 msk = scan.create_mask([100, 200], [500, 600])
[135]762 scan.stddev(mask=m)
[1846]763
[135]764 """
[1118]765 return self.stats(stat='stddev', mask=mask);
[135]766
[1003]767
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the C++ string vector to a plain Python list
        return list(Scantable.get_column_names(self))
[1003]773
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            # single row: plain column lookup, no log output
            return self._get_column(self._gettsys, row)
        # all rows: _row_callback also pushes a per-row report to the log
        return self._row_callback(self._gettsys, "Tsys")
[256]790
[1730]791
792 def get_weather(self, row=-1):
[1846]793 """\
794 Return the weather informations.
795
796 Parameters:
797
798 row: the rowno to get the information for. (default all rows)
799
800 Returns:
801
802 a dict or list of of dicts of values for the current selection
803
804 """
805
[1730]806 values = self._get_column(self._get_weather, row)
807 if row > -1:
808 return {'temperature': values[0],
809 'pressure': values[1], 'humidity' : values[2],
810 'windspeed' : values[3], 'windaz' : values[4]
811 }
812 else:
813 out = []
814 for r in values:
815
816 out.append({'temperature': r[0],
817 'pressure': r[1], 'humidity' : r[2],
818 'windspeed' : r[3], 'windaz' : r[4]
819 })
820 return out
821
[876]822 def _row_callback(self, callback, label):
823 out = ""
[1118]824 outvec = []
[1590]825 sep = '-'*50
[876]826 for i in range(self.nrow()):
827 tm = self._gettime(i)
828 src = self._getsourcename(i)
[1590]829 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]830 out += 'Time[%s]:\n' % (tm)
[1590]831 if self.nbeam(-1) > 1:
832 out += ' Beam[%d] ' % (self.getbeam(i))
833 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
834 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]835 outvec.append(callback(i))
836 out += '= %3.3f\n' % (outvec[i])
[1590]837 out += sep+'\n'
[1859]838
839 asaplog.push(sep)
840 asaplog.push(" %s" % (label))
841 asaplog.push(sep)
842 asaplog.push(out)
[1861]843 asaplog.post()
[1175]844 return outvec
[256]845
[1947]846 def _get_column(self, callback, row=-1, *args):
[1070]847 """
848 """
849 if row == -1:
[1947]850 return [callback(i, *args) for i in range(self.nrow())]
[1070]851 else:
[1819]852 if 0 <= row < self.nrow():
[1947]853 return callback(row, *args)
[256]854
[1070]855
[1948]856 def get_time(self, row=-1, asdatetime=False, prec=-1):
[1846]857 """\
[113]858 Get a list of time stamps for the observations.
[1938]859 Return a datetime object or a string (default) for each
860 integration time stamp in the scantable.
[1846]861
[113]862 Parameters:
[1846]863
[1348]864 row: row no of integration. Default -1 return all rows
[1855]865
[1348]866 asdatetime: return values as datetime objects rather than strings
[1846]867
[2277]868 prec: number of digits shown. Default -1 to automatic calculation.
[1948]869 Note this number is equals to the digits of MVTime,
870 i.e., 0<prec<3: dates with hh:: only,
871 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
[1947]872 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
873
[113]874 """
[1175]875 from datetime import datetime
[1948]876 if prec < 0:
877 # automagically set necessary precision +1
[1950]878 prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
[1948]879 prec = max(6, int(prec))
880 else:
881 prec = max(0, prec)
882 if asdatetime:
883 #precision can be 1 millisecond at max
884 prec = min(12, prec)
885
[1947]886 times = self._get_column(self._gettime, row, prec)
[1348]887 if not asdatetime:
[1392]888 return times
[1947]889 format = "%Y/%m/%d/%H:%M:%S.%f"
890 if prec < 7:
891 nsub = 1 + (((6-prec)/2) % 3)
892 substr = [".%f","%S","%M"]
893 for i in range(nsub):
894 format = format.replace(substr[i],"")
[1175]895 if isinstance(times, list):
[1947]896 return [datetime.strptime(i, format) for i in times]
[1175]897 else:
[1947]898 return datetime.strptime(times, format)
[102]899
[1348]900
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        Returns:

            a single value (seconds) for a given row, or a list of values
            for row == -1

        """
        return self._get_column(self._getinttime, row)
[1348]912
[1573]913
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
[714]924
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
[794]936
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
[794]947
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
[794]959
    def get_direction(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
971
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
983
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit: optional unit, default is 'channel'. Use one of '*Hz',
                  'km/s', 'channel' or equivalent ''

        """
        # capture the call arguments for the processing-history record
        varlist = vars()
        # 'pixel' and 'channel' are aliases for the native (empty) unit
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        inf[0] = unit  # first element of the coordinate info is the unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
[113]1002
[1862]1003 @asaplog_post_dec
[484]1004 def set_instrument(self, instr):
[1846]1005 """\
[1348]1006 Set the instrument for subsequent processing.
[1846]1007
[358]1008 Parameters:
[1846]1009
[710]1010 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1011 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1012
[358]1013 """
1014 self._setInstrument(instr)
[1118]1015 self._add_history("set_instument", vars())
[358]1016
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype: 'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        # record the call (arguments via the live locals dict) in the history
        self._add_history("set_feedtype", vars())
1029
    @asaplog_post_dec
    def set_doppler(self, doppler='RADIO'):
        """\
        Set the doppler for all following operations on this scantable.

        Parameters:

            doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'

        """
        # NOTE(review): varlist and the vars() passed below are the same
        # live locals dict in CPython, so both spellings are equivalent
        varlist = vars()
        inf = list(self._getcoordinfo())
        inf[2] = doppler  # third element of the coordinate info is the doppler
        self._setcoordinfo(inf)
        self._add_history("set_doppler", vars())
[710]1045
[1862]1046 @asaplog_post_dec
[226]1047 def set_freqframe(self, frame=None):
[1846]1048 """\
[113]1049 Set the frame type of the Spectral Axis.
[1846]1050
[113]1051 Parameters:
[1846]1052
[591]1053 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1054 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1055 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1056
1057 Example::
1058
[113]1059 scan.set_freqframe('BARY')
[1846]1060
[113]1061 """
[1593]1062 frame = frame or rcParams['scantable.freqframe']
[484]1063 varlist = vars()
[1819]1064 # "REST" is not implemented in casacore
1065 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1066 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1067 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1068 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1069
[989]1070 if frame in valid:
[113]1071 inf = list(self._getcoordinfo())
1072 inf[1] = frame
1073 self._setcoordinfo(inf)
[1118]1074 self._add_history("set_freqframe", varlist)
[102]1075 else:
[1118]1076 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1077 raise TypeError(msg)
[710]1078
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame: an optional frame type, default ''. Valid frames are:
                   'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # validation and storage are delegated to the C++ base class
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
[989]1097
[113]1098 def get_unit(self):
[1846]1099 """\
[113]1100 Get the default unit set in this scantable
[1846]1101
[113]1102 Returns:
[1846]1103
[113]1104 A unit string
[1846]1105
[113]1106 """
1107 inf = self._getcoordinfo()
1108 unit = inf[0]
1109 if unit == '': unit = 'channel'
1110 return unit
[102]1111
[1862]1112 @asaplog_post_dec
[158]1113 def get_abcissa(self, rowno=0):
[1846]1114 """\
[158]1115 Get the abcissa in the current coordinate setup for the currently
[113]1116 selected Beam/IF/Pol
[1846]1117
[113]1118 Parameters:
[1846]1119
[226]1120 rowno: an optional row number in the scantable. Default is the
1121 first row, i.e. rowno=0
[1846]1122
[113]1123 Returns:
[1846]1124
[1348]1125 The abcissa values and the format string (as a dictionary)
[1846]1126
[113]1127 """
[256]1128 abc = self._getabcissa(rowno)
[710]1129 lbl = self._getabcissalabel(rowno)
[158]1130 return abc, lbl
[113]1131
    @asaplog_post_dec
    def flag(self, row=-1, mask=None, unflag=False):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            row: an optional row number in the scantable.
                 Default -1 flags all rows

            mask: an optional channel mask, created with create_mask. Default
                  (no mask) is all channels.

            unflag: if True, unflag the data

        """
        varlist = vars()
        # None (or any other falsy mask) means "all channels" downstream
        mask = mask or []
        self._flag(row, mask, unflag)
        self._add_history("flag", varlist)
1152
    @asaplog_post_dec
    def flag_row(self, rows=[], unflag=False):
        """\
        Flag the selected data in row-based manner.

        Parameters:

            rows: list of row numbers to be flagged. Default is no row
                  (must be explicitly specified to execute row-based flagging).

            unflag: if True, unflag the data.

        """
        varlist = vars()
        # NOTE(review): the mutable default [] is only read, never mutated,
        # so the shared-default pitfall does not apply here
        self._flag_row(rows, unflag)
        self._add_history("flag_row", varlist)
1169
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres: upper threshold.

            dthres: lower threshold

            clipoutside: True for flagging data outside the range [dthres:uthres].
                         False for flagging data inside the range.

            unflag: if True, unflag the data.

        """
        varlist = vars()
        # thresholds and mode are handled entirely by the C++ layer
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1190
    @asaplog_post_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """\
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable get interpolated over the region.
        No taper is applied.

        Parameters:

            start:    the start frequency (really a period within the
                      bandwidth)  or period to remove

            end:      the end frequency or period to remove

            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels

            insitu:   if False a new scantable is returned. Otherwise the
                      operation is applied in-situ. The default is taken
                      from .asaprc.

        *Notes*:

            It is recommended to flag edges of the band or strong
            signals beforehand.

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # conversion factors from the supported frequency units to Hz
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        if unit == "":
            # empty unit: start/end are explicit lag channels
            s = scantable(self._math._lag_flag(self, start, end, "lags"))
        else:
            s = scantable(self._math._lag_flag(self, start*base[unit],
                                               end*base[unit], "frequency"))
        s._add_history("lag_flag", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[1001]1230
    @asaplog_post_dec
    def fft(self, rowno=[], mask=[], getrealimag=False):
        """\
        Apply FFT to the spectra.
        Flagged data in the scantable get interpolated over the region.

        Parameters:

            rowno:          The row number(s) to be processed. int, list
                            and tuple are accepted. By default ([]), FFT
                            is applied to the whole data.

            mask:           Auxiliary channel mask(s). Given as a boolean
                            list, it is applied to all specified rows.
                            A list of boolean lists can also be used to
                            apply different masks. In the latter case, the
                            length of 'mask' must be the same as that of
                            'rowno'. The default is [].

            getrealimag:    If True, returns the real and imaginary part
                            values of the complex results.
                            If False (the default), returns the amplitude
                            (absolute value) normalised with Ndata/2 and
                            phase (argument, in unit of radian).

        Returns:

            A list of dictionaries containing the results for each spectrum.
            Each dictionary contains two values, the real and the imaginary
            parts when getrealimag = True, or the amplitude(absolute value)
            and the phase(argument) when getrealimag = False. The key for
            these values are 'real' and 'imag', or 'ampl' and 'phase',
            respectively.
        """
        # normalise rowno to a list of row indices
        # (the mutable defaults are rebound below, never mutated in place)
        if isinstance(rowno, int):
            rowno = [rowno]
        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
            raise TypeError("The row number(s) must be int, list or tuple.")

        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]

        # normalise mask to a list of per-row boolean masks
        if not (isinstance(mask, list) or isinstance(mask, tuple)):
            raise TypeError("The mask must be a boolean list or a list of boolean list.")
        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
        if isinstance(mask[0], bool): mask = [mask]
        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
            raise TypeError("The mask must be a boolean list or a list of boolean list.")

        # a single mask is shared by every selected row
        usecommonmask = (len(mask) == 1)
        if not usecommonmask:
            if len(mask) != len(rowno):
                raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
        for amask in mask:
            if len(amask) != self.nchan():
                raise ValueError("The spectra and the mask have different length.")

        res = []

        imask = 0
        for whichrow in rowno:
            # _fft returns the two result components interleaved as
            # [a0, b0, a1, b1, ...]; the while loop below de-interleaves them
            fspec = self._fft(whichrow, mask[imask], getrealimag)
            nspec = len(fspec)

            i=0
            v1=[]
            v2=[]
            reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}

            while (i < nspec):
                v1.append(fspec[i])
                v2.append(fspec[i+1])
                i+=2

            if getrealimag:
                reselem["real"] += v1
                reselem["imag"] += v2
            else:
                reselem["ampl"] += v1
                reselem["phase"] += v2

            res.append(reselem)

            if not usecommonmask: imask += 1

        return res
1316
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = self.nchan()
        # start with an all-False mask and switch the windows on
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # accept both create_mask([a,b], [c,d]) and create_mask([[a,b],[c,d]])
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            # a single value selects exactly one channel
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # swap so that window is always [low, high]
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
[710]1388
[1931]1389 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1390 """\
[1819]1391 Compute and return a list of mask windows, [min, max].
[1846]1392
[1819]1393 Parameters:
[1846]1394
[1819]1395 mask: channel mask, created with create_mask.
[1855]1396
[1819]1397 row: calcutate the masklist using the specified row
1398 for unit conversions, default is row=0
1399 only necessary if frequency varies over rows.
[1846]1400
[1819]1401 Returns:
[1846]1402
[1819]1403 [min, max], [min2, max2], ...
1404 Pairs of start/end points (inclusive)specifying
1405 the masked regions
[1846]1406
[1819]1407 """
1408 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1409 raise TypeError("The mask should be list or tuple.")
1410 if len(mask) < 2:
1411 raise TypeError("The mask elements should be > 1")
1412 if self.nchan() != len(mask):
1413 msg = "Number of channels in scantable != number of mask elements"
1414 raise TypeError(msg)
1415 data = self._getabcissa(row)
1416 u = self._getcoordinfo()[0]
[1859]1417 if u == "":
1418 u = "channel"
1419 msg = "The current mask window unit is %s" % u
1420 i = self._check_ifs()
1421 if not i:
1422 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1423 if not silent:
1424 asaplog.push(msg)
[1819]1425 masklist=[]
1426 ist, ien = None, None
1427 ist, ien=self.get_mask_indices(mask)
1428 if ist is not None and ien is not None:
1429 for i in xrange(len(ist)):
1430 range=[data[ist[i]],data[ien[i]]]
1431 range.sort()
1432 masklist.append([range[0],range[1]])
1433 return masklist
1434
1435 def get_mask_indices(self, mask=None):
[1846]1436 """\
[1819]1437 Compute and Return lists of mask start indices and mask end indices.
[1855]1438
1439 Parameters:
1440
[1819]1441 mask: channel mask, created with create_mask.
[1846]1442
[1819]1443 Returns:
[1846]1444
[1819]1445 List of mask start indices and that of mask end indices,
1446 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1447
[1819]1448 """
1449 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1450 raise TypeError("The mask should be list or tuple.")
1451 if len(mask) < 2:
1452 raise TypeError("The mask elements should be > 1")
1453 istart=[]
1454 iend=[]
1455 if mask[0]: istart.append(0)
1456 for i in range(len(mask)-1):
1457 if not mask[i] and mask[i+1]:
1458 istart.append(i+1)
1459 elif mask[i] and not mask[i+1]:
1460 iend.append(i)
1461 if mask[len(mask)-1]: iend.append(len(mask)-1)
1462 if len(istart) != len(iend):
1463 raise RuntimeError("Numbers of mask start != mask end.")
1464 for i in range(len(istart)):
1465 if istart[i] > iend[i]:
1466 raise RuntimeError("Mask start index > mask end index")
1467 break
1468 return istart,iend
1469
    @asaplog_post_dec
    def parse_maskexpr(self,maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                               offset=1,minval=min(valid_ifs),
                                               maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                    offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row that belongs to this IF to get
                        # its frequency coordinate and abcissa values
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None stands for "whole spectrum of this IF"
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1596
1597 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1598 """
1599 Parameters:
1600 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1601 typestr : The type of the values in returned list
1602 ('integer' or 'float')
1603 offset : The offset value to subtract from or add to
1604 the boundary value if the selection string
1605 includes '<' or '>'
1606 minval, maxval : The minimum/maximum values to set if the
1607 selection string includes '<' or '>'.
1608 The list element is filled with None by default.
1609 Returns:
1610 A list of min/max pair of selections.
1611 Example:
1612 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1613 returns [[0,2],[5,7],[9,9]]
1614 """
1615 selgroups = selstr.split(';')
1616 sellists = []
1617 if typestr.lower().startswith('int'):
1618 formatfunc = int
1619 else:
1620 formatfunc = float
1621
1622 for currsel in selgroups:
1623 if currsel.find('~') > 0:
1624 minsel = formatfunc(currsel.split('~')[0].strip())
1625 maxsel = formatfunc(currsel.split('~')[1].strip())
1626 elif currsel.strip().startswith('<'):
1627 minsel = minval
1628 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1629 - formatfunc(offset)
1630 elif currsel.strip().startswith('>'):
1631 minsel = formatfunc(currsel.split('>')[1].strip()) \
1632 + formatfunc(offset)
1633 maxsel = maxval
1634 else:
1635 minsel = formatfunc(currsel)
1636 maxsel = formatfunc(currsel)
1637 sellists.append([minsel,maxsel])
1638 return sellists
1639
[1819]1640# def get_restfreqs(self):
1641# """
1642# Get the restfrequency(s) stored in this scantable.
1643# The return value(s) are always of unit 'Hz'
1644# Parameters:
1645# none
1646# Returns:
1647# a list of doubles
1648# """
1649# return list(self._getrestfreqs())
1650
1651 def get_restfreqs(self, ids=None):
[1846]1652 """\
[256]1653 Get the restfrequency(s) stored in this scantable.
1654 The return value(s) are always of unit 'Hz'
[1846]1655
[256]1656 Parameters:
[1846]1657
[1819]1658 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1659 be retrieved
[1846]1660
[256]1661 Returns:
[1846]1662
[1819]1663 dictionary containing ids and a list of doubles for each id
[1846]1664
[256]1665 """
[1819]1666 if ids is None:
1667 rfreqs={}
1668 idlist = self.getmolnos()
1669 for i in idlist:
1670 rfreqs[i]=list(self._getrestfreqs(i))
1671 return rfreqs
1672 else:
1673 if type(ids)==list or type(ids)==tuple:
1674 rfreqs={}
1675 for i in ids:
1676 rfreqs[i]=list(self._getrestfreqs(i))
1677 return rfreqs
1678 else:
1679 return list(self._getrestfreqs(ids))
1680 #return list(self._getrestfreqs(ids))
[102]1681
[931]1682 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1683 """\
[931]1684 Set or replace the restfrequency specified and
[1938]1685 if the 'freqs' argument holds a scalar,
[931]1686 then that rest frequency will be applied to all the selected
1687 data. If the 'freqs' argument holds
1688 a vector, then it MUST be of equal or smaller length than
1689 the number of IFs (and the available restfrequencies will be
1690 replaced by this vector). In this case, *all* data have
1691 the restfrequency set per IF according
1692 to the corresponding value you give in the 'freqs' vector.
[1118]1693 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1694 IF 1 gets restfreq 2e9.
[1846]1695
[1395]1696 You can also specify the frequencies via a linecatalog.
[1153]1697
[931]1698 Parameters:
[1846]1699
[931]1700 freqs: list of rest frequency values or string idenitfiers
[1855]1701
[931]1702 unit: unit for rest frequency (default 'Hz')
[402]1703
[1846]1704
1705 Example::
1706
[1819]1707 # set the given restfrequency for the all currently selected IFs
[931]1708 scan.set_restfreqs(freqs=1.4e9)
[1845]1709 # set restfrequencies for the n IFs (n > 1) in the order of the
1710 # list, i.e
1711 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1712 # len(list_of_restfreqs) == nIF
1713 # for nIF == 1 the following will set multiple restfrequency for
1714 # that IF
[1819]1715 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1716 # set multiple restfrequencies per IF. as a list of lists where
1717 # the outer list has nIF elements, the inner s arbitrary
1718 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1719
[1846]1720 *Note*:
[1845]1721
[931]1722 To do more sophisticate Restfrequency setting, e.g. on a
1723 source and IF basis, use scantable.set_selection() before using
[1846]1724 this function::
[931]1725
[1846]1726 # provided your scantable is called scan
1727 selection = selector()
1728 selection.set_name("ORION*")
1729 selection.set_ifs([1])
1730 scan.set_selection(selection)
1731 scan.set_restfreqs(freqs=86.6e9)
1732
[931]1733 """
1734 varlist = vars()
[1157]1735 from asap import linecatalog
1736 # simple value
[1118]1737 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1738 self._setrestfreqs([freqs], [""], unit)
[1157]1739 # list of values
[1118]1740 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1741 # list values are scalars
[1118]1742 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1743 if len(freqs) == 1:
1744 self._setrestfreqs(freqs, [""], unit)
1745 else:
1746 # allow the 'old' mode of setting mulitple IFs
1747 sel = selector()
1748 savesel = self._getselection()
1749 iflist = self.getifnos()
1750 if len(freqs)>len(iflist):
1751 raise ValueError("number of elements in list of list "
1752 "exeeds the current IF selections")
1753 iflist = self.getifnos()
1754 for i, fval in enumerate(freqs):
1755 sel.set_ifs(iflist[i])
1756 self._setselection(sel)
1757 self._setrestfreqs([fval], [""], unit)
1758 self._setselection(savesel)
1759
1760 # list values are dict, {'value'=, 'name'=)
[1157]1761 elif isinstance(freqs[-1], dict):
[1845]1762 values = []
1763 names = []
1764 for d in freqs:
1765 values.append(d["value"])
1766 names.append(d["name"])
1767 self._setrestfreqs(values, names, unit)
[1819]1768 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1769 sel = selector()
1770 savesel = self._getselection()
[1322]1771 iflist = self.getifnos()
[1819]1772 if len(freqs)>len(iflist):
[1845]1773 raise ValueError("number of elements in list of list exeeds"
1774 " the current IF selections")
1775 for i, fval in enumerate(freqs):
[1322]1776 sel.set_ifs(iflist[i])
[1259]1777 self._setselection(sel)
[1845]1778 self._setrestfreqs(fval, [""], unit)
[1157]1779 self._setselection(savesel)
1780 # freqs are to be taken from a linecatalog
[1153]1781 elif isinstance(freqs, linecatalog):
1782 sel = selector()
1783 savesel = self._getselection()
1784 for i in xrange(freqs.nrow()):
[1322]1785 sel.set_ifs(iflist[i])
[1153]1786 self._setselection(sel)
[1845]1787 self._setrestfreqs([freqs.get_frequency(i)],
1788 [freqs.get_name(i)], "MHz")
[1153]1789 # ensure that we are not iterating past nIF
1790 if i == self.nif()-1: break
1791 self._setselection(savesel)
[931]1792 else:
1793 return
1794 self._add_history("set_restfreqs", varlist)
1795
[1360]1796 def shift_refpix(self, delta):
[1846]1797 """\
[1589]1798 Shift the reference pixel of the Spectra Coordinate by an
1799 integer amount.
[1846]1800
[1589]1801 Parameters:
[1846]1802
[1589]1803 delta: the amount to shift by
[1846]1804
1805 *Note*:
1806
[1589]1807 Be careful using this with broadband data.
[1846]1808
[1360]1809 """
[1731]1810 Scantable.shift_refpix(self, delta)
[931]1811
[1862]1812 @asaplog_post_dec
[1259]1813 def history(self, filename=None):
[1846]1814 """\
[1259]1815 Print the history. Optionally to a file.
[1846]1816
[1348]1817 Parameters:
[1846]1818
[1928]1819 filename: The name of the file to save the history to.
[1846]1820
[1259]1821 """
[484]1822 hist = list(self._gethistory())
[794]1823 out = "-"*80
[484]1824 for h in hist:
[489]1825 if h.startswith("---"):
[1857]1826 out = "\n".join([out, h])
[489]1827 else:
1828 items = h.split("##")
1829 date = items[0]
1830 func = items[1]
1831 items = items[2:]
[794]1832 out += "\n"+date+"\n"
1833 out += "Function: %s\n Parameters:" % (func)
[489]1834 for i in items:
[1938]1835 if i == '':
1836 continue
[489]1837 s = i.split("=")
[1118]1838 out += "\n %s = %s" % (s[0], s[1])
[1857]1839 out = "\n".join([out, "-"*80])
[1259]1840 if filename is not None:
1841 if filename is "":
1842 filename = 'scantable_history.txt'
1843 import os
1844 filename = os.path.expandvars(os.path.expanduser(filename))
1845 if not os.path.isdir(filename):
1846 data = open(filename, 'w')
1847 data.write(out)
1848 data.close()
1849 else:
1850 msg = "Illegal file name '%s'." % (filename)
[1859]1851 raise IOError(msg)
1852 return page(out)
[513]1853 #
1854 # Maths business
1855 #
    @asaplog_post_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """\
        Return the (time) weighted average of a scan.

        *Note*:

            in channels only - align if necessary

        Parameters:

            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)

            scanav:   True averages each scan separately
                      False (default) averages all scans together,

            weight:   Weighting scheme.
                      'none'     (mean no weight)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   ( median averaging)
                      The default is 'tint'

            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.

        Example::

            # time average the scantable without using a mask
            newscan = scan.average_time()

        """
        varlist = vars()
        # translate falsy arguments into the values the C++ layer expects
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        scan = (self, )

        if align:
            # average a velocity-aligned copy instead of self
            scan = (self.freq_align(insitu=False), )
        s = None
        if weight.upper() == 'MEDIAN':
            # median averaging uses a dedicated C++ entry point
            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                     scanav))
        else:
            s = scantable(self._math._average(scan, mask, weight.upper(),
                                              scanav))
        s._add_history("average_time", varlist)
        return s
[710]1908
    @asaplog_post_dec
    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
        """\
        Return a scan where all spectra are converted to either
        Jansky or Kelvin depending upon the flux units of the scan table.
        By default the function tries to look the values up internally.
        If it can't find them (or if you want to over-ride), you must
        specify EITHER jyperk OR eta (and D which it will try to look up
        also if you don't set it). jyperk takes precedence if you set both.

        Parameters:

            jyperk:      the Jy / K conversion factor

            eta:         the aperture efficiency

            d:           the geometric diameter (metres)

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # -1.0 tells the C++ layer to look the value up internally
        # NOTE(review): 'or' also maps an explicit 0 to -1.0 here — presumably
        # intentional since 0 is not a physical value for these quantities
        jyperk = jyperk or -1.0
        d = d or -1.0
        eta = eta or -1.0
        s = scantable(self._math._convertflux(self, d, eta, jyperk))
        s._add_history("convert_flux", varlist)
        if insitu: self._assign(s)
        else: return s
[513]1942
    @asaplog_post_dec
    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
        """\
        Return a scan after applying a gain-elevation correction.
        The correction can be made via either a polynomial or a
        table-based interpolation (and extrapolation if necessary).
        You specify polynomial coefficients, an ascii table or neither.
        If you specify neither, then a polynomial correction will be made
        with built in coefficients known for certain telescopes (an error
        will occur if the instrument is not known).
        The data and Tsys are *divided* by the scaling factors.

        Parameters:

            poly:        Polynomial coefficients (default None) to compute a
                         gain-elevation correction as a function of
                         elevation (in degrees).

            filename:    The name of an ascii file holding correction factors.
                         The first row of the ascii file must give the column
                         names and these MUST include columns
                         "ELEVATION" (degrees) and "FACTOR" (multiply data
                         by this) somewhere.
                         The second row must give the data type of the
                         column. Use 'R' for Real and 'I' for Integer.
                         An example file would be
                         (actual factors are arbitrary) :

                         TIME ELEVATION FACTOR
                         R R R
                         0.1 0 0.8
                         0.2 20 0.85
                         0.3 40 0.9
                         0.4 60 0.85
                         0.5 80 0.8
                         0.6 90 0.75

            method:      Interpolation method when correcting from a table.
                         Values are "nearest", "linear" (default), "cubic"
                         and "spline"

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

        """

        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # an empty tuple tells the C++ layer to use built-in coefficients
        poly = poly or ()
        from os.path import expandvars
        filename = expandvars(filename)
        s = scantable(self._math._gainel(self, poly, filename, method))
        s._add_history("gain_el", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[710]2002
[1862]2003 @asaplog_post_dec
[931]2004 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2005 """\
[513]2006 Return a scan where all rows have been aligned in frequency/velocity.
2007 The alignment frequency frame (e.g. LSRK) is that set by function
2008 set_freqframe.
[1846]2009
[513]2010 Parameters:
[1855]2011
[513]2012 reftime: reference time to align at. By default, the time of
2013 the first row of data is used.
[1855]2014
[513]2015 method: Interpolation method for regridding the spectra.
2016 Choose from "nearest", "linear", "cubic" (default)
2017 and "spline"
[1855]2018
[513]2019 insitu: if False a new scantable is returned.
2020 Otherwise, the scaling is done in-situ
2021 The default is taken from .asaprc (False)
[1846]2022
[513]2023 """
[931]2024 if insitu is None: insitu = rcParams["insitu"]
[876]2025 self._math._setinsitu(insitu)
[513]2026 varlist = vars()
[1593]2027 reftime = reftime or ""
[931]2028 s = scantable(self._math._freq_align(self, reftime, method))
[876]2029 s._add_history("freq_align", varlist)
2030 if insitu: self._assign(s)
2031 else: return s
[513]2032
[1862]2033 @asaplog_post_dec
[1725]2034 def opacity(self, tau=None, insitu=None):
[1846]2035 """\
[513]2036 Apply an opacity correction. The data
2037 and Tsys are multiplied by the correction factor.
[1846]2038
[513]2039 Parameters:
[1855]2040
[1689]2041 tau: (list of) opacity from which the correction factor is
[513]2042 exp(tau*ZD)
[1689]2043 where ZD is the zenith-distance.
2044 If a list is provided, it has to be of length nIF,
2045 nIF*nPol or 1 and in order of IF/POL, e.g.
2046 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2047 if tau is `None` the opacities are determined from a
2048 model.
[1855]2049
[513]2050 insitu: if False a new scantable is returned.
2051 Otherwise, the scaling is done in-situ
2052 The default is taken from .asaprc (False)
[1846]2053
[513]2054 """
2055 if insitu is None: insitu = rcParams['insitu']
[876]2056 self._math._setinsitu(insitu)
[513]2057 varlist = vars()
[1689]2058 if not hasattr(tau, "__len__"):
2059 tau = [tau]
[876]2060 s = scantable(self._math._opacity(self, tau))
2061 s._add_history("opacity", varlist)
2062 if insitu: self._assign(s)
2063 else: return s
[513]2064
[1862]2065 @asaplog_post_dec
[513]2066 def bin(self, width=5, insitu=None):
[1846]2067 """\
[513]2068 Return a scan where all spectra have been binned up.
[1846]2069
[1348]2070 Parameters:
[1846]2071
[513]2072 width: The bin width (default=5) in pixels
[1855]2073
[513]2074 insitu: if False a new scantable is returned.
2075 Otherwise, the scaling is done in-situ
2076 The default is taken from .asaprc (False)
[1846]2077
[513]2078 """
2079 if insitu is None: insitu = rcParams['insitu']
[876]2080 self._math._setinsitu(insitu)
[513]2081 varlist = vars()
[876]2082 s = scantable(self._math._bin(self, width))
[1118]2083 s._add_history("bin", varlist)
[1589]2084 if insitu:
2085 self._assign(s)
2086 else:
2087 return s
[513]2088
    @asaplog_post_dec
    def resample(self, width=5, method='cubic', insitu=None):
        """\
        Return a scan where all spectra have been resampled (regridded)
        onto a new channel grid.

        Parameters:

            width: The resampling width (default=5) in pixels.
                   NOTE(review): presumably the new channel spacing in
                   units of original channels — confirm against the
                   underlying _resample implementation.

            method: Interpolation method used for the resampling.
                    Values are "nearest", "linear", "cubic" (default)
                    and "spline"

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._resample(self, method, width))
        s._add_history("resample", varlist)
        if insitu: self._assign(s)
        else: return s
[513]2114
[1862]2115 @asaplog_post_dec
[946]2116 def average_pol(self, mask=None, weight='none'):
[1846]2117 """\
[946]2118 Average the Polarisations together.
[1846]2119
[946]2120 Parameters:
[1846]2121
[946]2122 mask: An optional mask defining the region, where the
2123 averaging will be applied. The output will have all
2124 specified points masked.
[1855]2125
[946]2126 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2127 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2128
[946]2129 """
2130 varlist = vars()
[1593]2131 mask = mask or ()
[1010]2132 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2133 s._add_history("average_pol", varlist)
[992]2134 return s
[513]2135
[1862]2136 @asaplog_post_dec
[1145]2137 def average_beam(self, mask=None, weight='none'):
[1846]2138 """\
[1145]2139 Average the Beams together.
[1846]2140
[1145]2141 Parameters:
2142 mask: An optional mask defining the region, where the
2143 averaging will be applied. The output will have all
2144 specified points masked.
[1855]2145
[1145]2146 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2147 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2148
[1145]2149 """
2150 varlist = vars()
[1593]2151 mask = mask or ()
[1145]2152 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2153 s._add_history("average_beam", varlist)
2154 return s
2155
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag: Bool indicating whether to turn this on (True) or
                   off (False)

        """
        # capture arguments before they can change, for the history record
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
2170
[1862]2171 @asaplog_post_dec
[992]2172 def convert_pol(self, poltype=None):
[1846]2173 """\
[992]2174 Convert the data to a different polarisation type.
[1565]2175 Note that you will need cross-polarisation terms for most conversions.
[1846]2176
[992]2177 Parameters:
[1855]2178
[992]2179 poltype: The new polarisation type. Valid types are:
[1565]2180 "linear", "circular", "stokes" and "linpol"
[1846]2181
[992]2182 """
2183 varlist = vars()
[1859]2184 s = scantable(self._math._convertpol(self, poltype))
[1118]2185 s._add_history("convert_pol", varlist)
[992]2186 return s
2187
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defaults to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polynomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each individual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # Interactive approval: overlay the original and smoothed
            # spectrum for each row and ask the user to accept or reject.
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2263
[2186]2264 @asaplog_post_dec
2265 def _parse_wn(self, wn):
2266 if isinstance(wn, list) or isinstance(wn, tuple):
2267 return wn
2268 elif isinstance(wn, int):
2269 return [ wn ]
2270 elif isinstance(wn, str):
[2277]2271 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2272 val = wn.split('-')
2273 val = [int(val[0]), int(val[1])]
2274 val.sort()
2275 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2276 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2277 val = int(wn[2:])+1
2278 res = [i for i in xrange(val)]
[2277]2279 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2280 val = int(wn[:-2])+1
2281 res = [i for i in xrange(val)]
[2277]2282 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2283 val = int(wn[1:])
2284 res = [i for i in xrange(val)]
[2277]2285 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2286 val = int(wn[:-1])
2287 res = [i for i in xrange(val)]
[2277]2288 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
[2186]2289 val = int(wn[2:])
2290 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2291 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
[2186]2292 val = int(wn[:-2])
2293 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2294 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
[2186]2295 val = int(wn[1:])+1
2296 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2297 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
[2186]2298 val = int(wn[:-1])+1
2299 res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2300
[2186]2301 return res
2302 else:
2303 msg = 'wrong value given for addwn/rejwn'
2304 raise RuntimeError(msg)
2305
2306
[1862]2307 @asaplog_post_dec
[2277]2308 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
[2269]2309 fftmethod=None, fftthresh=None,
2310 addwn=None, rejwn=None, clipthresh=None,
2311 clipniter=None, plot=None,
2312 getresidual=None, showprogress=None,
2313 minnrow=None, outlog=None, blfile=None):
[2047]2314 """\
[2094]2315 Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2316 Parameters:
[2186]2317 insitu: if False a new scantable is returned.
[2081]2318 Otherwise, the scaling is done in-situ
2319 The default is taken from .asaprc (False)
[2186]2320 mask: an optional mask
2321 applyfft: if True use some method, such as FFT, to find
2322 strongest sinusoidal components in the wavenumber
2323 domain to be used for baseline fitting.
2324 default is True.
2325 fftmethod: method to find the strong sinusoidal components.
2326 now only 'fft' is available and it is the default.
2327 fftthresh: the threshold to select wave numbers to be used for
2328 fitting from the distribution of amplitudes in the
2329 wavenumber domain.
2330 both float and string values accepted.
2331 given a float value, the unit is set to sigma.
2332 for string values, allowed formats include:
2333 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2334 'topx' (= the x strongest ones, e.g. 'top5').
2335 default is 3.0 (unit: sigma).
2336 addwn: the additional wave numbers to be used for fitting.
2337 list or integer value is accepted to specify every
2338 wave numbers. also string value can be used in case
2339 you need to specify wave numbers in a certain range,
2340 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2341 '<a' (= 0,1,...,a-2,a-1),
2342 '>=a' (= a, a+1, ... up to the maximum wave
2343 number corresponding to the Nyquist
2344 frequency for the case of FFT).
2345 default is [].
2346 rejwn: the wave numbers NOT to be used for fitting.
2347 can be set just as addwn but has higher priority:
2348 wave numbers which are specified both in addwn
2349 and rejwn will NOT be used. default is [].
[2081]2350 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2129]2351 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2081]2352 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2353 plot the fit and the residual. In this each
2354 indivual fit has to be approved, by typing 'y'
2355 or 'n'
2356 getresidual: if False, returns best-fit values instead of
2357 residual. (default is True)
[2189]2358 showprogress: show progress status for large data.
2359 default is True.
2360 minnrow: minimum number of input spectra to show.
2361 default is 1000.
[2081]2362 outlog: Output the coefficients of the best-fit
2363 function to logger (default is False)
2364 blfile: Name of a text file in which the best-fit
2365 parameter values to be written
[2186]2366 (default is '': no file/logger output)
[2047]2367
2368 Example:
2369 # return a scan baselined by a combination of sinusoidal curves having
[2081]2370 # wave numbers in spectral window up to 10,
[2047]2371 # also with 3-sigma clipping, iteration up to 4 times
[2186]2372 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]2373
2374 Note:
2375 The best-fit parameter values output in logger and/or blfile are now
2376 based on specunit of 'channel'.
[2047]2377 """
2378
[2186]2379 try:
2380 varlist = vars()
[2047]2381
[2186]2382 if insitu is None: insitu = rcParams['insitu']
2383 if insitu:
2384 workscan = self
2385 else:
2386 workscan = self.copy()
2387
[2277]2388 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2186]2389 if applyfft is None: applyfft = True
2390 if fftmethod is None: fftmethod = 'fft'
2391 if fftthresh is None: fftthresh = 3.0
2392 if addwn is None: addwn = []
2393 if rejwn is None: rejwn = []
2394 if clipthresh is None: clipthresh = 3.0
2395 if clipniter is None: clipniter = 0
2396 if plot is None: plot = False
2397 if getresidual is None: getresidual = True
[2189]2398 if showprogress is None: showprogress = True
2399 if minnrow is None: minnrow = 1000
[2186]2400 if outlog is None: outlog = False
2401 if blfile is None: blfile = ''
[2047]2402
[2081]2403 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2277]2404 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2186]2405 workscan._add_history('sinusoid_baseline', varlist)
[2047]2406
2407 if insitu:
2408 self._assign(workscan)
2409 else:
2410 return workscan
2411
2412 except RuntimeError, e:
[2186]2413 raise_fitting_failure_exception(e)
[2047]2414
2415
[2186]2416 @asaplog_post_dec
[2277]2417 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2418 addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2419 chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
[2189]2420 outlog=None, blfile=None):
[2047]2421 """\
[2277]2422 Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2423 Spectral lines are detected first using linefinder and masked out
2424 to avoid them affecting the baseline solution.
2425
2426 Parameters:
[2189]2427 insitu: if False a new scantable is returned.
2428 Otherwise, the scaling is done in-situ
2429 The default is taken from .asaprc (False)
2430 mask: an optional mask retreived from scantable
2431 applyfft: if True use some method, such as FFT, to find
2432 strongest sinusoidal components in the wavenumber
2433 domain to be used for baseline fitting.
2434 default is True.
2435 fftmethod: method to find the strong sinusoidal components.
2436 now only 'fft' is available and it is the default.
2437 fftthresh: the threshold to select wave numbers to be used for
2438 fitting from the distribution of amplitudes in the
2439 wavenumber domain.
2440 both float and string values accepted.
2441 given a float value, the unit is set to sigma.
2442 for string values, allowed formats include:
[2277]2443 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
[2189]2444 'topx' (= the x strongest ones, e.g. 'top5').
2445 default is 3.0 (unit: sigma).
2446 addwn: the additional wave numbers to be used for fitting.
2447 list or integer value is accepted to specify every
2448 wave numbers. also string value can be used in case
2449 you need to specify wave numbers in a certain range,
2450 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2451 '<a' (= 0,1,...,a-2,a-1),
2452 '>=a' (= a, a+1, ... up to the maximum wave
2453 number corresponding to the Nyquist
2454 frequency for the case of FFT).
2455 default is [].
2456 rejwn: the wave numbers NOT to be used for fitting.
2457 can be set just as addwn but has higher priority:
2458 wave numbers which are specified both in addwn
2459 and rejwn will NOT be used. default is [].
2460 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2277]2461 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2189]2462 edge: an optional number of channel to drop at
2463 the edge of spectrum. If only one value is
2464 specified, the same number will be dropped
2465 from both sides of the spectrum. Default
2466 is to keep all channels. Nested tuples
2467 represent individual edge selection for
2468 different IFs (a number of spectral channels
2469 can be different)
2470 threshold: the threshold used by line finder. It is
2471 better to keep it large as only strong lines
2472 affect the baseline solution.
2473 chan_avg_limit: a maximum number of consequtive spectral
2474 channels to average during the search of
2475 weak and broad lines. The default is no
2476 averaging (and no search for weak lines).
2477 If such lines can affect the fitted baseline
2478 (e.g. a high order polynomial is fitted),
2479 increase this parameter (usually values up
2480 to 8 are reasonable). Most users of this
2481 method should find the default value sufficient.
2482 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2483 plot the fit and the residual. In this each
2484 indivual fit has to be approved, by typing 'y'
2485 or 'n'
2486 getresidual: if False, returns best-fit values instead of
2487 residual. (default is True)
2488 showprogress: show progress status for large data.
2489 default is True.
2490 minnrow: minimum number of input spectra to show.
2491 default is 1000.
2492 outlog: Output the coefficients of the best-fit
2493 function to logger (default is False)
2494 blfile: Name of a text file in which the best-fit
2495 parameter values to be written
2496 (default is "": no file/logger output)
[2047]2497
2498 Example:
[2186]2499 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2500
2501 Note:
2502 The best-fit parameter values output in logger and/or blfile are now
2503 based on specunit of 'channel'.
[2047]2504 """
2505
[2186]2506 try:
2507 varlist = vars()
[2047]2508
[2186]2509 if insitu is None: insitu = rcParams['insitu']
2510 if insitu:
2511 workscan = self
[2047]2512 else:
[2186]2513 workscan = self.copy()
2514
[2277]2515 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2186]2516 if applyfft is None: applyfft = True
2517 if fftmethod is None: fftmethod = 'fft'
2518 if fftthresh is None: fftthresh = 3.0
2519 if addwn is None: addwn = []
2520 if rejwn is None: rejwn = []
2521 if clipthresh is None: clipthresh = 3.0
2522 if clipniter is None: clipniter = 0
2523 if edge is None: edge = (0,0)
2524 if threshold is None: threshold = 3
2525 if chan_avg_limit is None: chan_avg_limit = 1
2526 if plot is None: plot = False
2527 if getresidual is None: getresidual = True
[2189]2528 if showprogress is None: showprogress = True
2529 if minnrow is None: minnrow = 1000
[2186]2530 if outlog is None: outlog = False
2531 if blfile is None: blfile = ''
[2047]2532
[2277]2533 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2534 workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2047]2535 workscan._add_history("auto_sinusoid_baseline", varlist)
2536
2537 if insitu:
2538 self._assign(workscan)
2539 else:
2540 return workscan
2541
2542 except RuntimeError, e:
[2186]2543 raise_fitting_failure_exception(e)
[2047]2544
2545 @asaplog_post_dec
[2277]2546 def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2547 plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
[1846]2548 """\
[2277]2549 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
[513]2550 Parameters:
[2189]2551 insitu: If False a new scantable is returned.
2552 Otherwise, the scaling is done in-situ
2553 The default is taken from .asaprc (False)
2554 mask: An optional mask
2555 npiece: Number of pieces. (default is 2)
2556 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2277]2557 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2189]2558 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2559 plot the fit and the residual. In this each
2560 indivual fit has to be approved, by typing 'y'
2561 or 'n'
2562 getresidual: if False, returns best-fit values instead of
2563 residual. (default is True)
2564 showprogress: show progress status for large data.
2565 default is True.
2566 minnrow: minimum number of input spectra to show.
2567 default is 1000.
2568 outlog: Output the coefficients of the best-fit
2569 function to logger (default is False)
2570 blfile: Name of a text file in which the best-fit
2571 parameter values to be written
2572 (default is "": no file/logger output)
[1846]2573
[2012]2574 Example:
[2277]2575 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
[2012]2576 # also with 3-sigma clipping, iteration up to 4 times
2577 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2578
2579 Note:
2580 The best-fit parameter values output in logger and/or blfile are now
2581 based on specunit of 'channel'.
[2012]2582 """
2583
[2186]2584 try:
2585 varlist = vars()
2586
2587 if insitu is None: insitu = rcParams['insitu']
2588 if insitu:
2589 workscan = self
2590 else:
2591 workscan = self.copy()
[1855]2592
[2277]2593 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2189]2594 if npiece is None: npiece = 2
2595 if clipthresh is None: clipthresh = 3.0
2596 if clipniter is None: clipniter = 0
2597 if plot is None: plot = False
2598 if getresidual is None: getresidual = True
2599 if showprogress is None: showprogress = True
2600 if minnrow is None: minnrow = 1000
2601 if outlog is None: outlog = False
2602 if blfile is None: blfile = ''
[1855]2603
[2012]2604 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2277]2605 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2012]2606 workscan._add_history("cspline_baseline", varlist)
2607
2608 if insitu:
2609 self._assign(workscan)
2610 else:
2611 return workscan
2612
2613 except RuntimeError, e:
[2186]2614 raise_fitting_failure_exception(e)
[1855]2615
[2186]2616 @asaplog_post_dec
[2277]2617 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2618 edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2619 showprogress=None, minnrow=None, outlog=None, blfile=None):
[2012]2620 """\
2621 Return a scan which has been baselined (all rows) by cubic spline
2622 function (piecewise cubic polynomial).
2623 Spectral lines are detected first using linefinder and masked out
2624 to avoid them affecting the baseline solution.
2625
2626 Parameters:
[2189]2627 insitu: if False a new scantable is returned.
2628 Otherwise, the scaling is done in-situ
2629 The default is taken from .asaprc (False)
2630 mask: an optional mask retreived from scantable
2631 npiece: Number of pieces. (default is 2)
2632 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2277]2633 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2189]2634 edge: an optional number of channel to drop at
2635 the edge of spectrum. If only one value is
2636 specified, the same number will be dropped
2637 from both sides of the spectrum. Default
2638 is to keep all channels. Nested tuples
2639 represent individual edge selection for
2640 different IFs (a number of spectral channels
2641 can be different)
2642 threshold: the threshold used by line finder. It is
2643 better to keep it large as only strong lines
2644 affect the baseline solution.
2645 chan_avg_limit: a maximum number of consequtive spectral
2646 channels to average during the search of
2647 weak and broad lines. The default is no
2648 averaging (and no search for weak lines).
2649 If such lines can affect the fitted baseline
2650 (e.g. a high order polynomial is fitted),
2651 increase this parameter (usually values up
2652 to 8 are reasonable). Most users of this
2653 method should find the default value sufficient.
2654 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2655 plot the fit and the residual. In this each
2656 indivual fit has to be approved, by typing 'y'
2657 or 'n'
2658 getresidual: if False, returns best-fit values instead of
2659 residual. (default is True)
2660 showprogress: show progress status for large data.
2661 default is True.
2662 minnrow: minimum number of input spectra to show.
2663 default is 1000.
2664 outlog: Output the coefficients of the best-fit
2665 function to logger (default is False)
2666 blfile: Name of a text file in which the best-fit
2667 parameter values to be written
2668 (default is "": no file/logger output)
[1846]2669
[1907]2670 Example:
[2012]2671 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2672
2673 Note:
2674 The best-fit parameter values output in logger and/or blfile are now
2675 based on specunit of 'channel'.
[2012]2676 """
[1846]2677
[2186]2678 try:
2679 varlist = vars()
[2012]2680
[2186]2681 if insitu is None: insitu = rcParams['insitu']
2682 if insitu:
2683 workscan = self
[1391]2684 else:
[2186]2685 workscan = self.copy()
2686
[2277]2687 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2186]2688 if npiece is None: npiece = 2
2689 if clipthresh is None: clipthresh = 3.0
2690 if clipniter is None: clipniter = 0
2691 if edge is None: edge = (0, 0)
2692 if threshold is None: threshold = 3
2693 if chan_avg_limit is None: chan_avg_limit = 1
2694 if plot is None: plot = False
2695 if getresidual is None: getresidual = True
[2189]2696 if showprogress is None: showprogress = True
2697 if minnrow is None: minnrow = 1000
[2186]2698 if outlog is None: outlog = False
2699 if blfile is None: blfile = ''
[1819]2700
[2277]2701 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2269]2702 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2703 clipniter,
2704 normalise_edge_param(edge),
2705 threshold,
2706 chan_avg_limit, getresidual,
2707 pack_progress_params(showprogress,
2708 minnrow),
2709 outlog, blfile)
[2012]2710 workscan._add_history("auto_cspline_baseline", varlist)
[1907]2711
[1856]2712 if insitu:
2713 self._assign(workscan)
2714 else:
2715 return workscan
[2012]2716
2717 except RuntimeError, e:
[2186]2718 raise_fitting_failure_exception(e)
[513]2719
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the fitting is done in-situ.
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing
                          'y' or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show
                          progress. default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values are written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = [True for i in \
                                         xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # Interactive path: fit row by row with the Python
                # fitter so each fit can be inspected and approved.
                # Only append to blfile if the file already exists.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's channel mask
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum unchanged
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources used by the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]2832
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve all None parameters to their documented defaults
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # expand the user edge spec into one (start, stop) pair per IF
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: detect lines, fit, and let the user
                # accept or reject each fit
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # pick the edge pair belonging to this row's IF
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # use the line-free mask produced by the line finder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected fits leave the spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual() \
                                               if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]2981
2982 def _init_blinfo(self):
2983 """\
2984 Initialise the following three auxiliary members:
2985 blpars : parameters of the best-fit baseline,
2986 masklists : mask data (edge positions of masked channels) and
2987 actualmask : mask data (in boolean list),
2988 to keep for use later (including output to logger/text files).
2989 Used by poly_baseline() and auto_poly_baseline() in case of
2990 'plot=True'.
2991 """
2992 self.blpars = []
2993 self.masklists = []
2994 self.actualmask = []
2995 return
[880]2996
[2012]2997 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2998 """\
2999 Append baseline-fitting related info to blpars, masklist and
3000 actualmask.
3001 """
3002 self.blpars.append(data_blpars)
3003 self.masklists.append(data_masklists)
3004 self.actualmask.append(data_actualmask)
3005 return
3006
[1862]3007 @asaplog_post_dec
[914]3008 def rotate_linpolphase(self, angle):
[1846]3009 """\
[914]3010 Rotate the phase of the complex polarization O=Q+iU correlation.
3011 This is always done in situ in the raw data. So if you call this
3012 function more than once then each call rotates the phase further.
[1846]3013
[914]3014 Parameters:
[1846]3015
[914]3016 angle: The angle (degrees) to rotate (add) by.
[1846]3017
3018 Example::
3019
[914]3020 scan.rotate_linpolphase(2.3)
[1846]3021
[914]3022 """
3023 varlist = vars()
[936]3024 self._math._rotate_linpolphase(self, angle)
[914]3025 self._add_history("rotate_linpolphase", varlist)
3026 return
[710]3027
[1862]3028 @asaplog_post_dec
[914]3029 def rotate_xyphase(self, angle):
[1846]3030 """\
[914]3031 Rotate the phase of the XY correlation. This is always done in situ
3032 in the data. So if you call this function more than once
3033 then each call rotates the phase further.
[1846]3034
[914]3035 Parameters:
[1846]3036
[914]3037 angle: The angle (degrees) to rotate (add) by.
[1846]3038
3039 Example::
3040
[914]3041 scan.rotate_xyphase(2.3)
[1846]3042
[914]3043 """
3044 varlist = vars()
[936]3045 self._math._rotate_xyphase(self, angle)
[914]3046 self._add_history("rotate_xyphase", varlist)
3047 return
3048
[1862]3049 @asaplog_post_dec
[914]3050 def swap_linears(self):
[1846]3051 """\
[1573]3052 Swap the linear polarisations XX and YY, or better the first two
[1348]3053 polarisations as this also works for ciculars.
[914]3054 """
3055 varlist = vars()
[936]3056 self._math._swap_linears(self)
[914]3057 self._add_history("swap_linears", varlist)
3058 return
3059
[1862]3060 @asaplog_post_dec
[914]3061 def invert_phase(self):
[1846]3062 """\
[914]3063 Invert the phase of the complex polarisation
3064 """
3065 varlist = vars()
[936]3066 self._math._invert_phase(self)
[914]3067 self._add_history("invert_phase", varlist)
3068 return
3069
[1862]3070 @asaplog_post_dec
[876]3071 def add(self, offset, insitu=None):
[1846]3072 """\
[513]3073 Return a scan where all spectra have the offset added
[1846]3074
[513]3075 Parameters:
[1846]3076
[513]3077 offset: the offset
[1855]3078
[513]3079 insitu: if False a new scantable is returned.
3080 Otherwise, the scaling is done in-situ
3081 The default is taken from .asaprc (False)
[1846]3082
[513]3083 """
3084 if insitu is None: insitu = rcParams['insitu']
[876]3085 self._math._setinsitu(insitu)
[513]3086 varlist = vars()
[876]3087 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3088 s._add_history("add", varlist)
[876]3089 if insitu:
3090 self._assign(s)
3091 else:
[513]3092 return s
3093
[1862]3094 @asaplog_post_dec
[1308]3095 def scale(self, factor, tsys=True, insitu=None):
[1846]3096 """\
3097
[1938]3098 Return a scan where all spectra are scaled by the given 'factor'
[1846]3099
[513]3100 Parameters:
[1846]3101
[1819]3102 factor: the scaling factor (float or 1D float list)
[1855]3103
[513]3104 insitu: if False a new scantable is returned.
3105 Otherwise, the scaling is done in-situ
3106 The default is taken from .asaprc (False)
[1855]3107
[513]3108 tsys: if True (default) then apply the operation to Tsys
3109 as well as the data
[1846]3110
[513]3111 """
3112 if insitu is None: insitu = rcParams['insitu']
[876]3113 self._math._setinsitu(insitu)
[513]3114 varlist = vars()
[1819]3115 s = None
3116 import numpy
3117 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3118 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3119 from asapmath import _array2dOp
3120 s = _array2dOp( self.copy(), factor, "MUL", tsys )
3121 else:
3122 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3123 else:
3124 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]3125 s._add_history("scale", varlist)
[876]3126 if insitu:
3127 self._assign(s)
3128 else:
[513]3129 return s
3130
[1504]3131 def set_sourcetype(self, match, matchtype="pattern",
3132 sourcetype="reference"):
[1846]3133 """\
[1502]3134 Set the type of the source to be an source or reference scan
[1846]3135 using the provided pattern.
3136
[1502]3137 Parameters:
[1846]3138
[1504]3139 match: a Unix style pattern, regular expression or selector
[1855]3140
[1504]3141 matchtype: 'pattern' (default) UNIX style pattern or
3142 'regex' regular expression
[1855]3143
[1502]3144 sourcetype: the type of the source to use (source/reference)
[1846]3145
[1502]3146 """
3147 varlist = vars()
3148 basesel = self.get_selection()
3149 stype = -1
3150 if sourcetype.lower().startswith("r"):
3151 stype = 1
3152 elif sourcetype.lower().startswith("s"):
3153 stype = 0
[1504]3154 else:
[1502]3155 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3156 if matchtype.lower().startswith("p"):
3157 matchtype = "pattern"
3158 elif matchtype.lower().startswith("r"):
3159 matchtype = "regex"
3160 else:
3161 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3162 sel = selector()
3163 if isinstance(match, selector):
3164 sel = match
3165 else:
[1504]3166 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3167 self.set_selection(basesel+sel)
3168 self._setsourcetype(stype)
3169 self.set_selection(basesel)
[1573]3170 self._add_history("set_sourcetype", varlist)
[1502]3171
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # split the data into off and on scans via the SRCTYPE column;
            # @preserve_selection restores the caller's selection afterwards
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # let the C++ layer pair each on with the closest off in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]3220
[1862]3221 @asaplog_post_dec
[1145]3222 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3223 """\
[1143]3224 Form a quotient using "off" beams when observing in "MX" mode.
[1846]3225
[1143]3226 Parameters:
[1846]3227
[1145]3228 mask: an optional mask to be used when weight == 'stddev'
[1855]3229
[1143]3230 weight: How to average the off beams. Default is 'median'.
[1855]3231
[1145]3232 preserve: you can preserve (default) the continuum or
[1855]3233 remove it. The equations used are:
[1846]3234
[1855]3235 preserve: Output = Toff * (on/off) - Toff
3236
3237 remove: Output = Toff * (on/off) - Ton
3238
[1217]3239 """
[1593]3240 mask = mask or ()
[1141]3241 varlist = vars()
3242 on = scantable(self._math._mx_extract(self, 'on'))
[1143]3243 preoff = scantable(self._math._mx_extract(self, 'off'))
3244 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3245 from asapmath import quotient
[1145]3246 q = quotient(on, off, preserve)
[1143]3247 q._add_history("mx_quotient", varlist)
[1217]3248 return q
[513]3249
[1862]3250 @asaplog_post_dec
[718]3251 def freq_switch(self, insitu=None):
[1846]3252 """\
[718]3253 Apply frequency switching to the data.
[1846]3254
[718]3255 Parameters:
[1846]3256
[718]3257 insitu: if False a new scantable is returned.
3258 Otherwise, the swictching is done in-situ
3259 The default is taken from .asaprc (False)
[1846]3260
[718]3261 """
3262 if insitu is None: insitu = rcParams['insitu']
[876]3263 self._math._setinsitu(insitu)
[718]3264 varlist = vars()
[876]3265 s = scantable(self._math._freqswitch(self))
[1118]3266 s._add_history("freq_switch", varlist)
[1856]3267 if insitu:
3268 self._assign(s)
3269 else:
3270 return s
[718]3271
[1862]3272 @asaplog_post_dec
[780]3273 def recalc_azel(self):
[1846]3274 """Recalculate the azimuth and elevation for each position."""
[780]3275 varlist = vars()
[876]3276 self._recalcazel()
[780]3277 self._add_history("recalc_azel", varlist)
3278 return
3279
[1862]3280 @asaplog_post_dec
[513]3281 def __add__(self, other):
3282 varlist = vars()
3283 s = None
3284 if isinstance(other, scantable):
[1573]3285 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3286 elif isinstance(other, float):
[876]3287 s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3288 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3289 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3290 from asapmath import _array2dOp
3291 s = _array2dOp( self.copy(), other, "ADD", False )
3292 else:
3293 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
[513]3294 else:
[718]3295 raise TypeError("Other input is not a scantable or float value")
[513]3296 s._add_history("operator +", varlist)
3297 return s
3298
[1862]3299 @asaplog_post_dec
[513]3300 def __sub__(self, other):
3301 """
3302 implicit on all axes and on Tsys
3303 """
3304 varlist = vars()
3305 s = None
3306 if isinstance(other, scantable):
[1588]3307 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3308 elif isinstance(other, float):
[876]3309 s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3310 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3311 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3312 from asapmath import _array2dOp
3313 s = _array2dOp( self.copy(), other, "SUB", False )
3314 else:
[2277]3315 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
[513]3316 else:
[718]3317 raise TypeError("Other input is not a scantable or float value")
[513]3318 s._add_history("operator -", varlist)
3319 return s
[710]3320
[1862]3321 @asaplog_post_dec
[513]3322 def __mul__(self, other):
3323 """
3324 implicit on all axes and on Tsys
3325 """
3326 varlist = vars()
3327 s = None
3328 if isinstance(other, scantable):
[1588]3329 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3330 elif isinstance(other, float):
[876]3331 s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3332 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3333 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3334 from asapmath import _array2dOp
3335 s = _array2dOp( self.copy(), other, "MUL", False )
3336 else:
[2277]3337 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
[513]3338 else:
[718]3339 raise TypeError("Other input is not a scantable or float value")
[513]3340 s._add_history("operator *", varlist)
3341 return s
3342
[710]3343
[1862]3344 @asaplog_post_dec
[513]3345 def __div__(self, other):
3346 """
3347 implicit on all axes and on Tsys
3348 """
3349 varlist = vars()
3350 s = None
3351 if isinstance(other, scantable):
[1589]3352 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3353 elif isinstance(other, float):
3354 if other == 0.0:
[718]3355 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3356 s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3357 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3358 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3359 from asapmath import _array2dOp
3360 s = _array2dOp( self.copy(), other, "DIV", False )
3361 else:
[2277]3362 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
[513]3363 else:
[718]3364 raise TypeError("Other input is not a scantable or float value")
[513]3365 s._add_history("operator /", varlist)
3366 return s
3367
[1862]3368 @asaplog_post_dec
[530]3369 def get_fit(self, row=0):
[1846]3370 """\
[530]3371 Print or return the stored fits for a row in the scantable
[1846]3372
[530]3373 Parameters:
[1846]3374
[530]3375 row: the row which the fit has been applied to.
[1846]3376
[530]3377 """
3378 if row > self.nrow():
3379 return
[976]3380 from asap.asapfit import asapfit
[530]3381 fit = asapfit(self._getfit(row))
[1859]3382 asaplog.push( '%s' %(fit) )
3383 return fit.as_dict()
[530]3384
[1483]3385 def flag_nans(self):
[1846]3386 """\
[1483]3387 Utility function to flag NaN values in the scantable.
3388 """
3389 import numpy
3390 basesel = self.get_selection()
3391 for i in range(self.nrow()):
[1589]3392 sel = self.get_row_selector(i)
3393 self.set_selection(basesel+sel)
[1483]3394 nans = numpy.isnan(self._getspectrum(0))
3395 if numpy.any(nans):
3396 bnans = [ bool(v) for v in nans]
3397 self.flag(bnans)
3398 self.set_selection(basesel)
3399
[1588]3400 def get_row_selector(self, rowno):
[1992]3401 #return selector(beams=self.getbeam(rowno),
3402 # ifs=self.getif(rowno),
3403 # pols=self.getpol(rowno),
3404 # scans=self.getscan(rowno),
3405 # cycles=self.getcycle(rowno))
3406 return selector(rows=[rowno])
[1573]3407
    def _add_history(self, funcname, parameters):
        """\
        Append a '##'-separated history entry (timestamp, function name
        and its parameters) to the table. Does nothing when history
        recording is disabled via rcParams['scantable.history'].
        NOTE: removes the 'self' key from the caller-supplied
        'parameters' dict in place.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one entry per item of a dict-valued parameter
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # store masks compactly as [start, end) segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # store masks compactly as [start, end) segments
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3447
[710]3448
[484]3449 def _zip_mask(self, mask):
3450 mask = list(mask)
3451 i = 0
3452 segments = []
3453 while mask[i:].count(1):
3454 i += mask[i:].index(1)
3455 if mask[i:].count(0):
3456 j = i + mask[i:].index(0)
3457 else:
[710]3458 j = len(mask)
[1118]3459 segments.append([i, j])
[710]3460 i = j
[484]3461 return segments
[714]3462
[626]3463 def _get_ordinate_label(self):
3464 fu = "("+self.get_fluxunit()+")"
3465 import re
3466 lbl = "Intensity"
[1118]3467 if re.match(".K.", fu):
[626]3468 lbl = "Brightness Temperature "+ fu
[1118]3469 elif re.match(".Jy.", fu):
[626]3470 lbl = "Flux density "+ fu
3471 return lbl
[710]3472
[876]3473 def _check_ifs(self):
[1986]3474 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3475 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3476 nchans = filter(lambda t: t > 0, nchans)
[876]3477 return (sum(nchans)/len(nchans) == nchans[0])
[976]3478
[1862]3479 @asaplog_post_dec
[1916]3480 #def _fill(self, names, unit, average, getpt, antenna):
3481 def _fill(self, names, unit, average, opts={}):
[976]3482 first = True
3483 fullnames = []
3484 for name in names:
3485 name = os.path.expandvars(name)
3486 name = os.path.expanduser(name)
3487 if not os.path.exists(name):
3488 msg = "File '%s' does not exists" % (name)
3489 raise IOError(msg)
3490 fullnames.append(name)
3491 if average:
3492 asaplog.push('Auto averaging integrations')
[1079]3493 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3494 for name in fullnames:
[1073]3495 tbl = Scantable(stype)
[2004]3496 if is_ms( name ):
3497 r = msfiller( tbl )
3498 else:
3499 r = filler( tbl )
3500 rx = rcParams['scantable.reference']
3501 r.setreferenceexpr(rx)
3502 #r = filler(tbl)
3503 #rx = rcParams['scantable.reference']
3504 #r.setreferenceexpr(rx)
[976]3505 msg = "Importing %s..." % (name)
[1118]3506 asaplog.push(msg, False)
[1916]3507 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]3508 r.open(name, opts)# antenna, -1, -1, getpt)
[1843]3509 r.fill()
[976]3510 if average:
[1118]3511 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3512 if not first:
3513 tbl = self._math._merge([self, tbl])
3514 Scantable.__init__(self, tbl)
[1843]3515 r.close()
[1118]3516 del r, tbl
[976]3517 first = False
[1861]3518 #flush log
3519 asaplog.post()
[976]3520 if unit is not None:
3521 self.set_fluxunit(unit)
[1824]3522 if not is_casapy():
3523 self.set_freqframe(rcParams['scantable.freqframe'])
[976]3524
[2012]3525
[1402]3526 def __getitem__(self, key):
3527 if key < 0:
3528 key += self.nrow()
3529 if key >= self.nrow():
3530 raise IndexError("Row index out of range.")
3531 return self._getspectrum(key)
3532
3533 def __setitem__(self, key, value):
3534 if key < 0:
3535 key += self.nrow()
3536 if key >= self.nrow():
3537 raise IndexError("Row index out of range.")
3538 if not hasattr(value, "__len__") or \
3539 len(value) > self.nchan(self.getif(key)):
3540 raise ValueError("Spectrum length doesn't match.")
3541 return self._setspectrum(value, key)
3542
3543 def __len__(self):
3544 return self.nrow()
3545
3546 def __iter__(self):
3547 for i in range(len(self)):
3548 yield self[i]
Note: See TracBrowser for help on using the repository browser.