source: trunk/python/scantable.py@ 2756

Last change on this file since 2756 was 2754, checked in by Takeshi Nakazato, 12 years ago

New Development: No

JIRA Issue: Yes CSV-2532 (may be related to CSV-1908 and CSV-2161)

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: In tool level, added parameter 'freq_tolsr' to

scantable constructor and function sd.splitant.

Test Programs: test_sdsave, test_importasdm_sd

Put in Release Notes: Yes

Module(s): Module Names change impacts.

Description: Describe your changes here...

When importing an MS into a Scantable, frequency frame information is
now imported as-is by default, i.e., the base frame in the Scantable is
TOPO for ALMA data. Previously the frame was forcibly converted to LSRK
using the wrong time and direction reference.

Some functions have a boolean parameter 'freq_tolsr' that controls
the above behavior. If freq_tolsr is False (default), frequency
is imported as is, while frequency is converted to LSRK (wrongly)
when it is True.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 162.6 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2751]4import re
[2315]5import tempfile
[1948]6import numpy
[1691]7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
[1824]12from asap.env import is_casapy
[876]13from asap._asap import Scantable
[2004]14from asap._asap import filler, msfiller
[1824]15from asap.parameters import rcParams
[1862]16from asap.logging import asaplog, asaplog_post_dec
[1824]17from asap.selector import selector
18from asap.linecatalog import linecatalog
[1600]19from asap.coordinate import coordinate
[1859]20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]21from asap.asapfitter import fitter
[102]22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # the first line of table.info identifies the table type
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # fixed: the handle used to stay open if readline() raised
            f.close()
        # note the (Scantable)? group is optional, so a bare 'Type = '
        # line is accepted as well (matches historical behaviour)
        match_pattern = '^Type = (Scantable)? *$'
        return re.match(match_pattern, l) is not None
    return False
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # the first line of table.info identifies the table type
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # fixed: the handle used to stay open if readline() raised
            f.close()
        return l.find('Measurement Set') != -1
    return False
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                           or [[5,5],10], but can NOT be simplified to [5,10].
                           when [5,10] given, it is interpreted as
                           [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -------------------------------------------------------
    # NOTE: the raise statements below used the Python-2-only statement
    # form ('raise E, "msg"') with line continuations INSIDE the string
    # literals, which embedded long runs of spaces into the error text.
    # Both fixed: call form (valid on Python 2 and 3) and clean messages.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different "
                             "IFs.")

    # --- canonicalise scalar / short forms -------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # a 'common' edge (applies to all IFs) is a flat pair of integers
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF edges: expand each scalar to a pair, flatten pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: when 'verbose' is on, log the original
    error plus a convergence hint; otherwise re-raise as RuntimeError.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode *showprogress* and *minnrow* as the single comma-separated
    string expected by the progress-reporting layer.
    """
    return ','.join([str(showprogress).lower(), str(minnrow)])
class scantable(Scantable):
    """\
    The ASAP container for scans (single-dish data).
    """
[1819]180
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.

            freq_tolsr:   Whether to convert frequency frame information
                          to LSRK or not.
                          This is effective only when filename is MS.
                          The default (False) is to import frequency
                          as is.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # copy-constructor: reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing on-disk scantable
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set
                    # forward only the MS-specific keyword options
                    opts={'ms': {}}
                    mskeys=['getpt','antenna','freq_tolsr']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # anything else readable by the generic filler
                    # (rpfits/sdfits)
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names is filled into one scantable
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
            self._add_history("scantable", varlist)
[102]269
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # fall back to a default name derived from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has its own writer with an explicit overwrite option
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
328
[102]329 def copy(self):
[1846]330 """Return a copy of this scantable.
331
332 *Note*:
333
[1348]334 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]335
336 Example::
337
[102]338 copiedscan = scan.copy()
[1846]339
[102]340 """
[876]341 sd = scantable(Scantable._copy(self))
[113]342 return sd
343
[1093]344 def drop_scan(self, scanid=None):
[1846]345 """\
[1093]346 Return a new scantable where the specified scan number(s) has(have)
347 been dropped.
[1846]348
[1093]349 Parameters:
[1846]350
[1093]351 scanid: a (list of) scan number(s)
[1846]352
[1093]353 """
354 from asap import _is_sequence_or_number as _is_valid
355 from asap import _to_list
356 from asap import unique
357 if not _is_valid(scanid):
[2315]358 raise RuntimeError( 'Please specify a scanno to drop from the'
359 ' scantable' )
[1859]360 scanid = _to_list(scanid)
361 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
362 for sid in scanid: allscans.remove(sid)
363 if len(allscans) == 0:
364 raise ValueError("Can't remove all scans")
365 sel = selector(scans=allscans)
366 return self._select_copy(sel)
[1093]367
[1594]368 def _select_copy(self, selection):
369 orig = self.get_selection()
370 self.set_selection(orig+selection)
371 cp = self.copy()
372 self.set_selection(orig)
373 return cp
374
[102]375 def get_scan(self, scanid=None):
[1855]376 """\
[102]377 Return a specific scan (by scanno) or collection of scans (by
378 source name) in a new scantable.
[1846]379
380 *Note*:
381
[1348]382 See scantable.drop_scan() for the inverse operation.
[1846]383
[102]384 Parameters:
[1846]385
[513]386 scanid: a (list of) scanno or a source name, unix-style
387 patterns are accepted for source name matching, e.g.
388 '*_R' gets all 'ref scans
[1846]389
390 Example::
391
[513]392 # get all scans containing the source '323p459'
393 newscan = scan.get_scan('323p459')
394 # get all 'off' scans
395 refscans = scan.get_scan('*_R')
396 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]397 newscan = scan.get_scan([0, 2, 7, 10])
[1846]398
[102]399 """
400 if scanid is None:
[1859]401 raise RuntimeError( 'Please specify a scan no or name to '
402 'retrieve from the scantable' )
[102]403 try:
[946]404 bsel = self.get_selection()
405 sel = selector()
[102]406 if type(scanid) is str:
[946]407 sel.set_name(scanid)
[1594]408 return self._select_copy(sel)
[102]409 elif type(scanid) is int:
[946]410 sel.set_scans([scanid])
[1594]411 return self._select_copy(sel)
[381]412 elif type(scanid) is list:
[946]413 sel.set_scans(scanid)
[1594]414 return self._select_copy(sel)
[381]415 else:
[718]416 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]417 raise TypeError(msg)
[102]418 except RuntimeError:
[1859]419 raise
[102]420
    def __str__(self):
        """Return the summary text for this scantable.

        The C++ summary writer only writes to a file, so route it through
        a NamedTemporaryFile and read the text back.
        """
        tempFile = tempfile.NamedTemporaryFile()
        Scantable._summary(self, tempFile.name)
        tempFile.seek(0)
        # drop the log messages produced while summarising
        asaplog.clear()
        return tempFile.file.read()
[102]427
[2315]428 @asaplog_post_dec
[976]429 def summary(self, filename=None):
[1846]430 """\
[102]431 Print a summary of the contents of this scantable.
[1846]432
[102]433 Parameters:
[1846]434
[1931]435 filename: the name of a file to write the putput to
[102]436 Default - no file output
[1846]437
[102]438 """
439 if filename is not None:
[256]440 if filename is "":
441 filename = 'scantable_summary.txt'
[415]442 from os.path import expandvars, isdir
[411]443 filename = expandvars(filename)
[2286]444 if isdir(filename):
[718]445 msg = "Illegal file name '%s'." % (filename)
[1859]446 raise IOError(msg)
[2286]447 else:
448 filename = ""
449 Scantable._summary(self, filename)
[710]450
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the spectrum from

        """
        # thin wrapper around the C++ accessor
        return self._getspectrum(rowno)
[946]460
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the mask from

        """
        # thin wrapper around the C++ accessor
        return self._getmask(rowno)
470
471 def set_spectrum(self, spec, rowno):
[1938]472 """Set the spectrum for the current row in the scantable.
[1846]473
[1471]474 Parameters:
[1846]475
[1855]476 spec: the new spectrum
[1846]477
[1855]478 rowno: the row number to set the spectrum for
479
[1471]480 """
[2348]481 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]482 return self._setspectrum(spec, rowno)
483
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python-side convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
506
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)          # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        # wrap the raw C++ selection in a python selector object
        return selector(self._getselection())
519
    def set_selection(self, selection=None, **kw):
        """\
        Select a subset of the data. All following operations on this scantable
        are only applied to this selection.

        Parameters:

            selection:    a selector object (default unset the selection), or
                          any combination of 'pols', 'ifs', 'beams', 'scans',
                          'cycles', 'name', 'query'

        Examples::

            sel = selector()         # create a selection object
            self.set_scans([0, 3])   # select SCANNO 0 and 3
            scan.set_selection(sel)  # set the selection
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection

        """
        if selection is None:
            # reset
            if len(kw) == 0:
                selection = selector()
            else:
                # try keywords
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', "
                                       "valid keys are %s" % (k,
                                                              selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
557
    def get_row(self, row=0, insitu=None):
        """\
        Select a row in the scantable.
        Return a scantable with single row.

        Parameters:

            row:       row no of integration, default is 0.
            insitu:    if False a new scantable is returned. Otherwise, the
                       scaling is done in-situ. The default is taken from
                       .asaprc (False)

        """
        if insitu is None:
            insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        # Select a row
        sel = selector()
        sel.set_rows([row])
        workscan.set_selection(sel)
        # the row selection should always match exactly one row
        if not workscan.nrow() == 1:
            msg = "Could not identify single row. %d rows selected." \
                % (workscan.nrow())
            raise RuntimeError(msg)
        if insitu:
            self._assign(workscan)
        else:
            return workscan
589
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq) of the
        # extremum rather than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # channel position of the extremum, needed for the report
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report, one section per processed row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa requested: collect it as the return value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # NOTE(review): 'statunit' below is the value from the LAST loop
        # iteration (and would be undefined for an empty table) — confirm
        # this is intended before relying on the header line.
        # NOTE(review): the temp file name is predictable and in /tmp;
        # consider tempfile.NamedTemporaryFile if this ever matters.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        # read the report back and push it to the asap log
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
697
698 def chan2data(self, rowno=0, chan=0):
[1846]699 """\
[1819]700 Returns channel/frequency/velocity and spectral value
701 at an arbitrary row and channel in the scantable.
[1846]702
[1819]703 Parameters:
[1846]704
[1819]705 rowno: a row number in the scantable. Default is the
706 first row, i.e. rowno=0
[1855]707
[1819]708 chan: a channel in the scantable. Default is the first
709 channel, i.e. pos=0
[1846]710
[1819]711 """
712 if isinstance(rowno, int) and isinstance(chan, int):
713 qx = {'unit': self.get_unit(),
714 'value': self._getabcissa(rowno)[chan]}
715 qy = {'unit': self.get_fluxunit(),
716 'value': self._getspectrum(rowno)[chan]}
717 return qx, qy
718
[1118]719 def stddev(self, mask=None):
[1846]720 """\
[135]721 Determine the standard deviation of the current beam/if/pol
722 Takes a 'mask' as an optional parameter to specify which
723 channels should be excluded.
[1846]724
[135]725 Parameters:
[1846]726
[135]727 mask: an optional mask specifying where the standard
728 deviation should be determined.
729
[1846]730 Example::
731
[135]732 scan.set_unit('channel')
[1118]733 msk = scan.create_mask([100, 200], [500, 600])
[135]734 scan.stddev(mask=m)
[1846]735
[135]736 """
[1118]737 return self.stats(stat='stddev', mask=mask);
[135]738
[1003]739
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the C++ vector into a plain python list
        return list(Scantable.get_column_names(self))
[1003]745
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # all rows: also pushes a formatted per-row report to the log
        return self._row_callback(self._gettsys, "Tsys")
[256]762
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # thin wrapper around the per-row column accessor
        return self._get_column( self._gettsysspectrum, row )
777
[1730]778 def get_weather(self, row=-1):
[1846]779 """\
780 Return the weather informations.
781
782 Parameters:
783
784 row: the rowno to get the information for. (default all rows)
785
786 Returns:
787
788 a dict or list of of dicts of values for the current selection
789
790 """
791
[1730]792 values = self._get_column(self._get_weather, row)
793 if row > -1:
794 return {'temperature': values[0],
795 'pressure': values[1], 'humidity' : values[2],
796 'windspeed' : values[3], 'windaz' : values[4]
797 }
798 else:
799 out = []
800 for r in values:
801
802 out.append({'temperature': r[0],
803 'pressure': r[1], 'humidity' : r[2],
804 'windspeed' : r[3], 'windaz' : r[4]
805 })
806 return out
807
    def _row_callback(self, callback, label):
        """Apply *callback* to every row, log a formatted per-row report
        headed by *label*, and return the list of callback results.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that actually vary
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]831
[1947]832 def _get_column(self, callback, row=-1, *args):
[1070]833 """
834 """
835 if row == -1:
[1947]836 return [callback(i, *args) for i in range(self.nrow())]
[1070]837 else:
[1819]838 if 0 <= row < self.nrow():
[1947]839 return callback(row, *args)
[256]840
[1070]841
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:           row no of integration. Default -1 return all rows

            asdatetime:    return values as datetime objects rather than
                           strings

            prec:          number of digits shown. Default -1 to automatic
                           calculation.
                           Note this number is equals to the digits of
                           MVTime, i.e., 0<prec<3: dates with hh:: only,
                           <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                           and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # build a strptime format matching the requested precision by
        # trimming the finest-grained fields off the full pattern
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]887
[1348]888
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getinttime, row)
[1348]900
[1573]901
    def get_sourcename(self, row=-1):
        """\
        Get a list source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getsourcename, row)
[714]912
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getelevation, row)
[794]924
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:
            row:    row no of integration. Default -1 return all rows.

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getazimuth, row)
[794]935
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getparangle, row)
[794]947
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getdirection, row)
959
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        # thin wrapper around the per-row column accessor
        return self._get_column(self._getdirectionvec, row)
971
[1862]972 @asaplog_post_dec
[102]973 def set_unit(self, unit='channel'):
[1846]974 """\
[102]975 Set the unit for all following operations on this scantable
[1846]976
[102]977 Parameters:
[1846]978
979 unit: optional unit, default is 'channel'. Use one of '*Hz',
980 'km/s', 'channel' or equivalent ''
981
[102]982 """
[484]983 varlist = vars()
[1118]984 if unit in ['', 'pixel', 'channel']:
[113]985 unit = ''
986 inf = list(self._getcoordinfo())
987 inf[0] = unit
988 self._setcoordinfo(inf)
[1118]989 self._add_history("set_unit", varlist)
[113]990
[1862]991 @asaplog_post_dec
[484]992 def set_instrument(self, instr):
[1846]993 """\
[1348]994 Set the instrument for subsequent processing.
[1846]995
[358]996 Parameters:
[1846]997
[710]998 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]999 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1000
[358]1001 """
1002 self._setInstrument(instr)
[1118]1003 self._add_history("set_instument", vars())
[358]1004
[1862]1005 @asaplog_post_dec
[1190]1006 def set_feedtype(self, feedtype):
[1846]1007 """\
[1190]1008 Overwrite the feed type, which might not be set correctly.
[1846]1009
[1190]1010 Parameters:
[1846]1011
[1190]1012 feedtype: 'linear' or 'circular'
[1846]1013
[1190]1014 """
1015 self._setfeedtype(feedtype)
1016 self._add_history("set_feedtype", vars())
1017
[1862]1018 @asaplog_post_dec
[276]1019 def set_doppler(self, doppler='RADIO'):
[1846]1020 """\
[276]1021 Set the doppler for all following operations on this scantable.
[1846]1022
[276]1023 Parameters:
[1846]1024
[276]1025 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1026
[276]1027 """
[484]1028 varlist = vars()
[276]1029 inf = list(self._getcoordinfo())
1030 inf[2] = doppler
1031 self._setcoordinfo(inf)
[1118]1032 self._add_history("set_doppler", vars())
[710]1033
[1862]1034 @asaplog_post_dec
[226]1035 def set_freqframe(self, frame=None):
[1846]1036 """\
[113]1037 Set the frame type of the Spectral Axis.
[1846]1038
[113]1039 Parameters:
[1846]1040
[591]1041 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1042 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1043 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1044
1045 Example::
1046
[113]1047 scan.set_freqframe('BARY')
[1846]1048
[113]1049 """
[1593]1050 frame = frame or rcParams['scantable.freqframe']
[484]1051 varlist = vars()
[1819]1052 # "REST" is not implemented in casacore
1053 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1054 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1055 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1056 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1057
[989]1058 if frame in valid:
[113]1059 inf = list(self._getcoordinfo())
1060 inf[1] = frame
1061 self._setcoordinfo(inf)
[1118]1062 self._add_history("set_freqframe", varlist)
[102]1063 else:
[1118]1064 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1065 raise TypeError(msg)
[710]1066
[1862]1067 @asaplog_post_dec
[989]1068 def set_dirframe(self, frame=""):
[1846]1069 """\
[989]1070 Set the frame type of the Direction on the sky.
[1846]1071
[989]1072 Parameters:
[1846]1073
[989]1074 frame: an optional frame type, default ''. Valid frames are:
1075 'J2000', 'B1950', 'GALACTIC'
[1846]1076
1077 Example:
1078
[989]1079 scan.set_dirframe('GALACTIC')
[1846]1080
[989]1081 """
1082 varlist = vars()
[1859]1083 Scantable.set_dirframe(self, frame)
[1118]1084 self._add_history("set_dirframe", varlist)
[989]1085
[113]1086 def get_unit(self):
[1846]1087 """\
[113]1088 Get the default unit set in this scantable
[1846]1089
[113]1090 Returns:
[1846]1091
[113]1092 A unit string
[1846]1093
[113]1094 """
1095 inf = self._getcoordinfo()
1096 unit = inf[0]
1097 if unit == '': unit = 'channel'
1098 return unit
[102]1099
[1862]1100 @asaplog_post_dec
[158]1101 def get_abcissa(self, rowno=0):
[1846]1102 """\
[158]1103 Get the abcissa in the current coordinate setup for the currently
[113]1104 selected Beam/IF/Pol
[1846]1105
[113]1106 Parameters:
[1846]1107
[226]1108 rowno: an optional row number in the scantable. Default is the
1109 first row, i.e. rowno=0
[1846]1110
[113]1111 Returns:
[1846]1112
[1348]1113 The abcissa values and the format string (as a dictionary)
[1846]1114
[113]1115 """
[256]1116 abc = self._getabcissa(rowno)
[710]1117 lbl = self._getabcissalabel(rowno)
[158]1118 return abc, lbl
[113]1119
[1862]1120 @asaplog_post_dec
[2322]1121 def flag(self, mask=None, unflag=False, row=-1):
[1846]1122 """\
[1001]1123 Flag the selected data using an optional channel mask.
[1846]1124
[1001]1125 Parameters:
[1846]1126
[1001]1127 mask: an optional channel mask, created with create_mask. Default
1128 (no mask) is all channels.
[1855]1129
[1819]1130 unflag: if True, unflag the data
[1846]1131
[2322]1132 row: an optional row number in the scantable.
1133 Default -1 flags all rows
1134
[1001]1135 """
1136 varlist = vars()
[1593]1137 mask = mask or []
[1994]1138 self._flag(row, mask, unflag)
[1001]1139 self._add_history("flag", varlist)
1140
[1862]1141 @asaplog_post_dec
[2322]1142 def flag_row(self, rows=None, unflag=False):
[1846]1143 """\
[1819]1144 Flag the selected data in row-based manner.
[1846]1145
[1819]1146 Parameters:
[1846]1147
[1843]1148 rows: list of row numbers to be flagged. Default is no row
[2322]1149 (must be explicitly specified to execute row-based
1150 flagging).
[1855]1151
[1819]1152 unflag: if True, unflag the data.
[1846]1153
[1819]1154 """
1155 varlist = vars()
[2322]1156 if rows is None:
1157 rows = []
[1859]1158 self._flag_row(rows, unflag)
[1819]1159 self._add_history("flag_row", varlist)
1160
[1862]1161 @asaplog_post_dec
[1819]1162 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1163 """\
[1819]1164 Flag the selected data outside a specified range (in channel-base)
[1846]1165
[1819]1166 Parameters:
[1846]1167
[1819]1168 uthres: upper threshold.
[1855]1169
[1819]1170 dthres: lower threshold
[1846]1171
[2322]1172 clipoutside: True for flagging data outside the range
1173 [dthres:uthres].
[1928]1174 False for flagging data inside the range.
[1855]1175
[1846]1176 unflag: if True, unflag the data.
1177
[1819]1178 """
1179 varlist = vars()
[1859]1180 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1181 self._add_history("clip", varlist)
1182
[1862]1183 @asaplog_post_dec
[1584]1184 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1185 """\
[1192]1186 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1187 Flagged data in the scantable get interpolated over the region.
[1192]1188 No taper is applied.
[1846]1189
[1192]1190 Parameters:
[1846]1191
[1579]1192 start: the start frequency (really a period within the
1193 bandwidth) or period to remove
[1855]1194
[1579]1195 end: the end frequency or period to remove
[1855]1196
[2431]1197 unit: the frequency unit (default 'MHz') or '' for
[1579]1198 explicit lag channels
[1846]1199
1200 *Notes*:
1201
[1579]1202 It is recommended to flag edges of the band or strong
[1348]1203 signals beforehand.
[1846]1204
[1192]1205 """
1206 if insitu is None: insitu = rcParams['insitu']
1207 self._math._setinsitu(insitu)
1208 varlist = vars()
[1579]1209 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1210 if not (unit == "" or base.has_key(unit)):
[1192]1211 raise ValueError("%s is not a valid unit." % unit)
[1859]1212 if unit == "":
1213 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1214 else:
1215 s = scantable(self._math._lag_flag(self, start*base[unit],
1216 end*base[unit], "frequency"))
[1192]1217 s._add_history("lag_flag", varlist)
1218 if insitu:
1219 self._assign(s)
1220 else:
1221 return s
[1001]1222
[1862]1223 @asaplog_post_dec
[2349]1224 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1225 """\
1226 Apply FFT to the spectra.
1227 Flagged data in the scantable get interpolated over the region.
1228
1229 Parameters:
[2186]1230
1231 rowno: The row number(s) to be processed. int, list
[2349]1232 and tuple are accepted. By default (None), FFT
[2186]1233 is applied to the whole data.
1234
1235 mask: Auxiliary channel mask(s). Given as a boolean
1236 list, it is applied to all specified rows.
1237 A list of boolean lists can also be used to
1238 apply different masks. In the latter case, the
1239 length of 'mask' must be the same as that of
[2349]1240 'rowno'. The default is None.
[2177]1241
1242 getrealimag: If True, returns the real and imaginary part
1243 values of the complex results.
1244 If False (the default), returns the amplitude
1245 (absolute value) normalised with Ndata/2 and
1246 phase (argument, in unit of radian).
1247
1248 Returns:
1249
[2186]1250 A list of dictionaries containing the results for each spectrum.
1251 Each dictionary contains two values, the real and the imaginary
1252 parts when getrealimag = True, or the amplitude(absolute value)
1253 and the phase(argument) when getrealimag = False. The key for
1254 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1255 respectively.
1256 """
[2349]1257 if rowno is None:
1258 rowno = []
[2177]1259 if isinstance(rowno, int):
1260 rowno = [rowno]
1261 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1262 raise TypeError("The row number(s) must be int, list or tuple.")
1263 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1264
[2411]1265 usecommonmask = True
1266
1267 if mask is None:
1268 mask = []
1269 if isinstance(mask, list) or isinstance(mask, tuple):
1270 if len(mask) == 0:
1271 mask = [[]]
1272 else:
1273 if isinstance(mask[0], bool):
1274 if len(mask) != self.nchan(self.getif(rowno[0])):
1275 raise ValueError("The spectra and the mask have "
1276 "different length.")
1277 mask = [mask]
1278 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1279 usecommonmask = False
1280 if len(mask) != len(rowno):
1281 raise ValueError("When specifying masks for each "
1282 "spectrum, the numbers of them "
1283 "must be identical.")
1284 for i in xrange(mask):
1285 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1286 raise ValueError("The spectra and the mask have "
1287 "different length.")
1288 else:
1289 raise TypeError("The mask must be a boolean list or "
1290 "a list of boolean list.")
1291 else:
[2349]1292 raise TypeError("The mask must be a boolean list or a list of "
1293 "boolean list.")
[2186]1294
1295 res = []
1296
1297 imask = 0
1298 for whichrow in rowno:
1299 fspec = self._fft(whichrow, mask[imask], getrealimag)
1300 nspec = len(fspec)
[2177]1301
[2349]1302 i = 0
1303 v1 = []
1304 v2 = []
1305 reselem = {"real":[],"imag":[]} if getrealimag \
1306 else {"ampl":[],"phase":[]}
[2177]1307
[2186]1308 while (i < nspec):
1309 v1.append(fspec[i])
1310 v2.append(fspec[i+1])
[2349]1311 i += 2
[2186]1312
[2177]1313 if getrealimag:
[2186]1314 reselem["real"] += v1
1315 reselem["imag"] += v2
[2177]1316 else:
[2186]1317 reselem["ampl"] += v1
1318 reselem["phase"] += v2
[2177]1319
[2186]1320 res.append(reselem)
1321
[2349]1322 if not usecommonmask:
1323 imask += 1
[2186]1324
[2177]1325 return res
1326
1327 @asaplog_post_dec
[113]1328 def create_mask(self, *args, **kwargs):
[1846]1329 """\
[1118]1330 Compute and return a mask based on [min, max] windows.
[189]1331 The specified windows are to be INCLUDED, when the mask is
[113]1332 applied.
[1846]1333
[102]1334 Parameters:
[1846]1335
[1118]1336 [min, max], [min2, max2], ...
[1024]1337 Pairs of start/end points (inclusive)specifying the regions
[102]1338 to be masked
[1855]1339
[189]1340 invert: optional argument. If specified as True,
1341 return an inverted mask, i.e. the regions
1342 specified are EXCLUDED
[1855]1343
[513]1344 row: create the mask using the specified row for
1345 unit conversions, default is row=0
1346 only necessary if frequency varies over rows.
[1846]1347
1348 Examples::
1349
[113]1350 scan.set_unit('channel')
[1846]1351 # a)
[1118]1352 msk = scan.create_mask([400, 500], [800, 900])
[189]1353 # masks everything outside 400 and 500
[113]1354 # and 800 and 900 in the unit 'channel'
1355
[1846]1356 # b)
[1118]1357 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1358 # masks the regions between 400 and 500
[113]1359 # and 800 and 900 in the unit 'channel'
[1846]1360
1361 # c)
1362 #mask only channel 400
[1554]1363 msk = scan.create_mask([400])
[1846]1364
[102]1365 """
[1554]1366 row = kwargs.get("row", 0)
[513]1367 data = self._getabcissa(row)
[113]1368 u = self._getcoordinfo()[0]
[1859]1369 if u == "":
1370 u = "channel"
1371 msg = "The current mask window unit is %s" % u
1372 i = self._check_ifs()
1373 if not i:
1374 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1375 asaplog.push(msg)
[2348]1376 n = len(data)
[1295]1377 msk = _n_bools(n, False)
[710]1378 # test if args is a 'list' or a 'normal *args - UGLY!!!
1379
[2349]1380 ws = (isinstance(args[-1][-1], int)
1381 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1382 for window in ws:
[1554]1383 if len(window) == 1:
1384 window = [window[0], window[0]]
1385 if len(window) == 0 or len(window) > 2:
[2349]1386 raise ValueError("A window needs to be defined as "
1387 "[start(, end)]")
[1545]1388 if window[0] > window[1]:
1389 tmp = window[0]
1390 window[0] = window[1]
1391 window[1] = tmp
[102]1392 for i in range(n):
[1024]1393 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1394 msk[i] = True
[113]1395 if kwargs.has_key('invert'):
1396 if kwargs.get('invert'):
[1295]1397 msk = mask_not(msk)
[102]1398 return msk
[710]1399
[1931]1400 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1401 """\
[1819]1402 Compute and return a list of mask windows, [min, max].
[1846]1403
[1819]1404 Parameters:
[1846]1405
[1819]1406 mask: channel mask, created with create_mask.
[1855]1407
[1819]1408 row: calcutate the masklist using the specified row
1409 for unit conversions, default is row=0
1410 only necessary if frequency varies over rows.
[1846]1411
[1819]1412 Returns:
[1846]1413
[1819]1414 [min, max], [min2, max2], ...
1415 Pairs of start/end points (inclusive)specifying
1416 the masked regions
[1846]1417
[1819]1418 """
1419 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1420 raise TypeError("The mask should be list or tuple.")
[2427]1421 if len(mask) <= 0:
1422 raise TypeError("The mask elements should be > 0")
[2348]1423 data = self._getabcissa(row)
1424 if len(data) != len(mask):
[1819]1425 msg = "Number of channels in scantable != number of mask elements"
1426 raise TypeError(msg)
1427 u = self._getcoordinfo()[0]
[1859]1428 if u == "":
1429 u = "channel"
1430 msg = "The current mask window unit is %s" % u
1431 i = self._check_ifs()
1432 if not i:
1433 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1434 if not silent:
1435 asaplog.push(msg)
[2349]1436 masklist = []
[1819]1437 ist, ien = None, None
1438 ist, ien=self.get_mask_indices(mask)
1439 if ist is not None and ien is not None:
1440 for i in xrange(len(ist)):
1441 range=[data[ist[i]],data[ien[i]]]
1442 range.sort()
1443 masklist.append([range[0],range[1]])
1444 return masklist
1445
1446 def get_mask_indices(self, mask=None):
[1846]1447 """\
[1819]1448 Compute and Return lists of mask start indices and mask end indices.
[1855]1449
1450 Parameters:
1451
[1819]1452 mask: channel mask, created with create_mask.
[1846]1453
[1819]1454 Returns:
[1846]1455
[1819]1456 List of mask start indices and that of mask end indices,
1457 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1458
[1819]1459 """
1460 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1461 raise TypeError("The mask should be list or tuple.")
[2427]1462 if len(mask) <= 0:
1463 raise TypeError("The mask elements should be > 0")
[2349]1464 istart = []
1465 iend = []
1466 if mask[0]:
1467 istart.append(0)
[1819]1468 for i in range(len(mask)-1):
1469 if not mask[i] and mask[i+1]:
1470 istart.append(i+1)
1471 elif mask[i] and not mask[i+1]:
1472 iend.append(i)
[2349]1473 if mask[len(mask)-1]:
1474 iend.append(len(mask)-1)
[1819]1475 if len(istart) != len(iend):
1476 raise RuntimeError("Numbers of mask start != mask end.")
1477 for i in range(len(istart)):
1478 if istart[i] > iend[i]:
1479 raise RuntimeError("Mask start index > mask end index")
1480 break
1481 return istart,iend
1482
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to obtain its
                        # frequency coordinate and abcissa
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1616
[2611]1617 @asaplog_post_dec
1618 def parse_idx_selection(self, mode, selexpr):
1619 """
1620 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
1621 BEAMNO, and row number
1622
1623 Parameters:
1624 mode : which column to select.
1625 ['scan',|'if'|'pol'|'beam'|'row']
1626 selexpr : A comma separated selection expression.
1627 examples:
1628 '' = all (returns [])
1629 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
1630 (returns [0,1,4,5,6,9])
1631 Returns:
1632 A List of selected indices
1633 """
1634 if selexpr == "":
1635 return []
1636 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
1637 'b': 'beam', 'r': 'row'}
1638 smode = mode.lower()[0]
1639 if not (smode in valid_modes.keys()):
1640 msg = "Invalid mode '%s'. Valid modes are %s" %\
1641 (mode, str(valid_modes.values()))
1642 asaplog.post()
1643 asaplog.push(msg)
1644 asaplog.post("ERROR")
1645 mode = valid_modes[smode]
1646 minidx = None
1647 maxidx = None
1648 if smode == 'r':
1649 minidx = 0
1650 maxidx = self.nrow()
1651 else:
1652 idx = getattr(self,"get"+mode+"nos")()
1653 minidx = min(idx)
1654 maxidx = max(idx)
1655 del idx
1656 sellist = selexpr.split(',')
1657 idxlist = []
1658 for currselstr in sellist:
1659 # single range (may include ~, < or >)
1660 currlist = self._parse_selection(currselstr, typestr='integer',
1661 minval=minidx,maxval=maxidx)
1662 for thelist in currlist:
1663 idxlist += range(thelist[0],thelist[1]+1)
1664 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
1665 asaplog.push(msg)
1666 return idxlist
1667
[2349]1668 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]1669 minval=None, maxval=None):
[2013]1670 """
1671 Parameters:
1672 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1673 typestr : The type of the values in returned list
1674 ('integer' or 'float')
1675 offset : The offset value to subtract from or add to
1676 the boundary value if the selection string
[2611]1677 includes '<' or '>' [Valid only for typestr='float']
[2013]1678 minval, maxval : The minimum/maximum values to set if the
1679 selection string includes '<' or '>'.
1680 The list element is filled with None by default.
1681 Returns:
1682 A list of min/max pair of selections.
1683 Example:
[2611]1684 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1685 --> returns [[0,2],[5,7],[9,9]]
1686 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1687 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
[2013]1688 """
1689 selgroups = selstr.split(';')
1690 sellists = []
1691 if typestr.lower().startswith('int'):
1692 formatfunc = int
[2611]1693 offset = 1
[2013]1694 else:
1695 formatfunc = float
1696
1697 for currsel in selgroups:
1698 if currsel.find('~') > 0:
[2611]1699 # val0 <= x <= val1
[2013]1700 minsel = formatfunc(currsel.split('~')[0].strip())
1701 maxsel = formatfunc(currsel.split('~')[1].strip())
[2611]1702 elif currsel.strip().find('<=') > -1:
1703 bound = currsel.split('<=')
1704 try: # try "x <= val"
1705 minsel = minval
1706 maxsel = formatfunc(bound[1].strip())
1707 except ValueError: # now "val <= x"
1708 minsel = formatfunc(bound[0].strip())
1709 maxsel = maxval
1710 elif currsel.strip().find('>=') > -1:
1711 bound = currsel.split('>=')
1712 try: # try "x >= val"
1713 minsel = formatfunc(bound[1].strip())
1714 maxsel = maxval
1715 except ValueError: # now "val >= x"
1716 minsel = minval
1717 maxsel = formatfunc(bound[0].strip())
1718 elif currsel.strip().find('<') > -1:
1719 bound = currsel.split('<')
1720 try: # try "x < val"
1721 minsel = minval
1722 maxsel = formatfunc(bound[1].strip()) \
1723 - formatfunc(offset)
1724 except ValueError: # now "val < x"
1725 minsel = formatfunc(bound[0].strip()) \
[2013]1726 + formatfunc(offset)
[2611]1727 maxsel = maxval
1728 elif currsel.strip().find('>') > -1:
1729 bound = currsel.split('>')
1730 try: # try "x > val"
1731 minsel = formatfunc(bound[1].strip()) \
1732 + formatfunc(offset)
1733 maxsel = maxval
1734 except ValueError: # now "val > x"
1735 minsel = minval
1736 maxsel = formatfunc(bound[0].strip()) \
1737 - formatfunc(offset)
[2013]1738 else:
1739 minsel = formatfunc(currsel)
1740 maxsel = formatfunc(currsel)
1741 sellists.append([minsel,maxsel])
1742 return sellists
1743
[1819]1744# def get_restfreqs(self):
1745# """
1746# Get the restfrequency(s) stored in this scantable.
1747# The return value(s) are always of unit 'Hz'
1748# Parameters:
1749# none
1750# Returns:
1751# a list of doubles
1752# """
1753# return list(self._getrestfreqs())
1754
1755 def get_restfreqs(self, ids=None):
[1846]1756 """\
[256]1757 Get the restfrequency(s) stored in this scantable.
1758 The return value(s) are always of unit 'Hz'
[1846]1759
[256]1760 Parameters:
[1846]1761
[1819]1762 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1763 be retrieved
[1846]1764
[256]1765 Returns:
[1846]1766
[1819]1767 dictionary containing ids and a list of doubles for each id
[1846]1768
[256]1769 """
[1819]1770 if ids is None:
[2349]1771 rfreqs = {}
[1819]1772 idlist = self.getmolnos()
1773 for i in idlist:
[2349]1774 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1775 return rfreqs
1776 else:
[2349]1777 if type(ids) == list or type(ids) == tuple:
1778 rfreqs = {}
[1819]1779 for i in ids:
[2349]1780 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1781 return rfreqs
1782 else:
1783 return list(self._getrestfreqs(ids))
[102]1784
[2349]1785 @asaplog_post_dec
[931]1786 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1787 """\
[931]1788 Set or replace the restfrequency specified and
[1938]1789 if the 'freqs' argument holds a scalar,
[931]1790 then that rest frequency will be applied to all the selected
1791 data. If the 'freqs' argument holds
1792 a vector, then it MUST be of equal or smaller length than
1793 the number of IFs (and the available restfrequencies will be
1794 replaced by this vector). In this case, *all* data have
1795 the restfrequency set per IF according
1796 to the corresponding value you give in the 'freqs' vector.
[1118]1797 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1798 IF 1 gets restfreq 2e9.
[1846]1799
[1395]1800 You can also specify the frequencies via a linecatalog.
[1153]1801
[931]1802 Parameters:
[1846]1803
[931]1804 freqs: list of rest frequency values or string idenitfiers
[1855]1805
[931]1806 unit: unit for rest frequency (default 'Hz')
[402]1807
[1846]1808
1809 Example::
1810
[1819]1811 # set the given restfrequency for the all currently selected IFs
[931]1812 scan.set_restfreqs(freqs=1.4e9)
[1845]1813 # set restfrequencies for the n IFs (n > 1) in the order of the
1814 # list, i.e
1815 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1816 # len(list_of_restfreqs) == nIF
1817 # for nIF == 1 the following will set multiple restfrequency for
1818 # that IF
[1819]1819 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1820 # set multiple restfrequencies per IF. as a list of lists where
1821 # the outer list has nIF elements, the inner s arbitrary
1822 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1823
[1846]1824 *Note*:
[1845]1825
[931]1826 To do more sophisticate Restfrequency setting, e.g. on a
1827 source and IF basis, use scantable.set_selection() before using
[1846]1828 this function::
[931]1829
[1846]1830 # provided your scantable is called scan
1831 selection = selector()
[2431]1832 selection.set_name('ORION*')
[1846]1833 selection.set_ifs([1])
1834 scan.set_selection(selection)
1835 scan.set_restfreqs(freqs=86.6e9)
1836
[931]1837 """
1838 varlist = vars()
[1157]1839 from asap import linecatalog
1840 # simple value
[1118]1841 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1842 self._setrestfreqs([freqs], [""], unit)
[1157]1843 # list of values
[1118]1844 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1845 # list values are scalars
[1118]1846 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1847 if len(freqs) == 1:
1848 self._setrestfreqs(freqs, [""], unit)
1849 else:
1850 # allow the 'old' mode of setting mulitple IFs
1851 savesel = self._getselection()
[2599]1852 sel = self.get_selection()
[1845]1853 iflist = self.getifnos()
1854 if len(freqs)>len(iflist):
1855 raise ValueError("number of elements in list of list "
1856 "exeeds the current IF selections")
1857 iflist = self.getifnos()
1858 for i, fval in enumerate(freqs):
1859 sel.set_ifs(iflist[i])
1860 self._setselection(sel)
1861 self._setrestfreqs([fval], [""], unit)
1862 self._setselection(savesel)
1863
1864 # list values are dict, {'value'=, 'name'=)
[1157]1865 elif isinstance(freqs[-1], dict):
[1845]1866 values = []
1867 names = []
1868 for d in freqs:
1869 values.append(d["value"])
1870 names.append(d["name"])
1871 self._setrestfreqs(values, names, unit)
[1819]1872 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1873 savesel = self._getselection()
[2599]1874 sel = self.get_selection()
[1322]1875 iflist = self.getifnos()
[1819]1876 if len(freqs)>len(iflist):
[1845]1877 raise ValueError("number of elements in list of list exeeds"
1878 " the current IF selections")
1879 for i, fval in enumerate(freqs):
[1322]1880 sel.set_ifs(iflist[i])
[1259]1881 self._setselection(sel)
[1845]1882 self._setrestfreqs(fval, [""], unit)
[1157]1883 self._setselection(savesel)
1884 # freqs are to be taken from a linecatalog
[1153]1885 elif isinstance(freqs, linecatalog):
1886 savesel = self._getselection()
[2599]1887 sel = self.get_selection()
[1153]1888 for i in xrange(freqs.nrow()):
[1322]1889 sel.set_ifs(iflist[i])
[1153]1890 self._setselection(sel)
[1845]1891 self._setrestfreqs([freqs.get_frequency(i)],
1892 [freqs.get_name(i)], "MHz")
[1153]1893 # ensure that we are not iterating past nIF
1894 if i == self.nif()-1: break
1895 self._setselection(savesel)
[931]1896 else:
1897 return
1898 self._add_history("set_restfreqs", varlist)
1899
[2349]1900 @asaplog_post_dec
[1360]1901 def shift_refpix(self, delta):
[1846]1902 """\
[1589]1903 Shift the reference pixel of the Spectra Coordinate by an
1904 integer amount.
[1846]1905
[1589]1906 Parameters:
[1846]1907
[1589]1908 delta: the amount to shift by
[1846]1909
1910 *Note*:
1911
[1589]1912 Be careful using this with broadband data.
[1846]1913
[1360]1914 """
[2349]1915 varlist = vars()
[1731]1916 Scantable.shift_refpix(self, delta)
[2349]1917 s._add_history("shift_refpix", varlist)
[931]1918
[1862]1919 @asaplog_post_dec
[1259]1920 def history(self, filename=None):
[1846]1921 """\
[1259]1922 Print the history. Optionally to a file.
[1846]1923
[1348]1924 Parameters:
[1846]1925
[1928]1926 filename: The name of the file to save the history to.
[1846]1927
[1259]1928 """
[484]1929 hist = list(self._gethistory())
[794]1930 out = "-"*80
[484]1931 for h in hist:
[489]1932 if h.startswith("---"):
[1857]1933 out = "\n".join([out, h])
[489]1934 else:
1935 items = h.split("##")
1936 date = items[0]
1937 func = items[1]
1938 items = items[2:]
[794]1939 out += "\n"+date+"\n"
1940 out += "Function: %s\n Parameters:" % (func)
[489]1941 for i in items:
[1938]1942 if i == '':
1943 continue
[489]1944 s = i.split("=")
[1118]1945 out += "\n %s = %s" % (s[0], s[1])
[1857]1946 out = "\n".join([out, "-"*80])
[1259]1947 if filename is not None:
1948 if filename is "":
1949 filename = 'scantable_history.txt'
1950 import os
1951 filename = os.path.expandvars(os.path.expanduser(filename))
1952 if not os.path.isdir(filename):
1953 data = open(filename, 'w')
1954 data.write(out)
1955 data.close()
1956 else:
1957 msg = "Illegal file name '%s'." % (filename)
[1859]1958 raise IOError(msg)
1959 return page(out)
[2349]1960
[513]1961 #
1962 # Maths business
1963 #
[1862]1964 @asaplog_post_dec
[931]1965 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1966 """\
[2349]1967 Return the (time) weighted average of a scan. Scans will be averaged
1968 only if the source direction (RA/DEC) is within 1' otherwise
[1846]1969
1970 *Note*:
1971
[1070]1972 in channels only - align if necessary
[1846]1973
[513]1974 Parameters:
[1846]1975
[513]1976 mask: an optional mask (only used for 'var' and 'tsys'
1977 weighting)
[1855]1978
[558]1979 scanav: True averages each scan separately
1980 False (default) averages all scans together,
[1855]1981
[1099]1982 weight: Weighting scheme.
1983 'none' (mean no weight)
1984 'var' (1/var(spec) weighted)
1985 'tsys' (1/Tsys**2 weighted)
1986 'tint' (integration time weighted)
1987 'tintsys' (Tint/Tsys**2)
1988 'median' ( median averaging)
[535]1989 The default is 'tint'
[1855]1990
[931]1991 align: align the spectra in velocity before averaging. It takes
1992 the time of the first spectrum as reference time.
[1846]1993
1994 Example::
1995
[513]1996 # time average the scantable without using a mask
[710]1997 newscan = scan.average_time()
[1846]1998
[513]1999 """
2000 varlist = vars()
[1593]2001 weight = weight or 'TINT'
2002 mask = mask or ()
2003 scanav = (scanav and 'SCAN') or 'NONE'
[1118]2004 scan = (self, )
[1859]2005
2006 if align:
2007 scan = (self.freq_align(insitu=False), )
2008 s = None
2009 if weight.upper() == 'MEDIAN':
2010 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2011 scanav))
2012 else:
2013 s = scantable(self._math._average(scan, mask, weight.upper(),
2014 scanav))
[1099]2015 s._add_history("average_time", varlist)
[513]2016 return s
[710]2017
[1862]2018 @asaplog_post_dec
[876]2019 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]2020 """\
[513]2021 Return a scan where all spectra are converted to either
2022 Jansky or Kelvin depending upon the flux units of the scan table.
2023 By default the function tries to look the values up internally.
2024 If it can't find them (or if you want to over-ride), you must
2025 specify EITHER jyperk OR eta (and D which it will try to look up
2026 also if you don't set it). jyperk takes precedence if you set both.
[1846]2027
[513]2028 Parameters:
[1846]2029
[513]2030 jyperk: the Jy / K conversion factor
[1855]2031
[513]2032 eta: the aperture efficiency
[1855]2033
[1928]2034 d: the geometric diameter (metres)
[1855]2035
[513]2036 insitu: if False a new scantable is returned.
2037 Otherwise, the scaling is done in-situ
2038 The default is taken from .asaprc (False)
[1846]2039
[513]2040 """
2041 if insitu is None: insitu = rcParams['insitu']
[876]2042 self._math._setinsitu(insitu)
[513]2043 varlist = vars()
[1593]2044 jyperk = jyperk or -1.0
2045 d = d or -1.0
2046 eta = eta or -1.0
[876]2047 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2048 s._add_history("convert_flux", varlist)
2049 if insitu: self._assign(s)
2050 else: return s
[513]2051
[1862]2052 @asaplog_post_dec
[876]2053 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]2054 """\
[513]2055 Return a scan after applying a gain-elevation correction.
2056 The correction can be made via either a polynomial or a
2057 table-based interpolation (and extrapolation if necessary).
2058 You specify polynomial coefficients, an ascii table or neither.
2059 If you specify neither, then a polynomial correction will be made
2060 with built in coefficients known for certain telescopes (an error
2061 will occur if the instrument is not known).
2062 The data and Tsys are *divided* by the scaling factors.
[1846]2063
[513]2064 Parameters:
[1846]2065
[513]2066 poly: Polynomial coefficients (default None) to compute a
2067 gain-elevation correction as a function of
2068 elevation (in degrees).
[1855]2069
[513]2070 filename: The name of an ascii file holding correction factors.
2071 The first row of the ascii file must give the column
2072 names and these MUST include columns
[2431]2073 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]2074 by this) somewhere.
2075 The second row must give the data type of the
2076 column. Use 'R' for Real and 'I' for Integer.
2077 An example file would be
2078 (actual factors are arbitrary) :
2079
2080 TIME ELEVATION FACTOR
2081 R R R
2082 0.1 0 0.8
2083 0.2 20 0.85
2084 0.3 40 0.9
2085 0.4 60 0.85
2086 0.5 80 0.8
2087 0.6 90 0.75
[1855]2088
[513]2089 method: Interpolation method when correcting from a table.
[2431]2090 Values are 'nearest', 'linear' (default), 'cubic'
2091 and 'spline'
[1855]2092
[513]2093 insitu: if False a new scantable is returned.
2094 Otherwise, the scaling is done in-situ
2095 The default is taken from .asaprc (False)
[1846]2096
[513]2097 """
2098
2099 if insitu is None: insitu = rcParams['insitu']
[876]2100 self._math._setinsitu(insitu)
[513]2101 varlist = vars()
[1593]2102 poly = poly or ()
[513]2103 from os.path import expandvars
2104 filename = expandvars(filename)
[876]2105 s = scantable(self._math._gainel(self, poly, filename, method))
2106 s._add_history("gain_el", varlist)
[1593]2107 if insitu:
2108 self._assign(s)
2109 else:
2110 return s
[710]2111
[1862]2112 @asaplog_post_dec
[931]2113 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2114 """\
[513]2115 Return a scan where all rows have been aligned in frequency/velocity.
2116 The alignment frequency frame (e.g. LSRK) is that set by function
2117 set_freqframe.
[1846]2118
[513]2119 Parameters:
[1855]2120
[513]2121 reftime: reference time to align at. By default, the time of
2122 the first row of data is used.
[1855]2123
[513]2124 method: Interpolation method for regridding the spectra.
[2431]2125 Choose from 'nearest', 'linear', 'cubic' (default)
2126 and 'spline'
[1855]2127
[513]2128 insitu: if False a new scantable is returned.
2129 Otherwise, the scaling is done in-situ
2130 The default is taken from .asaprc (False)
[1846]2131
[513]2132 """
[931]2133 if insitu is None: insitu = rcParams["insitu"]
[2429]2134 oldInsitu = self._math._insitu()
[876]2135 self._math._setinsitu(insitu)
[513]2136 varlist = vars()
[1593]2137 reftime = reftime or ""
[931]2138 s = scantable(self._math._freq_align(self, reftime, method))
[876]2139 s._add_history("freq_align", varlist)
[2429]2140 self._math._setinsitu(oldInsitu)
[2349]2141 if insitu:
2142 self._assign(s)
2143 else:
2144 return s
[513]2145
[1862]2146 @asaplog_post_dec
[1725]2147 def opacity(self, tau=None, insitu=None):
[1846]2148 """\
[513]2149 Apply an opacity correction. The data
2150 and Tsys are multiplied by the correction factor.
[1846]2151
[513]2152 Parameters:
[1855]2153
[1689]2154 tau: (list of) opacity from which the correction factor is
[513]2155 exp(tau*ZD)
[1689]2156 where ZD is the zenith-distance.
2157 If a list is provided, it has to be of length nIF,
2158 nIF*nPol or 1 and in order of IF/POL, e.g.
2159 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2160 if tau is `None` the opacities are determined from a
2161 model.
[1855]2162
[513]2163 insitu: if False a new scantable is returned.
2164 Otherwise, the scaling is done in-situ
2165 The default is taken from .asaprc (False)
[1846]2166
[513]2167 """
[2349]2168 if insitu is None:
2169 insitu = rcParams['insitu']
[876]2170 self._math._setinsitu(insitu)
[513]2171 varlist = vars()
[1689]2172 if not hasattr(tau, "__len__"):
2173 tau = [tau]
[876]2174 s = scantable(self._math._opacity(self, tau))
2175 s._add_history("opacity", varlist)
[2349]2176 if insitu:
2177 self._assign(s)
2178 else:
2179 return s
[513]2180
[1862]2181 @asaplog_post_dec
[513]2182 def bin(self, width=5, insitu=None):
[1846]2183 """\
[513]2184 Return a scan where all spectra have been binned up.
[1846]2185
[1348]2186 Parameters:
[1846]2187
[513]2188 width: The bin width (default=5) in pixels
[1855]2189
[513]2190 insitu: if False a new scantable is returned.
2191 Otherwise, the scaling is done in-situ
2192 The default is taken from .asaprc (False)
[1846]2193
[513]2194 """
[2349]2195 if insitu is None:
2196 insitu = rcParams['insitu']
[876]2197 self._math._setinsitu(insitu)
[513]2198 varlist = vars()
[876]2199 s = scantable(self._math._bin(self, width))
[1118]2200 s._add_history("bin", varlist)
[1589]2201 if insitu:
2202 self._assign(s)
2203 else:
2204 return s
[513]2205
[1862]2206 @asaplog_post_dec
[2672]2207 def reshape(self, first, last, insitu=None):
2208 """Resize the band by providing first and last channel.
2209 This will cut off all channels outside [first, last].
2210 """
2211 if insitu is None:
2212 insitu = rcParams['insitu']
2213 varlist = vars()
2214 if last < 0:
2215 last = self.nchan()-1 + last
2216 s = None
2217 if insitu:
2218 s = self
2219 else:
2220 s = self.copy()
2221 s._reshape(first,last)
2222 s._add_history("reshape", varlist)
2223 if not insitu:
2224 return s
2225
2226 @asaplog_post_dec
[513]2227 def resample(self, width=5, method='cubic', insitu=None):
[1846]2228 """\
[1348]2229 Return a scan where all spectra have been binned up.
[1573]2230
[1348]2231 Parameters:
[1846]2232
[513]2233 width: The bin width (default=5) in pixels
[1855]2234
[513]2235 method: Interpolation method when correcting from a table.
[2431]2236 Values are 'nearest', 'linear', 'cubic' (default)
2237 and 'spline'
[1855]2238
[513]2239 insitu: if False a new scantable is returned.
2240 Otherwise, the scaling is done in-situ
2241 The default is taken from .asaprc (False)
[1846]2242
[513]2243 """
[2349]2244 if insitu is None:
2245 insitu = rcParams['insitu']
[876]2246 self._math._setinsitu(insitu)
[513]2247 varlist = vars()
[876]2248 s = scantable(self._math._resample(self, method, width))
[1118]2249 s._add_history("resample", varlist)
[2349]2250 if insitu:
2251 self._assign(s)
2252 else:
2253 return s
[513]2254
[1862]2255 @asaplog_post_dec
[946]2256 def average_pol(self, mask=None, weight='none'):
[1846]2257 """\
[946]2258 Average the Polarisations together.
[1846]2259
[946]2260 Parameters:
[1846]2261
[946]2262 mask: An optional mask defining the region, where the
2263 averaging will be applied. The output will have all
2264 specified points masked.
[1855]2265
[946]2266 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2267 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2268
[946]2269 """
2270 varlist = vars()
[1593]2271 mask = mask or ()
[1010]2272 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2273 s._add_history("average_pol", varlist)
[992]2274 return s
[513]2275
[1862]2276 @asaplog_post_dec
[1145]2277 def average_beam(self, mask=None, weight='none'):
[1846]2278 """\
[1145]2279 Average the Beams together.
[1846]2280
[1145]2281 Parameters:
2282 mask: An optional mask defining the region, where the
2283 averaging will be applied. The output will have all
2284 specified points masked.
[1855]2285
[1145]2286 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2287 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2288
[1145]2289 """
2290 varlist = vars()
[1593]2291 mask = mask or ()
[1145]2292 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2293 s._add_history("average_beam", varlist)
2294 return s
2295
[1586]2296 def parallactify(self, pflag):
[1846]2297 """\
[1843]2298 Set a flag to indicate whether this data should be treated as having
[1617]2299 been 'parallactified' (total phase == 0.0)
[1846]2300
[1617]2301 Parameters:
[1855]2302
[1843]2303 pflag: Bool indicating whether to turn this on (True) or
[1617]2304 off (False)
[1846]2305
[1617]2306 """
[1586]2307 varlist = vars()
2308 self._parallactify(pflag)
2309 self._add_history("parallactify", varlist)
2310
[1862]2311 @asaplog_post_dec
[992]2312 def convert_pol(self, poltype=None):
[1846]2313 """\
[992]2314 Convert the data to a different polarisation type.
[1565]2315 Note that you will need cross-polarisation terms for most conversions.
[1846]2316
[992]2317 Parameters:
[1855]2318
[992]2319 poltype: The new polarisation type. Valid types are:
[2431]2320 'linear', 'circular', 'stokes' and 'linpol'
[1846]2321
[992]2322 """
2323 varlist = vars()
[1859]2324 s = scantable(self._math._convertpol(self, poltype))
[1118]2325 s._add_history("convert_pol", varlist)
[992]2326 return s
2327
[1862]2328 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so individual rows can be plotted
        # against, and reverted to, the original below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' is the mode handed to _get_verify_action; it is set to
        # "N" once the user chooses 'R' so all remaining rows are
        # auto-rejected without plotting.
        # NOTE(review): 'H' appears to be the initial mode for the first
        # prompt - confirm against _get_verify_action.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed (xsm/ysm) vs original (xorg/yorg) data per row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user to accept/reject this row's smoothing
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row, fall back to the default prompt mode
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2420
[2186]2421 @asaplog_post_dec
[2435]2422 def regrid_channel(self, width=5, plot=False, insitu=None):
2423 """\
2424 Regrid the spectra by the specified channel width
2425
2426 Parameters:
2427
2428 width: The channel width (float) of regridded spectra
2429 in the current spectral unit.
2430
2431 plot: [NOT IMPLEMENTED YET]
2432 plot the original and the regridded spectra.
2433 In this each indivual fit has to be approved, by
2434 typing 'y' or 'n'
2435
2436 insitu: if False a new scantable is returned.
2437 Otherwise, the scaling is done in-situ
2438 The default is taken from .asaprc (False)
2439
2440 """
2441 if insitu is None: insitu = rcParams['insitu']
2442 varlist = vars()
2443
2444 if plot:
2445 asaplog.post()
2446 asaplog.push("Verification plot is not implemtnetd yet.")
2447 asaplog.post("WARN")
2448
2449 s = self.copy()
2450 s._regrid_specchan(width)
2451
2452 s._add_history("regrid_channel", varlist)
2453
2454# if plot:
2455# from asap.asapplotter import new_asaplot
2456# theplot = new_asaplot(rcParams['plotter.gui'])
[2535]2457# from matplotlib import rc as rcp
2458# rcp('lines', linewidth=1)
[2435]2459# theplot.set_panels()
2460# ylab=s._get_ordinate_label()
2461# #theplot.palette(0,["#777777","red"])
2462# for r in xrange(s.nrow()):
2463# xsm=s._getabcissa(r)
2464# ysm=s._getspectrum(r)
2465# xorg=orgscan._getabcissa(r)
2466# yorg=orgscan._getspectrum(r)
2467# theplot.clear()
2468# theplot.hold()
2469# theplot.set_axes('ylabel',ylab)
2470# theplot.set_axes('xlabel',s._getabcissalabel(r))
2471# theplot.set_axes('title',s._getsourcename(r))
2472# theplot.set_line(label='Original',color="#777777")
2473# theplot.plot(xorg,yorg)
2474# theplot.set_line(label='Smoothed',color="red")
2475# theplot.plot(xsm,ysm)
2476# ### Ugly part for legend
2477# for i in [0,1]:
2478# theplot.subplots[0]['lines'].append(
2479# [theplot.subplots[0]['axes'].lines[i]]
2480# )
2481# theplot.release()
2482# ### Ugly part for legend
2483# theplot.subplots[0]['lines']=[]
2484# res = raw_input("Accept smoothing ([y]/n): ")
2485# if res.upper() == 'N':
2486# s._setspectrum(yorg, r)
2487# theplot.quit()
2488# del theplot
2489# del orgscan
2490
2491 if insitu: self._assign(s)
2492 else: return s
2493
2494 @asaplog_post_dec
[2186]2495 def _parse_wn(self, wn):
2496 if isinstance(wn, list) or isinstance(wn, tuple):
2497 return wn
2498 elif isinstance(wn, int):
2499 return [ wn ]
2500 elif isinstance(wn, str):
[2277]2501 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2502 val = wn.split('-')
2503 val = [int(val[0]), int(val[1])]
2504 val.sort()
2505 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2506 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2507 val = int(wn[2:])+1
2508 res = [i for i in xrange(val)]
[2277]2509 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2510 val = int(wn[:-2])+1
2511 res = [i for i in xrange(val)]
[2277]2512 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2513 val = int(wn[1:])
2514 res = [i for i in xrange(val)]
[2277]2515 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2516 val = int(wn[:-1])
2517 res = [i for i in xrange(val)]
[2411]2518 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2519 # then interpreted in C++
2520 # side as [a,a+1,...,a_nyq]
2521 # (CAS-3759)
[2186]2522 val = int(wn[2:])
[2411]2523 res = [val, -999]
2524 #res = [i for i in xrange(val, self.nchan()/2+1)]
2525 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2526 # then interpreted in C++
2527 # side as [a,a+1,...,a_nyq]
2528 # (CAS-3759)
[2186]2529 val = int(wn[:-2])
[2411]2530 res = [val, -999]
2531 #res = [i for i in xrange(val, self.nchan()/2+1)]
2532 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2533 # then interpreted in C++
2534 # side as [a+1,a+2,...,a_nyq]
2535 # (CAS-3759)
[2186]2536 val = int(wn[1:])+1
[2411]2537 res = [val, -999]
2538 #res = [i for i in xrange(val, self.nchan()/2+1)]
2539 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2540 # then interpreted in C++
2541 # side as [a+1,a+2,...,a_nyq]
2542 # (CAS-3759)
[2186]2543 val = int(wn[:-1])+1
[2411]2544 res = [val, -999]
2545 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2546
[2186]2547 return res
2548 else:
2549 msg = 'wrong value given for addwn/rejwn'
2550 raise RuntimeError(msg)
2551
[2713]2552 @asaplog_post_dec
2553 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
2554 whichrow=None, uselinefinder=None, edge=None,
2555 threshold=None, chan_avg_limit=None):
2556 """\
2557 Calculates and returns model selection criteria for a specified
2558 baseline model and a given spectrum data.
2559 Available values include Akaike Information Criterion (AIC), the
2560 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
2561 Bayesian Information Criterion (BIC) and the Generalised Cross
2562 Validation (GCV).
[2186]2563
[2713]2564 Parameters:
2565 value: name of model selection criteria to calculate.
2566 available ones include 'aic', 'aicc', 'bic' and
2567 'gcv'. default is 'aicc'.
2568 blfunc: baseline function name. available ones include
2569 'chebyshev', 'cspline' and 'sinusoid'.
2570 default is 'chebyshev'.
2571 order: parameter for basline function. actually stands for
2572 order of polynomial (order) for 'chebyshev',
2573 number of spline pieces (npiece) for 'cspline' and
2574 maximum wave number for 'sinusoid', respectively.
2575 default is 5 (which is also the default order value
2576 for [auto_]chebyshev_baseline()).
2577 mask: an optional mask. default is [].
2578 whichrow: row number. default is 0 (the first row)
2579 uselinefinder: use sd.linefinder() to flag out line regions
2580 default is True.
2581 edge: an optional number of channel to drop at
2582 the edge of spectrum. If only one value is
2583 specified, the same number will be dropped
2584 from both sides of the spectrum. Default
2585 is to keep all channels. Nested tuples
2586 represent individual edge selection for
2587 different IFs (a number of spectral channels
2588 can be different)
2589 default is (0, 0).
2590 threshold: the threshold used by line finder. It is
2591 better to keep it large as only strong lines
2592 affect the baseline solution.
2593 default is 3.
2594 chan_avg_limit: a maximum number of consequtive spectral
2595 channels to average during the search of
2596 weak and broad lines. The default is no
2597 averaging (and no search for weak lines).
2598 If such lines can affect the fitted baseline
2599 (e.g. a high order polynomial is fitted),
2600 increase this parameter (usually values up
2601 to 8 are reasonable). Most users of this
2602 method should find the default value sufficient.
2603 default is 1.
2604
2605 Example:
2606 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
2607 """
2608
2609 try:
2610 varlist = vars()
2611
2612 if value is None: value = 'aicc'
2613 if blfunc is None: blfunc = 'chebyshev'
2614 if order is None: order = 5
2615 if mask is None: mask = []
2616 if whichrow is None: whichrow = 0
2617 if uselinefinder is None: uselinefinder = True
2618 if edge is None: edge = (0, 0)
2619 if threshold is None: threshold = 3
2620 if chan_avg_limit is None: chan_avg_limit = 1
2621
2622 return self._calc_aic(value, blfunc, order, mask,
2623 whichrow, uselinefinder, edge,
2624 threshold, chan_avg_limit)
2625
2626 except RuntimeError, e:
2627 raise_fitting_failure_exception(e)
2628
[1862]2629 @asaplog_post_dec
    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None, clipthresh=None,
                          clipniter=None, plot=None,
                          getresidual=None, showprogress=None,
                          minnrow=None, outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the caller's arguments for the history entry before
            # the None placeholders are replaced by their defaults
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for unset arguments
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the C++ layer receives the CSV flag as a "T"/"F" character
            # prefixed to the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            # NOTE: the arguments below are positional and order-critical
            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                                        str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile)
            workscan._add_history('sinusoid_baseline', varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]2756
2757
[2186]2758 @asaplog_post_dec
[2349]2759 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2760 fftmethod=None, fftthresh=None,
2761 addwn=None, rejwn=None, clipthresh=None,
2762 clipniter=None, edge=None, threshold=None,
2763 chan_avg_limit=None, plot=None,
2764 getresidual=None, showprogress=None,
[2641]2765 minnrow=None, outlog=None, blfile=None, csvformat=None):
[2047]2766 """\
[2349]2767 Return a scan which has been baselined (all rows) with sinusoidal
2768 functions.
[2047]2769 Spectral lines are detected first using linefinder and masked out
2770 to avoid them affecting the baseline solution.
2771
2772 Parameters:
[2189]2773 insitu: if False a new scantable is returned.
2774 Otherwise, the scaling is done in-situ
2775 The default is taken from .asaprc (False)
2776 mask: an optional mask retreived from scantable
2777 applyfft: if True use some method, such as FFT, to find
2778 strongest sinusoidal components in the wavenumber
2779 domain to be used for baseline fitting.
2780 default is True.
2781 fftmethod: method to find the strong sinusoidal components.
2782 now only 'fft' is available and it is the default.
2783 fftthresh: the threshold to select wave numbers to be used for
2784 fitting from the distribution of amplitudes in the
2785 wavenumber domain.
2786 both float and string values accepted.
2787 given a float value, the unit is set to sigma.
2788 for string values, allowed formats include:
[2349]2789 'xsigma' or 'x' (= x-sigma level. e.g.,
2790 '3sigma'), or
[2189]2791 'topx' (= the x strongest ones, e.g. 'top5').
2792 default is 3.0 (unit: sigma).
2793 addwn: the additional wave numbers to be used for fitting.
2794 list or integer value is accepted to specify every
2795 wave numbers. also string value can be used in case
2796 you need to specify wave numbers in a certain range,
2797 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2798 '<a' (= 0,1,...,a-2,a-1),
2799 '>=a' (= a, a+1, ... up to the maximum wave
2800 number corresponding to the Nyquist
2801 frequency for the case of FFT).
[2411]2802 default is [0].
[2189]2803 rejwn: the wave numbers NOT to be used for fitting.
2804 can be set just as addwn but has higher priority:
2805 wave numbers which are specified both in addwn
2806 and rejwn will NOT be used. default is [].
2807 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]2808 clipniter: maximum number of iteration of 'clipthresh'-sigma
2809 clipping (default is 0)
[2189]2810 edge: an optional number of channel to drop at
2811 the edge of spectrum. If only one value is
2812 specified, the same number will be dropped
2813 from both sides of the spectrum. Default
2814 is to keep all channels. Nested tuples
2815 represent individual edge selection for
2816 different IFs (a number of spectral channels
2817 can be different)
2818 threshold: the threshold used by line finder. It is
2819 better to keep it large as only strong lines
2820 affect the baseline solution.
2821 chan_avg_limit: a maximum number of consequtive spectral
2822 channels to average during the search of
2823 weak and broad lines. The default is no
2824 averaging (and no search for weak lines).
2825 If such lines can affect the fitted baseline
2826 (e.g. a high order polynomial is fitted),
2827 increase this parameter (usually values up
2828 to 8 are reasonable). Most users of this
2829 method should find the default value sufficient.
2830 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2831 plot the fit and the residual. In this each
2832 indivual fit has to be approved, by typing 'y'
2833 or 'n'
2834 getresidual: if False, returns best-fit values instead of
2835 residual. (default is True)
2836 showprogress: show progress status for large data.
2837 default is True.
2838 minnrow: minimum number of input spectra to show.
2839 default is 1000.
2840 outlog: Output the coefficients of the best-fit
2841 function to logger (default is False)
2842 blfile: Name of a text file in which the best-fit
2843 parameter values to be written
2844 (default is "": no file/logger output)
[2641]2845 csvformat: if True blfile is csv-formatted, default is False.
[2047]2846
2847 Example:
[2186]2848 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2849
2850 Note:
2851 The best-fit parameter values output in logger and/or blfile are now
2852 based on specunit of 'channel'.
[2047]2853 """
2854
[2186]2855 try:
2856 varlist = vars()
[2047]2857
[2186]2858 if insitu is None: insitu = rcParams['insitu']
2859 if insitu:
2860 workscan = self
[2047]2861 else:
[2186]2862 workscan = self.copy()
2863
[2410]2864 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2865 if mask is None: mask = []
[2186]2866 if applyfft is None: applyfft = True
2867 if fftmethod is None: fftmethod = 'fft'
2868 if fftthresh is None: fftthresh = 3.0
[2411]2869 if addwn is None: addwn = [0]
[2186]2870 if rejwn is None: rejwn = []
2871 if clipthresh is None: clipthresh = 3.0
2872 if clipniter is None: clipniter = 0
2873 if edge is None: edge = (0,0)
2874 if threshold is None: threshold = 3
2875 if chan_avg_limit is None: chan_avg_limit = 1
2876 if plot is None: plot = False
2877 if getresidual is None: getresidual = True
[2189]2878 if showprogress is None: showprogress = True
2879 if minnrow is None: minnrow = 1000
[2186]2880 if outlog is None: outlog = False
2881 if blfile is None: blfile = ''
[2641]2882 if csvformat is None: csvformat = False
[2047]2883
[2641]2884 if csvformat:
2885 scsvformat = "T"
2886 else:
2887 scsvformat = "F"
2888
[2277]2889 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2349]2890 workscan._auto_sinusoid_baseline(mask, applyfft,
2891 fftmethod.lower(),
2892 str(fftthresh).lower(),
2893 workscan._parse_wn(addwn),
2894 workscan._parse_wn(rejwn),
2895 clipthresh, clipniter,
2896 normalise_edge_param(edge),
2897 threshold, chan_avg_limit,
2898 getresidual,
2899 pack_progress_params(showprogress,
2900 minnrow),
[2641]2901 outlog, scsvformat+blfile)
[2047]2902 workscan._add_history("auto_sinusoid_baseline", varlist)
2903
2904 if insitu:
2905 self._assign(workscan)
2906 else:
2907 return workscan
2908
2909 except RuntimeError, e:
[2186]2910 raise_fitting_failure_exception(e)
[2047]2911
2912 @asaplog_post_dec
[2349]2913 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2914 clipthresh=None, clipniter=None, plot=None,
2915 getresidual=None, showprogress=None, minnrow=None,
[2641]2916 outlog=None, blfile=None, csvformat=None):
[1846]2917 """\
[2349]2918 Return a scan which has been baselined (all rows) by cubic spline
2919 function (piecewise cubic polynomial).
2920
[513]2921 Parameters:
[2189]2922 insitu: If False a new scantable is returned.
2923 Otherwise, the scaling is done in-situ
2924 The default is taken from .asaprc (False)
2925 mask: An optional mask
2926 npiece: Number of pieces. (default is 2)
2927 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]2928 clipniter: maximum number of iteration of 'clipthresh'-sigma
2929 clipping (default is 0)
[2189]2930 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2931 plot the fit and the residual. In this each
2932 indivual fit has to be approved, by typing 'y'
2933 or 'n'
2934 getresidual: if False, returns best-fit values instead of
2935 residual. (default is True)
2936 showprogress: show progress status for large data.
2937 default is True.
2938 minnrow: minimum number of input spectra to show.
2939 default is 1000.
2940 outlog: Output the coefficients of the best-fit
2941 function to logger (default is False)
2942 blfile: Name of a text file in which the best-fit
2943 parameter values to be written
2944 (default is "": no file/logger output)
[2641]2945 csvformat: if True blfile is csv-formatted, default is False.
[1846]2946
[2012]2947 Example:
[2349]2948 # return a scan baselined by a cubic spline consisting of 2 pieces
2949 # (i.e., 1 internal knot),
[2012]2950 # also with 3-sigma clipping, iteration up to 4 times
2951 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2952
2953 Note:
2954 The best-fit parameter values output in logger and/or blfile are now
2955 based on specunit of 'channel'.
[2012]2956 """
2957
[2186]2958 try:
2959 varlist = vars()
2960
2961 if insitu is None: insitu = rcParams['insitu']
2962 if insitu:
2963 workscan = self
2964 else:
2965 workscan = self.copy()
[1855]2966
[2410]2967 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2968 if mask is None: mask = []
[2189]2969 if npiece is None: npiece = 2
2970 if clipthresh is None: clipthresh = 3.0
2971 if clipniter is None: clipniter = 0
2972 if plot is None: plot = False
2973 if getresidual is None: getresidual = True
2974 if showprogress is None: showprogress = True
2975 if minnrow is None: minnrow = 1000
2976 if outlog is None: outlog = False
2977 if blfile is None: blfile = ''
[2641]2978 if csvformat is None: csvformat = False
[1855]2979
[2641]2980 if csvformat:
2981 scsvformat = "T"
2982 else:
2983 scsvformat = "F"
2984
[2012]2985 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2349]2986 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2987 getresidual,
2988 pack_progress_params(showprogress,
[2641]2989 minnrow),
2990 outlog, scsvformat+blfile)
[2012]2991 workscan._add_history("cspline_baseline", varlist)
2992
2993 if insitu:
2994 self._assign(workscan)
2995 else:
2996 return workscan
2997
2998 except RuntimeError, e:
[2186]2999 raise_fitting_failure_exception(e)
[1855]3000
[2186]3001 @asaplog_post_dec
[2349]3002 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
3003 clipthresh=None, clipniter=None,
3004 edge=None, threshold=None, chan_avg_limit=None,
3005 getresidual=None, plot=None,
3006 showprogress=None, minnrow=None, outlog=None,
[2641]3007 blfile=None, csvformat=None):
[2012]3008 """\
3009 Return a scan which has been baselined (all rows) by cubic spline
3010 function (piecewise cubic polynomial).
3011 Spectral lines are detected first using linefinder and masked out
3012 to avoid them affecting the baseline solution.
3013
3014 Parameters:
[2189]3015 insitu: if False a new scantable is returned.
3016 Otherwise, the scaling is done in-situ
3017 The default is taken from .asaprc (False)
3018 mask: an optional mask retreived from scantable
3019 npiece: Number of pieces. (default is 2)
3020 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3021 clipniter: maximum number of iteration of 'clipthresh'-sigma
3022 clipping (default is 0)
[2189]3023 edge: an optional number of channel to drop at
3024 the edge of spectrum. If only one value is
3025 specified, the same number will be dropped
3026 from both sides of the spectrum. Default
3027 is to keep all channels. Nested tuples
3028 represent individual edge selection for
3029 different IFs (a number of spectral channels
3030 can be different)
3031 threshold: the threshold used by line finder. It is
3032 better to keep it large as only strong lines
3033 affect the baseline solution.
3034 chan_avg_limit: a maximum number of consequtive spectral
3035 channels to average during the search of
3036 weak and broad lines. The default is no
3037 averaging (and no search for weak lines).
3038 If such lines can affect the fitted baseline
3039 (e.g. a high order polynomial is fitted),
3040 increase this parameter (usually values up
3041 to 8 are reasonable). Most users of this
3042 method should find the default value sufficient.
3043 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3044 plot the fit and the residual. In this each
3045 indivual fit has to be approved, by typing 'y'
3046 or 'n'
3047 getresidual: if False, returns best-fit values instead of
3048 residual. (default is True)
3049 showprogress: show progress status for large data.
3050 default is True.
3051 minnrow: minimum number of input spectra to show.
3052 default is 1000.
3053 outlog: Output the coefficients of the best-fit
3054 function to logger (default is False)
3055 blfile: Name of a text file in which the best-fit
3056 parameter values to be written
3057 (default is "": no file/logger output)
[2641]3058 csvformat: if True blfile is csv-formatted, default is False.
[1846]3059
[1907]3060 Example:
[2012]3061 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]3062
3063 Note:
3064 The best-fit parameter values output in logger and/or blfile are now
3065 based on specunit of 'channel'.
[2012]3066 """
[1846]3067
[2186]3068 try:
3069 varlist = vars()
[2012]3070
[2186]3071 if insitu is None: insitu = rcParams['insitu']
3072 if insitu:
3073 workscan = self
[1391]3074 else:
[2186]3075 workscan = self.copy()
3076
[2410]3077 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3078 if mask is None: mask = []
[2186]3079 if npiece is None: npiece = 2
3080 if clipthresh is None: clipthresh = 3.0
3081 if clipniter is None: clipniter = 0
3082 if edge is None: edge = (0, 0)
3083 if threshold is None: threshold = 3
3084 if chan_avg_limit is None: chan_avg_limit = 1
3085 if plot is None: plot = False
3086 if getresidual is None: getresidual = True
[2189]3087 if showprogress is None: showprogress = True
3088 if minnrow is None: minnrow = 1000
[2186]3089 if outlog is None: outlog = False
3090 if blfile is None: blfile = ''
[2641]3091 if csvformat is None: csvformat = False
[1819]3092
[2641]3093 if csvformat:
3094 scsvformat = "T"
3095 else:
3096 scsvformat = "F"
3097
[2277]3098 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2269]3099 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
3100 clipniter,
3101 normalise_edge_param(edge),
3102 threshold,
3103 chan_avg_limit, getresidual,
3104 pack_progress_params(showprogress,
3105 minnrow),
[2641]3106 outlog, scsvformat+blfile)
[2012]3107 workscan._add_history("auto_cspline_baseline", varlist)
[1907]3108
[1856]3109 if insitu:
3110 self._assign(workscan)
3111 else:
3112 return workscan
[2012]3113
3114 except RuntimeError, e:
[2186]3115 raise_fitting_failure_exception(e)
[513]3116
    @asaplog_post_dec
    def chebyshev_baseline(self, insitu=None, mask=None, order=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            insitu:        If False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          An optional mask
            order:         the maximum order of Chebyshev polynomial (default is 5)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by Chebyshev polynomials of order
            # up to 5, with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture call arguments before locals change; stored in history
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # csv flag is passed down as a "T"/"F" prefix on the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order, clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile)
            workscan._add_history("chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3204
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask retreived from scantable
            order:         the maximum order of Chebyshev polynomial (default is 5)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            edge:          an optional number of channel to drop at
                           the edge of spectrum. If only one value is
                           specified, the same number will be dropped
                           from both sides of the spectrum. Default
                           is to keep all channels. Nested tuples
                           represent individual edge selection for
                           different IFs (a number of spectral channels
                           can be different)
            threshold:     the threshold used by line finder. It is
                           better to keep it large as only strong lines
                           affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                           channels to average during the search of
                           weak and broad lines. The default is no
                           averaging (and no search for weak lines).
                           If such lines can affect the fitted baseline
                           (e.g. a high order polynomial is fitted),
                           increase this parameter (usually values up
                           to 8 are reasonable). Most users of this
                           method should find the default value sufficient.
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture call arguments before locals change; stored in history
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # csv flag is passed down as a "T"/"F" prefix on the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order, clipthresh,
                                              clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3319
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            order:         the order of the polynomial (default is 0)
            plot:          plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # capture call arguments before locals change; stored in history
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""
            if csvformat is None: csvformat = False

            # csv flag is passed down as a "T"/"F" prefix on the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            if plot:
                # Interactive path: fit each row in Python with the fitter
                # and ask the user to approve every fit.
                # NOTE(review): blfile is appended to only when it already
                # exists on disk — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # interactive approval state; answers below: 'N' skip row,
                # 'R' stop, 'A' auto-accept all remaining rows ('Y')
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        # combine user mask with the row's own flags
                        f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                    #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    #if accept_fit.upper() == "N":
                    #    #workscan._append_blinfo(None, None, None)
                    #    continue
                    accept_fit = self._get_verify_action("Accept fit?",action)
                    if r == 0: action = None
                    if accept_fit.upper() == "N":
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the best-fit model
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter used for the interactive display
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole fit to C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]3457
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask retreived from scantable
            order:         the order of the polynomial (default is 0)
            edge:          an optional number of channel to drop at
                           the edge of spectrum. If only one value is
                           specified, the same number will be dropped
                           from both sides of the spectrum. Default
                           is to keep all channels. Nested tuples
                           represent individual edge selection for
                           different IFs (a number of spectral channels
                           can be different)
            threshold:     the threshold used by line finder. It is
                           better to keep it large as only strong lines
                           affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                           channels to average during the search of
                           weak and broad lines. The default is no
                           averaging (and no search for weak lines).
                           If such lines can affect the fitted baseline
                           (e.g. a high order polynomial is fitted),
                           increase this parameter (usually values up
                           to 8 are reasonable). Most users of this
                           method should find the default value sufficient.
            plot:          plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # capture call arguments before locals change; stored in history
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # csv flag is passed down as a "T"/"F" prefix on the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            # normalise the edge spec (helper defined elsewhere in this
            # module); the result is indexed in pairs per IF below
            edge = normalise_edge_param(edge)

            if plot:
                # Interactive path: detect lines, fit each row in Python
                # and ask the user to approve every fit.
                # NOTE(review): blfile is appended to only when it already
                # exists on disk — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # interactive approval state; answers below: 'N' skip row,
                # 'R' stop, 'A' auto-accept all remaining rows ('Y')
                action = "H"
                for r in rows:
                    # each IF has its own (left, right) pair in the
                    # flattened edge list
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine user mask with the row's own flags
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found above
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                    #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    accept_fit = self._get_verify_action("Accept fit?",action)
                    if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the best-fit model
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter used for the interactive display
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole fit to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]3627
3628 def _init_blinfo(self):
3629 """\
3630 Initialise the following three auxiliary members:
3631 blpars : parameters of the best-fit baseline,
3632 masklists : mask data (edge positions of masked channels) and
3633 actualmask : mask data (in boolean list),
3634 to keep for use later (including output to logger/text files).
3635 Used by poly_baseline() and auto_poly_baseline() in case of
3636 'plot=True'.
3637 """
3638 self.blpars = []
3639 self.masklists = []
3640 self.actualmask = []
3641 return
[880]3642
[2012]3643 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3644 """\
3645 Append baseline-fitting related info to blpars, masklist and
3646 actualmask.
3647 """
3648 self.blpars.append(data_blpars)
3649 self.masklists.append(data_masklists)
3650 self.actualmask.append(data_actualmask)
3651 return
3652
[1862]3653 @asaplog_post_dec
[914]3654 def rotate_linpolphase(self, angle):
[1846]3655 """\
[914]3656 Rotate the phase of the complex polarization O=Q+iU correlation.
3657 This is always done in situ in the raw data. So if you call this
3658 function more than once then each call rotates the phase further.
[1846]3659
[914]3660 Parameters:
[1846]3661
[914]3662 angle: The angle (degrees) to rotate (add) by.
[1846]3663
3664 Example::
3665
[914]3666 scan.rotate_linpolphase(2.3)
[1846]3667
[914]3668 """
3669 varlist = vars()
[936]3670 self._math._rotate_linpolphase(self, angle)
[914]3671 self._add_history("rotate_linpolphase", varlist)
3672 return
[710]3673
[1862]3674 @asaplog_post_dec
[914]3675 def rotate_xyphase(self, angle):
[1846]3676 """\
[914]3677 Rotate the phase of the XY correlation. This is always done in situ
3678 in the data. So if you call this function more than once
3679 then each call rotates the phase further.
[1846]3680
[914]3681 Parameters:
[1846]3682
[914]3683 angle: The angle (degrees) to rotate (add) by.
[1846]3684
3685 Example::
3686
[914]3687 scan.rotate_xyphase(2.3)
[1846]3688
[914]3689 """
3690 varlist = vars()
[936]3691 self._math._rotate_xyphase(self, angle)
[914]3692 self._add_history("rotate_xyphase", varlist)
3693 return
3694
[1862]3695 @asaplog_post_dec
[914]3696 def swap_linears(self):
[1846]3697 """\
[1573]3698 Swap the linear polarisations XX and YY, or better the first two
[1348]3699 polarisations as this also works for ciculars.
[914]3700 """
3701 varlist = vars()
[936]3702 self._math._swap_linears(self)
[914]3703 self._add_history("swap_linears", varlist)
3704 return
3705
[1862]3706 @asaplog_post_dec
[914]3707 def invert_phase(self):
[1846]3708 """\
[914]3709 Invert the phase of the complex polarisation
3710 """
3711 varlist = vars()
[936]3712 self._math._invert_phase(self)
[914]3713 self._add_history("invert_phase", varlist)
3714 return
3715
[1862]3716 @asaplog_post_dec
[876]3717 def add(self, offset, insitu=None):
[1846]3718 """\
[513]3719 Return a scan where all spectra have the offset added
[1846]3720
[513]3721 Parameters:
[1846]3722
[513]3723 offset: the offset
[1855]3724
[513]3725 insitu: if False a new scantable is returned.
3726 Otherwise, the scaling is done in-situ
3727 The default is taken from .asaprc (False)
[1846]3728
[513]3729 """
3730 if insitu is None: insitu = rcParams['insitu']
[876]3731 self._math._setinsitu(insitu)
[513]3732 varlist = vars()
[876]3733 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3734 s._add_history("add", varlist)
[876]3735 if insitu:
3736 self._assign(s)
3737 else:
[513]3738 return s
3739
[1862]3740 @asaplog_post_dec
[1308]3741 def scale(self, factor, tsys=True, insitu=None):
[1846]3742 """\
3743
[1938]3744 Return a scan where all spectra are scaled by the given 'factor'
[1846]3745
[513]3746 Parameters:
[1846]3747
[1819]3748 factor: the scaling factor (float or 1D float list)
[1855]3749
[513]3750 insitu: if False a new scantable is returned.
3751 Otherwise, the scaling is done in-situ
3752 The default is taken from .asaprc (False)
[1855]3753
[513]3754 tsys: if True (default) then apply the operation to Tsys
3755 as well as the data
[1846]3756
[513]3757 """
3758 if insitu is None: insitu = rcParams['insitu']
[876]3759 self._math._setinsitu(insitu)
[513]3760 varlist = vars()
[1819]3761 s = None
3762 import numpy
3763 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]3764 if isinstance(factor[0], list) or isinstance(factor[0],
3765 numpy.ndarray):
[1819]3766 from asapmath import _array2dOp
[2320]3767 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]3768 else:
[2320]3769 s = scantable( self._math._arrayop( self, factor,
3770 "MUL", tsys ) )
[1819]3771 else:
[2320]3772 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]3773 s._add_history("scale", varlist)
[876]3774 if insitu:
3775 self._assign(s)
3776 else:
[513]3777 return s
3778
[2349]3779 @preserve_selection
3780 def set_sourcetype(self, match, matchtype="pattern",
[1504]3781 sourcetype="reference"):
[1846]3782 """\
[1502]3783 Set the type of the source to be an source or reference scan
[1846]3784 using the provided pattern.
3785
[1502]3786 Parameters:
[1846]3787
[1504]3788 match: a Unix style pattern, regular expression or selector
[1855]3789
[1504]3790 matchtype: 'pattern' (default) UNIX style pattern or
3791 'regex' regular expression
[1855]3792
[1502]3793 sourcetype: the type of the source to use (source/reference)
[1846]3794
[1502]3795 """
3796 varlist = vars()
3797 stype = -1
[2480]3798 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
[1502]3799 stype = 1
[2480]3800 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
[1502]3801 stype = 0
[1504]3802 else:
[2480]3803 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
[1504]3804 if matchtype.lower().startswith("p"):
3805 matchtype = "pattern"
3806 elif matchtype.lower().startswith("r"):
3807 matchtype = "regex"
3808 else:
3809 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3810 sel = selector()
3811 if isinstance(match, selector):
3812 sel = match
3813 else:
[2480]3814 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
3815 self.set_selection(sel)
[1502]3816 self._setsourcetype(stype)
[1573]3817 self._add_history("set_sourcetype", varlist)
[1502]3818
[1862]3819 @asaplog_post_dec
[1857]3820 @preserve_selection
[1819]3821 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]3822 """\
[670]3823 This function allows to build quotients automatically.
[1819]3824 It assumes the observation to have the same number of
[670]3825 "ons" and "offs"
[1846]3826
[670]3827 Parameters:
[1846]3828
[710]3829 preserve: you can preserve (default) the continuum or
3830 remove it. The equations used are
[1857]3831
[670]3832 preserve: Output = Toff * (on/off) - Toff
[1857]3833
[1070]3834 remove: Output = Toff * (on/off) - Ton
[1855]3835
[1573]3836 mode: the on/off detection mode
[1348]3837 'paired' (default)
3838 identifies 'off' scans by the
3839 trailing '_R' (Mopra/Parkes) or
3840 '_e'/'_w' (Tid) and matches
3841 on/off pairs from the observing pattern
[1502]3842 'time'
3843 finds the closest off in time
[1348]3844
[1857]3845 .. todo:: verify argument is not implemented
3846
[670]3847 """
[1857]3848 varlist = vars()
[1348]3849 modes = ["time", "paired"]
[670]3850 if not mode in modes:
[876]3851 msg = "please provide valid mode. Valid modes are %s" % (modes)
3852 raise ValueError(msg)
[1348]3853 s = None
3854 if mode.lower() == "paired":
[1857]3855 sel = self.get_selection()
[1875]3856 sel.set_query("SRCTYPE==psoff")
[1356]3857 self.set_selection(sel)
[1348]3858 offs = self.copy()
[1875]3859 sel.set_query("SRCTYPE==pson")
[1356]3860 self.set_selection(sel)
[1348]3861 ons = self.copy()
3862 s = scantable(self._math._quotient(ons, offs, preserve))
3863 elif mode.lower() == "time":
3864 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]3865 s._add_history("auto_quotient", varlist)
[876]3866 return s
[710]3867
[1862]3868 @asaplog_post_dec
[1145]3869 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3870 """\
[1143]3871 Form a quotient using "off" beams when observing in "MX" mode.
[1846]3872
[1143]3873 Parameters:
[1846]3874
[1145]3875 mask: an optional mask to be used when weight == 'stddev'
[1855]3876
[1143]3877 weight: How to average the off beams. Default is 'median'.
[1855]3878
[1145]3879 preserve: you can preserve (default) the continuum or
[1855]3880 remove it. The equations used are:
[1846]3881
[1855]3882 preserve: Output = Toff * (on/off) - Toff
3883
3884 remove: Output = Toff * (on/off) - Ton
3885
[1217]3886 """
[1593]3887 mask = mask or ()
[1141]3888 varlist = vars()
3889 on = scantable(self._math._mx_extract(self, 'on'))
[1143]3890 preoff = scantable(self._math._mx_extract(self, 'off'))
3891 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3892 from asapmath import quotient
[1145]3893 q = quotient(on, off, preserve)
[1143]3894 q._add_history("mx_quotient", varlist)
[1217]3895 return q
[513]3896
[1862]3897 @asaplog_post_dec
[718]3898 def freq_switch(self, insitu=None):
[1846]3899 """\
[718]3900 Apply frequency switching to the data.
[1846]3901
[718]3902 Parameters:
[1846]3903
[718]3904 insitu: if False a new scantable is returned.
3905 Otherwise, the swictching is done in-situ
3906 The default is taken from .asaprc (False)
[1846]3907
[718]3908 """
3909 if insitu is None: insitu = rcParams['insitu']
[876]3910 self._math._setinsitu(insitu)
[718]3911 varlist = vars()
[876]3912 s = scantable(self._math._freqswitch(self))
[1118]3913 s._add_history("freq_switch", varlist)
[1856]3914 if insitu:
3915 self._assign(s)
3916 else:
3917 return s
[718]3918
[1862]3919 @asaplog_post_dec
[780]3920 def recalc_azel(self):
[1846]3921 """Recalculate the azimuth and elevation for each position."""
[780]3922 varlist = vars()
[876]3923 self._recalcazel()
[780]3924 self._add_history("recalc_azel", varlist)
3925 return
3926
[1862]3927 @asaplog_post_dec
[513]3928 def __add__(self, other):
[2574]3929 """
3930 implicit on all axes and on Tsys
3931 """
[513]3932 varlist = vars()
[2574]3933 s = self.__op( other, "ADD" )
[513]3934 s._add_history("operator +", varlist)
3935 return s
3936
[1862]3937 @asaplog_post_dec
[513]3938 def __sub__(self, other):
3939 """
3940 implicit on all axes and on Tsys
3941 """
3942 varlist = vars()
[2574]3943 s = self.__op( other, "SUB" )
[513]3944 s._add_history("operator -", varlist)
3945 return s
[710]3946
[1862]3947 @asaplog_post_dec
[513]3948 def __mul__(self, other):
3949 """
3950 implicit on all axes and on Tsys
3951 """
3952 varlist = vars()
[2574]3953 s = self.__op( other, "MUL" ) ;
[513]3954 s._add_history("operator *", varlist)
3955 return s
3956
[710]3957
[1862]3958 @asaplog_post_dec
[513]3959 def __div__(self, other):
3960 """
3961 implicit on all axes and on Tsys
3962 """
3963 varlist = vars()
[2574]3964 s = self.__op( other, "DIV" )
3965 s._add_history("operator /", varlist)
3966 return s
3967
3968 @asaplog_post_dec
3969 def __op( self, other, mode ):
[513]3970 s = None
3971 if isinstance(other, scantable):
[2574]3972 s = scantable(self._math._binaryop(self, other, mode))
[513]3973 elif isinstance(other, float):
3974 if other == 0.0:
[718]3975 raise ZeroDivisionError("Dividing by zero is not recommended")
[2574]3976 s = scantable(self._math._unaryop(self, other, mode, False))
[2144]3977 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]3978 if isinstance(other[0], list) \
3979 or isinstance(other[0], numpy.ndarray):
[2144]3980 from asapmath import _array2dOp
[2574]3981 s = _array2dOp( self, other, mode, False )
[2144]3982 else:
[2574]3983 s = scantable( self._math._arrayop( self, other,
3984 mode, False ) )
[513]3985 else:
[718]3986 raise TypeError("Other input is not a scantable or float value")
[513]3987 return s
3988
[1862]3989 @asaplog_post_dec
[530]3990 def get_fit(self, row=0):
[1846]3991 """\
[530]3992 Print or return the stored fits for a row in the scantable
[1846]3993
[530]3994 Parameters:
[1846]3995
[530]3996 row: the row which the fit has been applied to.
[1846]3997
[530]3998 """
3999 if row > self.nrow():
4000 return
[976]4001 from asap.asapfit import asapfit
[530]4002 fit = asapfit(self._getfit(row))
[1859]4003 asaplog.push( '%s' %(fit) )
4004 return fit.as_dict()
[530]4005
[2349]4006 @preserve_selection
[1483]4007 def flag_nans(self):
[1846]4008 """\
[1483]4009 Utility function to flag NaN values in the scantable.
4010 """
4011 import numpy
4012 basesel = self.get_selection()
4013 for i in range(self.nrow()):
[1589]4014 sel = self.get_row_selector(i)
4015 self.set_selection(basesel+sel)
[1483]4016 nans = numpy.isnan(self._getspectrum(0))
4017 if numpy.any(nans):
4018 bnans = [ bool(v) for v in nans]
4019 self.flag(bnans)
4020
[1588]4021 def get_row_selector(self, rowno):
[1992]4022 return selector(rows=[rowno])
[1573]4023
    def _add_history(self, funcname, parameters):
        """
        Append an entry to the scantable HISTORY: a timestamp, the calling
        function name and a flattened 'key=value' rendering of its
        parameters, joined by '##'. Disabled when
        rcParams['scantable.history'] is False.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # 'self' carries no useful information for the history record
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nesting: expand each item of a dict parameter
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # never serialise whole tables into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # boolean masks are compressed to [start, end) pairs
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4064
[710]4065
[484]4066 def _zip_mask(self, mask):
4067 mask = list(mask)
4068 i = 0
4069 segments = []
4070 while mask[i:].count(1):
4071 i += mask[i:].index(1)
4072 if mask[i:].count(0):
4073 j = i + mask[i:].index(0)
4074 else:
[710]4075 j = len(mask)
[1118]4076 segments.append([i, j])
[710]4077 i = j
[484]4078 return segments
[714]4079
[626]4080 def _get_ordinate_label(self):
4081 fu = "("+self.get_fluxunit()+")"
4082 import re
4083 lbl = "Intensity"
[1118]4084 if re.match(".K.", fu):
[626]4085 lbl = "Brightness Temperature "+ fu
[1118]4086 elif re.match(".Jy.", fu):
[626]4087 lbl = "Flux density "+ fu
4088 return lbl
[710]4089
[876]4090 def _check_ifs(self):
[2349]4091# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]4092 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]4093 nchans = filter(lambda t: t > 0, nchans)
[876]4094 return (sum(nchans)/len(nchans) == nchans[0])
[976]4095
[1862]4096 @asaplog_post_dec
[1916]4097 def _fill(self, names, unit, average, opts={}):
[976]4098 first = True
4099 fullnames = []
4100 for name in names:
4101 name = os.path.expandvars(name)
4102 name = os.path.expanduser(name)
4103 if not os.path.exists(name):
4104 msg = "File '%s' does not exists" % (name)
4105 raise IOError(msg)
4106 fullnames.append(name)
4107 if average:
4108 asaplog.push('Auto averaging integrations')
[1079]4109 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]4110 for name in fullnames:
[1073]4111 tbl = Scantable(stype)
[2004]4112 if is_ms( name ):
4113 r = msfiller( tbl )
4114 else:
4115 r = filler( tbl )
[976]4116 msg = "Importing %s..." % (name)
[1118]4117 asaplog.push(msg, False)
[2349]4118 r.open(name, opts)
[2480]4119 rx = rcParams['scantable.reference']
4120 r.setreferenceexpr(rx)
[1843]4121 r.fill()
[976]4122 if average:
[1118]4123 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]4124 if not first:
4125 tbl = self._math._merge([self, tbl])
4126 Scantable.__init__(self, tbl)
[1843]4127 r.close()
[1118]4128 del r, tbl
[976]4129 first = False
[1861]4130 #flush log
4131 asaplog.post()
[976]4132 if unit is not None:
4133 self.set_fluxunit(unit)
[1824]4134 if not is_casapy():
4135 self.set_freqframe(rcParams['scantable.freqframe'])
[976]4136
    def _get_verify_action( self, msg, action=None ):
        """
        Ask the user (or use the supplied 'action') how to proceed at an
        interactive verification step.

        Parameters:

            msg:     prompt text shown to the user

            action:  pre-chosen answer; when None, empty or not a string
                     the user is prompted via raw_input

        Returns one of 'Y' (yes), 'N' (no), 'A' (accept all remaining),
        'R' (reject all remaining). Any unrecognised input falls back
        to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # a bare <Return> means the default answer
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            # print the help text, then prompt again (recursion without
            # the preset 'action' so the user is asked interactively)
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            return self._get_verify_action(msg)
        else:
            return 'Y'
[2012]4155
[1402]4156 def __getitem__(self, key):
4157 if key < 0:
4158 key += self.nrow()
4159 if key >= self.nrow():
4160 raise IndexError("Row index out of range.")
4161 return self._getspectrum(key)
4162
4163 def __setitem__(self, key, value):
4164 if key < 0:
4165 key += self.nrow()
4166 if key >= self.nrow():
4167 raise IndexError("Row index out of range.")
4168 if not hasattr(value, "__len__") or \
4169 len(value) > self.nchan(self.getif(key)):
4170 raise ValueError("Spectrum length doesn't match.")
4171 return self._setspectrum(value, key)
4172
4173 def __len__(self):
4174 return self.nrow()
4175
4176 def __iter__(self):
4177 for i in range(len(self)):
4178 yield self[i]
Note: See TracBrowser for help on using the repository browser.