source: trunk/python/scantable.py@ 2877

Last change on this file since 2877 was 2877, checked in by Kana Sugimoto, 11 years ago

New Development: No (a bug fix)

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): scantable

Description: fixed a trivial bug in scantable.flag_nans().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 182.8 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2751]4import re
[2315]5import tempfile
[1948]6import numpy
[1691]7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
[1824]12from asap.env import is_casapy
[876]13from asap._asap import Scantable
[2004]14from asap._asap import filler, msfiller
[1824]15from asap.parameters import rcParams
[1862]16from asap.logging import asaplog, asaplog_post_dec
[1824]17from asap.selector import selector
18from asap.linecatalog import linecatalog
[1600]19from asap.coordinate import coordinate
[1859]20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]21from asap.asapfitter import fitter
[102]22
def preserve_selection(func):
    """Decorator: run 'func' and always restore the scantable's original
    selection afterwards, even when 'func' raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A scantable is a casacore table directory whose table.info starts
    # with a 'Type = Scantable' line (the type field may be blank).
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    return bool(re.match('^Type = (Scantable)? *$', first_line))
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A MeasurementSet is a casacore table directory whose table.info
    # mentions 'Measurement Set' on its first line.
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    return first_line.find('Measurement Set') != -1
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15]
                      (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                      or [[5,5],10], but can NOT be simplified to [5,10].
                      when [5,10] given, it is interpreted as
                      [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid

    # --- validation -------------------------------------------------------
    # FIX: the old 'raise ValueError, "..."' statement syntax is Python-2
    # only; the call form below is valid in both Python 2 and 3.  The old
    # messages also embedded long runs of spaces caused by backslash
    # continuation inside the string literals; messages are now clean.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise trivial shapes --------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                      # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]    # e.g. [1] => [1,1]

    # 'commonedge' means a single [left,right] pair applied to every IF;
    # any nested element or length > 2 implies per-IF values instead.
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten: bare integers expand to symmetric [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """In verbose mode log a fitting failure; otherwise re-raise it as a
    RuntimeError carrying the original error text."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack a progress flag and a minimum-row threshold into the
    'true/false,N' string form expected by the C++ layer."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.

    Parameters:

        blinfo:  a dict, or a list/tuple of dicts, each describing the
                 baseline fit for one spectrum (see do_pack_blinfo)
        maxirow: number of rows in the target scantable; entries whose
                 'row' is out of range pack to '' and are dropped

    Returns: a list of packed strings (possibly empty)
    """
    if isinstance(blinfo, dict):
        res = do_pack_blinfo(blinfo, maxirow)
        return [res] if res != '' else []
    elif isinstance(blinfo, (list, tuple)):
        # iterate elements directly instead of the old xrange index loop
        # (xrange is Python-2 only)
        res = []
        for item in blinfo:
            packed = do_pack_blinfo(item, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def _pack_value_list(val):
    """Flatten a scalar, a flat sequence, or a sequence of sequences into
    a single comma-separated string (helper for do_pack_blinfo)."""
    if isinstance(val, (list, tuple)):
        parts = []
        for item in val:
            if isinstance(item, (list, tuple)):
                parts.extend(str(sub) for sub in item)
            else:
                parts.append(str(item))
        return ",".join(parts)
    return str(val)

def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Mandatory keys: 'row', 'blfunc', 'masklist', plus the function
    parameter matching 'blfunc' ('order'/'npiece'/'nwave').
    Optional: 'clipthresh'+'clipniter' (both or neither) and the
    linefinder triple 'thresh'+'edge'+'chan_avg_limit' (all or none).

    Returns '' when blinfo['row'] >= maxirow, else the packed
    colon-separated record.  Raises ValueError on missing/inconsistent
    keys.  (Python-2-only has_key()/xrange usages replaced by 'in' and
    direct iteration; works under Python 2 and 3.)
    """
    dinfo = {}
    # mandatory fields first; 'row' also gates the maxirow check below
    for key in ['row', 'blfunc', 'masklist']:
        if key in blinfo:
            val = blinfo[key]
            if key == 'row':
                irow = val
            dinfo[key] = _pack_value_list(val)
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the scantable are silently skipped
    if irow >= maxirow:
        return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _pack_value_list(blinfo[key])

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # sigma-clipping parameters must be given as a pair or not at all
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder parameters must be given as a complete triple or not at all
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _pack_value_list(blinfo[key])

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.

    Returns a list of {'row': int, 'coeff': [float,...], 'rms': float}
    dicts, one per input string.  (Python-2-only xrange index loops
    replaced with direct iteration and a comprehension.)
    """
    res = []
    for item in sres:
        (srow, scoeff, srms) = item.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
[876]303class scantable(Scantable):
[1846]304 """\
305 The ASAP container for scans (single-dish data).
[102]306 """
[1819]307
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename: the name of an asap table on disk
                      or
                      the name of a rpfits/sdfits/ms file
                      (integrations within scans are auto averaged
                      and the whole file is read) or
                      [advanced] a reference to an existing scantable

            average: average all integrations within a scan on read.
                     The default (True) is taken from .asaprc.

            unit: brightness unit; must be consistent with K or Jy.
                  Over-rides the default selected by the filler
                  (input rpfits/sdfits/ms) or replaces the value
                  in existing scantables

            antenna: for MeasurementSet input data only:
                     Antenna selection. integer (id) or string
                     (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt: Whether to import direction from MS/POINTING
                   table properly or not.
                   This is effective only when filename is MS.
                   The default (True) is to import direction
                   from MS/POINTING.

        Raises:

            IOError: if 'filename' does not exist, or exists but is not
                     a scantable, MS or fillable single file.
        """
        # resolve defaults from the .asaprc configuration
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the call arguments, recorded in the history below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy constructor: share the underlying table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific kwargs
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: treated as NRO data by the filler
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    # NOTE(review): parallactify is applied only in this
                    # single-file branch, not for scantable/MS input —
                    # confirm this asymmetry is intentional
                    self.parallactify(parallactify)
                else:
                    # NOTE(review): message is missing a space after '%s'
                    # ("'...'is not"); left unchanged here
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]394
[1862]395 @asaplog_post_dec
[876]396 def save(self, name=None, format=None, overwrite=False):
[1846]397 """\
[1280]398 Store the scantable on disk. This can be an asap (aips++) Table,
399 SDFITS or MS2 format.
[1846]400
[116]401 Parameters:
[1846]402
[2431]403 name: the name of the outputfile. For format 'ASCII'
[1093]404 this is the root file name (data in 'name'.txt
[497]405 and header in 'name'_header.txt)
[1855]406
[116]407 format: an optional file format. Default is ASAP.
[1855]408 Allowed are:
409
410 * 'ASAP' (save as ASAP [aips++] Table),
411 * 'SDFITS' (save as SDFITS file)
412 * 'ASCII' (saves as ascii text file)
413 * 'MS2' (saves as an casacore MeasurementSet V2)
[2315]414 * 'FITS' (save as image FITS - not readable by
415 class)
[1855]416 * 'CLASS' (save as FITS readable by CLASS)
417
[411]418 overwrite: If the file should be overwritten if it exists.
[256]419 The default False is to return with warning
[411]420 without writing the output. USE WITH CARE.
[1855]421
[1846]422 Example::
423
[116]424 scan.save('myscan.asap')
[1118]425 scan.save('myscan.sdfits', 'SDFITS')
[1846]426
[116]427 """
[411]428 from os import path
[1593]429 format = format or rcParams['scantable.save']
[256]430 suffix = '.'+format.lower()
[1118]431 if name is None or name == "":
[256]432 name = 'scantable'+suffix
[718]433 msg = "No filename given. Using default name %s..." % name
434 asaplog.push(msg)
[411]435 name = path.expandvars(name)
[256]436 if path.isfile(name) or path.isdir(name):
437 if not overwrite:
[718]438 msg = "File %s exists." % name
[1859]439 raise IOError(msg)
[451]440 format2 = format.upper()
441 if format2 == 'ASAP':
[116]442 self._save(name)
[2029]443 elif format2 == 'MS2':
444 msopt = {'ms': {'overwrite': overwrite } }
445 from asap._asap import mswriter
446 writer = mswriter( self )
447 writer.write( name, msopt )
[116]448 else:
[989]449 from asap._asap import stwriter as stw
[1118]450 writer = stw(format2)
451 writer.write(self, name)
[116]452 return
453
[102]454 def copy(self):
[1846]455 """Return a copy of this scantable.
456
457 *Note*:
458
[1348]459 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]460
461 Example::
462
[102]463 copiedscan = scan.copy()
[1846]464
[102]465 """
[876]466 sd = scantable(Scantable._copy(self))
[113]467 return sd
468
[1093]469 def drop_scan(self, scanid=None):
[1846]470 """\
[1093]471 Return a new scantable where the specified scan number(s) has(have)
472 been dropped.
[1846]473
[1093]474 Parameters:
[1846]475
[1093]476 scanid: a (list of) scan number(s)
[1846]477
[1093]478 """
479 from asap import _is_sequence_or_number as _is_valid
480 from asap import _to_list
481 from asap import unique
482 if not _is_valid(scanid):
[2315]483 raise RuntimeError( 'Please specify a scanno to drop from the'
484 ' scantable' )
[1859]485 scanid = _to_list(scanid)
486 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
487 for sid in scanid: allscans.remove(sid)
488 if len(allscans) == 0:
489 raise ValueError("Can't remove all scans")
490 sel = selector(scans=allscans)
491 return self._select_copy(sel)
[1093]492
[1594]493 def _select_copy(self, selection):
494 orig = self.get_selection()
495 self.set_selection(orig+selection)
496 cp = self.copy()
497 self.set_selection(orig)
498 return cp
499
[102]500 def get_scan(self, scanid=None):
[1855]501 """\
[102]502 Return a specific scan (by scanno) or collection of scans (by
503 source name) in a new scantable.
[1846]504
505 *Note*:
506
[1348]507 See scantable.drop_scan() for the inverse operation.
[1846]508
[102]509 Parameters:
[1846]510
[513]511 scanid: a (list of) scanno or a source name, unix-style
512 patterns are accepted for source name matching, e.g.
513 '*_R' gets all 'ref scans
[1846]514
515 Example::
516
[513]517 # get all scans containing the source '323p459'
518 newscan = scan.get_scan('323p459')
519 # get all 'off' scans
520 refscans = scan.get_scan('*_R')
521 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]522 newscan = scan.get_scan([0, 2, 7, 10])
[1846]523
[102]524 """
525 if scanid is None:
[1859]526 raise RuntimeError( 'Please specify a scan no or name to '
527 'retrieve from the scantable' )
[102]528 try:
[946]529 bsel = self.get_selection()
530 sel = selector()
[102]531 if type(scanid) is str:
[946]532 sel.set_name(scanid)
[1594]533 return self._select_copy(sel)
[102]534 elif type(scanid) is int:
[946]535 sel.set_scans([scanid])
[1594]536 return self._select_copy(sel)
[381]537 elif type(scanid) is list:
[946]538 sel.set_scans(scanid)
[1594]539 return self._select_copy(sel)
[381]540 else:
[718]541 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]542 raise TypeError(msg)
[102]543 except RuntimeError:
[1859]544 raise
[102]545
546 def __str__(self):
[2315]547 tempFile = tempfile.NamedTemporaryFile()
548 Scantable._summary(self, tempFile.name)
549 tempFile.seek(0)
550 asaplog.clear()
551 return tempFile.file.read()
[102]552
[2315]553 @asaplog_post_dec
[976]554 def summary(self, filename=None):
[1846]555 """\
[102]556 Print a summary of the contents of this scantable.
[1846]557
[102]558 Parameters:
[1846]559
[1931]560 filename: the name of a file to write the putput to
[102]561 Default - no file output
[1846]562
[102]563 """
564 if filename is not None:
[256]565 if filename is "":
566 filename = 'scantable_summary.txt'
[415]567 from os.path import expandvars, isdir
[411]568 filename = expandvars(filename)
[2286]569 if isdir(filename):
[718]570 msg = "Illegal file name '%s'." % (filename)
[1859]571 raise IOError(msg)
[2286]572 else:
573 filename = ""
574 Scantable._summary(self, filename)
[710]575
[1512]576 def get_spectrum(self, rowno):
[1471]577 """Return the spectrum for the current row in the scantable as a list.
[1846]578
[1471]579 Parameters:
[1846]580
[1573]581 rowno: the row number to retrieve the spectrum from
[1846]582
[1471]583 """
584 return self._getspectrum(rowno)
[946]585
[1471]586 def get_mask(self, rowno):
587 """Return the mask for the current row in the scantable as a list.
[1846]588
[1471]589 Parameters:
[1846]590
[1573]591 rowno: the row number to retrieve the mask from
[1846]592
[1471]593 """
594 return self._getmask(rowno)
595
596 def set_spectrum(self, spec, rowno):
[1938]597 """Set the spectrum for the current row in the scantable.
[1846]598
[1471]599 Parameters:
[1846]600
[1855]601 spec: the new spectrum
[1846]602
[1855]603 rowno: the row number to set the spectrum for
604
[1471]605 """
[2348]606 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]607 return self._setspectrum(spec, rowno)
608
[1600]609 def get_coordinate(self, rowno):
610 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]611
612 *Note*:
613
[1600]614 * This coordinate is only valid until a scantable method modifies
615 the frequency axis.
616 * This coordinate does contain the original frequency set-up
617 NOT the new frame. The conversions however are done using the user
618 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
619 use scantable.freq_align first. Without it there is no closure,
[1846]620 i.e.::
[1600]621
[1846]622 c = myscan.get_coordinate(0)
623 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
624
[1600]625 Parameters:
[1846]626
[1600]627 rowno: the row number for the spectral coordinate
628
629 """
630 return coordinate(Scantable.get_coordinate(self, rowno))
631
[946]632 def get_selection(self):
[1846]633 """\
[1005]634 Get the selection object currently set on this scantable.
[1846]635
636 Example::
637
[1005]638 sel = scan.get_selection()
639 sel.set_ifs(0) # select IF 0
640 scan.set_selection(sel) # apply modified selection
[1846]641
[946]642 """
643 return selector(self._getselection())
644
[1576]645 def set_selection(self, selection=None, **kw):
[1846]646 """\
[1005]647 Select a subset of the data. All following operations on this scantable
648 are only applied to thi selection.
[1846]649
[1005]650 Parameters:
[1697]651
[1846]652 selection: a selector object (default unset the selection), or
[2431]653 any combination of 'pols', 'ifs', 'beams', 'scans',
654 'cycles', 'name', 'query'
[1697]655
[1846]656 Examples::
[1697]657
[1005]658 sel = selector() # create a selection object
[1118]659 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]660 scan.set_selection(sel) # set the selection
661 scan.summary() # will only print summary of scanno 0 an 3
662 scan.set_selection() # unset the selection
[1697]663 # or the equivalent
664 scan.set_selection(scans=[0,3])
665 scan.summary() # will only print summary of scanno 0 an 3
666 scan.set_selection() # unset the selection
[1846]667
[946]668 """
[1576]669 if selection is None:
670 # reset
671 if len(kw) == 0:
672 selection = selector()
673 else:
674 # try keywords
675 for k in kw:
676 if k not in selector.fields:
[2320]677 raise KeyError("Invalid selection key '%s', "
678 "valid keys are %s" % (k,
679 selector.fields))
[1576]680 selection = selector(**kw)
[946]681 self._setselection(selection)
682
[1819]683 def get_row(self, row=0, insitu=None):
[1846]684 """\
[1819]685 Select a row in the scantable.
686 Return a scantable with single row.
[1846]687
[1819]688 Parameters:
[1846]689
690 row: row no of integration, default is 0.
691 insitu: if False a new scantable is returned. Otherwise, the
692 scaling is done in-situ. The default is taken from .asaprc
693 (False)
694
[1819]695 """
[2349]696 if insitu is None:
697 insitu = rcParams['insitu']
[1819]698 if not insitu:
699 workscan = self.copy()
700 else:
701 workscan = self
702 # Select a row
[2349]703 sel = selector()
[1992]704 sel.set_rows([row])
[1819]705 workscan.set_selection(sel)
706 if not workscan.nrow() == 1:
[2349]707 msg = "Could not identify single row. %d rows selected." \
708 % (workscan.nrow())
[1819]709 raise RuntimeError(msg)
710 if insitu:
711 self._assign(workscan)
712 else:
713 return workscan
714
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                  'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask: an optional mask specifying where the statistic
                  should be determined.

            form: format string to print statistic values

            row: row number of spectrum to process.
                 (default is None: for all rows)

        Returns: the list of statistic values (one per processed row)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa (channel/freq) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also report the channel at which the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): when 'row' is neither None nor int, 'rows' stays
        # undefined and the loop below raises NameError — confirm callers
        # only pass None or int
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa variant: collect the x-values as the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            # single-row mode stores its value at index 0, not at row index
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the formatted report through a temp file into the asap log
        # (Python-2 idioms: has_key, 'commands' module, print >>)
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        # NOTE(review): 'statunit' is only assigned inside the loop above;
        # with zero rows this line raises NameError — confirm nrow() > 0
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
822
823 def chan2data(self, rowno=0, chan=0):
[1846]824 """\
[1819]825 Returns channel/frequency/velocity and spectral value
826 at an arbitrary row and channel in the scantable.
[1846]827
[1819]828 Parameters:
[1846]829
[1819]830 rowno: a row number in the scantable. Default is the
831 first row, i.e. rowno=0
[1855]832
[1819]833 chan: a channel in the scantable. Default is the first
834 channel, i.e. pos=0
[1846]835
[1819]836 """
837 if isinstance(rowno, int) and isinstance(chan, int):
838 qx = {'unit': self.get_unit(),
839 'value': self._getabcissa(rowno)[chan]}
840 qy = {'unit': self.get_fluxunit(),
841 'value': self._getspectrum(rowno)[chan]}
842 return qx, qy
843
[1118]844 def stddev(self, mask=None):
[1846]845 """\
[135]846 Determine the standard deviation of the current beam/if/pol
847 Takes a 'mask' as an optional parameter to specify which
848 channels should be excluded.
[1846]849
[135]850 Parameters:
[1846]851
[135]852 mask: an optional mask specifying where the standard
853 deviation should be determined.
854
[1846]855 Example::
856
[135]857 scan.set_unit('channel')
[1118]858 msk = scan.create_mask([100, 200], [500, 600])
[135]859 scan.stddev(mask=m)
[1846]860
[135]861 """
[1118]862 return self.stats(stat='stddev', mask=mask);
[135]863
[1003]864
[1259]865 def get_column_names(self):
[1846]866 """\
[1003]867 Return a list of column names, which can be used for selection.
868 """
[1259]869 return list(Scantable.get_column_names(self))
[1003]870
[1730]871 def get_tsys(self, row=-1):
[1846]872 """\
[113]873 Return the System temperatures.
[1846]874
875 Parameters:
876
877 row: the rowno to get the information for. (default all rows)
878
[113]879 Returns:
[1846]880
[876]881 a list of Tsys values for the current selection
[1846]882
[113]883 """
[1730]884 if row > -1:
885 return self._get_column(self._gettsys, row)
[876]886 return self._row_callback(self._gettsys, "Tsys")
[256]887
[2406]888 def get_tsysspectrum(self, row=-1):
889 """\
890 Return the channel dependent system temperatures.
[1730]891
[2406]892 Parameters:
893
894 row: the rowno to get the information for. (default all rows)
895
896 Returns:
897
898 a list of Tsys values for the current selection
899
900 """
901 return self._get_column( self._gettsysspectrum, row )
902
[2791]903 def set_tsys(self, values, row=-1):
904 """\
905 Set the Tsys value(s) of the given 'row' or the whole scantable
906 (selection).
907
908 Parameters:
909
910 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
911 row: the row number to apply Tsys values to.
912 (default all rows)
913
914 """
915
916 if not hasattr(values, "__len__"):
917 values = [values]
918 self._settsys(values, row)
919
[1730]920 def get_weather(self, row=-1):
[1846]921 """\
922 Return the weather informations.
923
924 Parameters:
925
926 row: the rowno to get the information for. (default all rows)
927
928 Returns:
929
930 a dict or list of of dicts of values for the current selection
931
932 """
933
[1730]934 values = self._get_column(self._get_weather, row)
935 if row > -1:
936 return {'temperature': values[0],
937 'pressure': values[1], 'humidity' : values[2],
938 'windspeed' : values[3], 'windaz' : values[4]
939 }
940 else:
941 out = []
942 for r in values:
943
944 out.append({'temperature': r[0],
945 'pressure': r[1], 'humidity' : r[2],
946 'windspeed' : r[3], 'windaz' : r[4]
947 })
948 return out
949
[876]950 def _row_callback(self, callback, label):
951 out = ""
[1118]952 outvec = []
[1590]953 sep = '-'*50
[876]954 for i in range(self.nrow()):
955 tm = self._gettime(i)
956 src = self._getsourcename(i)
[1590]957 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]958 out += 'Time[%s]:\n' % (tm)
[1590]959 if self.nbeam(-1) > 1:
960 out += ' Beam[%d] ' % (self.getbeam(i))
961 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
962 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]963 outvec.append(callback(i))
964 out += '= %3.3f\n' % (outvec[i])
[1590]965 out += sep+'\n'
[1859]966
967 asaplog.push(sep)
968 asaplog.push(" %s" % (label))
969 asaplog.push(sep)
970 asaplog.push(out)
[1861]971 asaplog.post()
[1175]972 return outvec
[256]973
[1947]974 def _get_column(self, callback, row=-1, *args):
[1070]975 """
976 """
977 if row == -1:
[1947]978 return [callback(i, *args) for i in range(self.nrow())]
[1070]979 else:
[1819]980 if 0 <= row < self.nrow():
[1947]981 return callback(row, *args)
[256]982
[1070]983
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row: row no of integration. Default -1 return all rows

            asdatetime: return values as datetime objects rather than strings

            prec: number of digits shown. Default -1 to automatic
                  calculation.
                  Note this number is equals to the digits of MVTime,
                  i.e., 0<prec<3: dates with hh:: only,
                  <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                  and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time; numpy scalar
            # is truncated to int below)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # strip sub-fields the requested precision cannot represent so
        # strptime matches the string returned by the C++ layer
        # NOTE(review): '(6-prec)/2' relies on Python-2 integer division
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]1029
[1348]1030
1031 def get_inttime(self, row=-1):
[1846]1032 """\
[1348]1033 Get a list of integration times for the observations.
1034 Return a time in seconds for each integration in the scantable.
[1846]1035
[1348]1036 Parameters:
[1846]1037
[1348]1038 row: row no of integration. Default -1 return all rows.
[1846]1039
[1348]1040 """
[1573]1041 return self._get_column(self._getinttime, row)
[1348]1042
[1573]1043
[714]1044 def get_sourcename(self, row=-1):
[1846]1045 """\
[794]1046 Get a list source names for the observations.
[714]1047 Return a string for each integration in the scantable.
1048 Parameters:
[1846]1049
[1348]1050 row: row no of integration. Default -1 return all rows.
[1846]1051
[714]1052 """
[1070]1053 return self._get_column(self._getsourcename, row)
[714]1054
[794]1055 def get_elevation(self, row=-1):
[1846]1056 """\
[794]1057 Get a list of elevations for the observations.
1058 Return a float for each integration in the scantable.
[1846]1059
[794]1060 Parameters:
[1846]1061
[1348]1062 row: row no of integration. Default -1 return all rows.
[1846]1063
[794]1064 """
[1070]1065 return self._get_column(self._getelevation, row)
[794]1066
1067 def get_azimuth(self, row=-1):
[1846]1068 """\
[794]1069 Get a list of azimuths for the observations.
1070 Return a float for each integration in the scantable.
[1846]1071
[794]1072 Parameters:
[1348]1073 row: row no of integration. Default -1 return all rows.
[1846]1074
[794]1075 """
[1070]1076 return self._get_column(self._getazimuth, row)
[794]1077
1078 def get_parangle(self, row=-1):
[1846]1079 """\
[794]1080 Get a list of parallactic angles for the observations.
1081 Return a float for each integration in the scantable.
[1846]1082
[794]1083 Parameters:
[1846]1084
[1348]1085 row: row no of integration. Default -1 return all rows.
[1846]1086
[794]1087 """
[1070]1088 return self._get_column(self._getparangle, row)
[794]1089
[1070]1090 def get_direction(self, row=-1):
1091 """
1092 Get a list of Positions on the sky (direction) for the observations.
[1594]1093 Return a string for each integration in the scantable.
[1855]1094
[1070]1095 Parameters:
[1855]1096
[1070]1097 row: row no of integration. Default -1 return all rows
[1855]1098
[1070]1099 """
1100 return self._get_column(self._getdirection, row)
1101
[1391]1102 def get_directionval(self, row=-1):
[1846]1103 """\
[1391]1104 Get a list of Positions on the sky (direction) for the observations.
1105 Return a float for each integration in the scantable.
[1846]1106
[1391]1107 Parameters:
[1846]1108
[1391]1109 row: row no of integration. Default -1 return all rows
[1846]1110
[1391]1111 """
1112 return self._get_column(self._getdirectionvec, row)
1113
[1862]1114 @asaplog_post_dec
[102]1115 def set_unit(self, unit='channel'):
[1846]1116 """\
[102]1117 Set the unit for all following operations on this scantable
[1846]1118
[102]1119 Parameters:
[1846]1120
1121 unit: optional unit, default is 'channel'. Use one of '*Hz',
1122 'km/s', 'channel' or equivalent ''
1123
[102]1124 """
[484]1125 varlist = vars()
[1118]1126 if unit in ['', 'pixel', 'channel']:
[113]1127 unit = ''
1128 inf = list(self._getcoordinfo())
1129 inf[0] = unit
1130 self._setcoordinfo(inf)
[1118]1131 self._add_history("set_unit", varlist)
[113]1132
[1862]1133 @asaplog_post_dec
[484]1134 def set_instrument(self, instr):
[1846]1135 """\
[1348]1136 Set the instrument for subsequent processing.
[1846]1137
[358]1138 Parameters:
[1846]1139
[710]1140 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1141 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1142
[358]1143 """
1144 self._setInstrument(instr)
[1118]1145 self._add_history("set_instument", vars())
[358]1146
[1862]1147 @asaplog_post_dec
[1190]1148 def set_feedtype(self, feedtype):
[1846]1149 """\
[1190]1150 Overwrite the feed type, which might not be set correctly.
[1846]1151
[1190]1152 Parameters:
[1846]1153
[1190]1154 feedtype: 'linear' or 'circular'
[1846]1155
[1190]1156 """
1157 self._setfeedtype(feedtype)
1158 self._add_history("set_feedtype", vars())
1159
[1862]1160 @asaplog_post_dec
[276]1161 def set_doppler(self, doppler='RADIO'):
[1846]1162 """\
[276]1163 Set the doppler for all following operations on this scantable.
[1846]1164
[276]1165 Parameters:
[1846]1166
[276]1167 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1168
[276]1169 """
[484]1170 varlist = vars()
[276]1171 inf = list(self._getcoordinfo())
1172 inf[2] = doppler
1173 self._setcoordinfo(inf)
[1118]1174 self._add_history("set_doppler", vars())
[710]1175
[1862]1176 @asaplog_post_dec
[226]1177 def set_freqframe(self, frame=None):
[1846]1178 """\
[113]1179 Set the frame type of the Spectral Axis.
[1846]1180
[113]1181 Parameters:
[1846]1182
[591]1183 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1184 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1185 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1186
1187 Example::
1188
[113]1189 scan.set_freqframe('BARY')
[1846]1190
[113]1191 """
[1593]1192 frame = frame or rcParams['scantable.freqframe']
[484]1193 varlist = vars()
[1819]1194 # "REST" is not implemented in casacore
1195 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1196 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1197 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1198 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1199
[989]1200 if frame in valid:
[113]1201 inf = list(self._getcoordinfo())
1202 inf[1] = frame
1203 self._setcoordinfo(inf)
[1118]1204 self._add_history("set_freqframe", varlist)
[102]1205 else:
[1118]1206 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1207 raise TypeError(msg)
[710]1208
[1862]1209 @asaplog_post_dec
[989]1210 def set_dirframe(self, frame=""):
[1846]1211 """\
[989]1212 Set the frame type of the Direction on the sky.
[1846]1213
[989]1214 Parameters:
[1846]1215
[989]1216 frame: an optional frame type, default ''. Valid frames are:
1217 'J2000', 'B1950', 'GALACTIC'
[1846]1218
1219 Example:
1220
[989]1221 scan.set_dirframe('GALACTIC')
[1846]1222
[989]1223 """
1224 varlist = vars()
[1859]1225 Scantable.set_dirframe(self, frame)
[1118]1226 self._add_history("set_dirframe", varlist)
[989]1227
[113]1228 def get_unit(self):
[1846]1229 """\
[113]1230 Get the default unit set in this scantable
[1846]1231
[113]1232 Returns:
[1846]1233
[113]1234 A unit string
[1846]1235
[113]1236 """
1237 inf = self._getcoordinfo()
1238 unit = inf[0]
1239 if unit == '': unit = 'channel'
1240 return unit
[102]1241
[1862]1242 @asaplog_post_dec
[158]1243 def get_abcissa(self, rowno=0):
[1846]1244 """\
[158]1245 Get the abcissa in the current coordinate setup for the currently
[113]1246 selected Beam/IF/Pol
[1846]1247
[113]1248 Parameters:
[1846]1249
[226]1250 rowno: an optional row number in the scantable. Default is the
1251 first row, i.e. rowno=0
[1846]1252
[113]1253 Returns:
[1846]1254
[1348]1255 The abcissa values and the format string (as a dictionary)
[1846]1256
[113]1257 """
[256]1258 abc = self._getabcissa(rowno)
[710]1259 lbl = self._getabcissalabel(rowno)
[158]1260 return abc, lbl
[113]1261
[1862]1262 @asaplog_post_dec
[2322]1263 def flag(self, mask=None, unflag=False, row=-1):
[1846]1264 """\
[1001]1265 Flag the selected data using an optional channel mask.
[1846]1266
[1001]1267 Parameters:
[1846]1268
[1001]1269 mask: an optional channel mask, created with create_mask. Default
1270 (no mask) is all channels.
[1855]1271
[1819]1272 unflag: if True, unflag the data
[1846]1273
[2322]1274 row: an optional row number in the scantable.
1275 Default -1 flags all rows
1276
[1001]1277 """
1278 varlist = vars()
[1593]1279 mask = mask or []
[1994]1280 self._flag(row, mask, unflag)
[1001]1281 self._add_history("flag", varlist)
1282
[1862]1283 @asaplog_post_dec
[2322]1284 def flag_row(self, rows=None, unflag=False):
[1846]1285 """\
[1819]1286 Flag the selected data in row-based manner.
[1846]1287
[1819]1288 Parameters:
[1846]1289
[1843]1290 rows: list of row numbers to be flagged. Default is no row
[2322]1291 (must be explicitly specified to execute row-based
1292 flagging).
[1855]1293
[1819]1294 unflag: if True, unflag the data.
[1846]1295
[1819]1296 """
1297 varlist = vars()
[2322]1298 if rows is None:
1299 rows = []
[1859]1300 self._flag_row(rows, unflag)
[1819]1301 self._add_history("flag_row", varlist)
1302
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres:      upper threshold.

            dthres:      lower threshold

            clipoutside: True for flagging data outside the range
                         [dthres:uthres].
                         False for flagging data inside the range.

            unflag:      if True, unflag the data.

        """
        # snapshot the call arguments for the history record before
        # creating any further locals
        varlist = vars()
        # the actual clipping/flagging is performed in the C++ layer
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1324
[1862]1325 @asaplog_post_dec
[1584]1326 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1327 """\
[1192]1328 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1329 Flagged data in the scantable get interpolated over the region.
[1192]1330 No taper is applied.
[1846]1331
[1192]1332 Parameters:
[1846]1333
[1579]1334 start: the start frequency (really a period within the
1335 bandwidth) or period to remove
[1855]1336
[1579]1337 end: the end frequency or period to remove
[1855]1338
[2431]1339 unit: the frequency unit (default 'MHz') or '' for
[1579]1340 explicit lag channels
[1846]1341
1342 *Notes*:
1343
[1579]1344 It is recommended to flag edges of the band or strong
[1348]1345 signals beforehand.
[1846]1346
[1192]1347 """
1348 if insitu is None: insitu = rcParams['insitu']
1349 self._math._setinsitu(insitu)
1350 varlist = vars()
[1579]1351 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1352 if not (unit == "" or base.has_key(unit)):
[1192]1353 raise ValueError("%s is not a valid unit." % unit)
[1859]1354 if unit == "":
1355 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1356 else:
1357 s = scantable(self._math._lag_flag(self, start*base[unit],
1358 end*base[unit], "frequency"))
[1192]1359 s._add_history("lag_flag", varlist)
1360 if insitu:
1361 self._assign(s)
1362 else:
1363 return s
[1001]1364
[1862]1365 @asaplog_post_dec
[2349]1366 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1367 """\
1368 Apply FFT to the spectra.
1369 Flagged data in the scantable get interpolated over the region.
1370
1371 Parameters:
[2186]1372
1373 rowno: The row number(s) to be processed. int, list
[2349]1374 and tuple are accepted. By default (None), FFT
[2186]1375 is applied to the whole data.
1376
1377 mask: Auxiliary channel mask(s). Given as a boolean
1378 list, it is applied to all specified rows.
1379 A list of boolean lists can also be used to
1380 apply different masks. In the latter case, the
1381 length of 'mask' must be the same as that of
[2349]1382 'rowno'. The default is None.
[2177]1383
1384 getrealimag: If True, returns the real and imaginary part
1385 values of the complex results.
1386 If False (the default), returns the amplitude
1387 (absolute value) normalised with Ndata/2 and
1388 phase (argument, in unit of radian).
1389
1390 Returns:
1391
[2186]1392 A list of dictionaries containing the results for each spectrum.
1393 Each dictionary contains two values, the real and the imaginary
1394 parts when getrealimag = True, or the amplitude(absolute value)
1395 and the phase(argument) when getrealimag = False. The key for
1396 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1397 respectively.
1398 """
[2349]1399 if rowno is None:
1400 rowno = []
[2177]1401 if isinstance(rowno, int):
1402 rowno = [rowno]
1403 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1404 raise TypeError("The row number(s) must be int, list or tuple.")
1405 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1406
[2411]1407 usecommonmask = True
1408
1409 if mask is None:
1410 mask = []
1411 if isinstance(mask, list) or isinstance(mask, tuple):
1412 if len(mask) == 0:
1413 mask = [[]]
1414 else:
1415 if isinstance(mask[0], bool):
1416 if len(mask) != self.nchan(self.getif(rowno[0])):
1417 raise ValueError("The spectra and the mask have "
1418 "different length.")
1419 mask = [mask]
1420 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1421 usecommonmask = False
1422 if len(mask) != len(rowno):
1423 raise ValueError("When specifying masks for each "
1424 "spectrum, the numbers of them "
1425 "must be identical.")
1426 for i in xrange(mask):
1427 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1428 raise ValueError("The spectra and the mask have "
1429 "different length.")
1430 else:
1431 raise TypeError("The mask must be a boolean list or "
1432 "a list of boolean list.")
1433 else:
[2349]1434 raise TypeError("The mask must be a boolean list or a list of "
1435 "boolean list.")
[2186]1436
1437 res = []
1438
1439 imask = 0
1440 for whichrow in rowno:
1441 fspec = self._fft(whichrow, mask[imask], getrealimag)
1442 nspec = len(fspec)
[2177]1443
[2349]1444 i = 0
1445 v1 = []
1446 v2 = []
1447 reselem = {"real":[],"imag":[]} if getrealimag \
1448 else {"ampl":[],"phase":[]}
[2177]1449
[2186]1450 while (i < nspec):
1451 v1.append(fspec[i])
1452 v2.append(fspec[i+1])
[2349]1453 i += 2
[2186]1454
[2177]1455 if getrealimag:
[2186]1456 reselem["real"] += v1
1457 reselem["imag"] += v2
[2177]1458 else:
[2186]1459 reselem["ampl"] += v1
1460 reselem["phase"] += v2
[2177]1461
[2186]1462 res.append(reselem)
1463
[2349]1464 if not usecommonmask:
1465 imask += 1
[2186]1466
[2177]1467 return res
1468
1469 @asaplog_post_dec
[113]1470 def create_mask(self, *args, **kwargs):
[1846]1471 """\
[1118]1472 Compute and return a mask based on [min, max] windows.
[189]1473 The specified windows are to be INCLUDED, when the mask is
[113]1474 applied.
[1846]1475
[102]1476 Parameters:
[1846]1477
[1118]1478 [min, max], [min2, max2], ...
[1024]1479 Pairs of start/end points (inclusive)specifying the regions
[102]1480 to be masked
[1855]1481
[189]1482 invert: optional argument. If specified as True,
1483 return an inverted mask, i.e. the regions
1484 specified are EXCLUDED
[1855]1485
[513]1486 row: create the mask using the specified row for
1487 unit conversions, default is row=0
1488 only necessary if frequency varies over rows.
[1846]1489
1490 Examples::
1491
[113]1492 scan.set_unit('channel')
[1846]1493 # a)
[1118]1494 msk = scan.create_mask([400, 500], [800, 900])
[189]1495 # masks everything outside 400 and 500
[113]1496 # and 800 and 900 in the unit 'channel'
1497
[1846]1498 # b)
[1118]1499 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1500 # masks the regions between 400 and 500
[113]1501 # and 800 and 900 in the unit 'channel'
[1846]1502
1503 # c)
1504 #mask only channel 400
[1554]1505 msk = scan.create_mask([400])
[1846]1506
[102]1507 """
[1554]1508 row = kwargs.get("row", 0)
[513]1509 data = self._getabcissa(row)
[113]1510 u = self._getcoordinfo()[0]
[1859]1511 if u == "":
1512 u = "channel"
1513 msg = "The current mask window unit is %s" % u
1514 i = self._check_ifs()
1515 if not i:
1516 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1517 asaplog.push(msg)
[2348]1518 n = len(data)
[1295]1519 msk = _n_bools(n, False)
[710]1520 # test if args is a 'list' or a 'normal *args - UGLY!!!
1521
[2349]1522 ws = (isinstance(args[-1][-1], int)
1523 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1524 for window in ws:
[1554]1525 if len(window) == 1:
1526 window = [window[0], window[0]]
1527 if len(window) == 0 or len(window) > 2:
[2349]1528 raise ValueError("A window needs to be defined as "
1529 "[start(, end)]")
[1545]1530 if window[0] > window[1]:
1531 tmp = window[0]
1532 window[0] = window[1]
1533 window[1] = tmp
[102]1534 for i in range(n):
[1024]1535 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1536 msk[i] = True
[113]1537 if kwargs.has_key('invert'):
1538 if kwargs.get('invert'):
[1295]1539 msk = mask_not(msk)
[102]1540 return msk
[710]1541
[1931]1542 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1543 """\
[1819]1544 Compute and return a list of mask windows, [min, max].
[1846]1545
[1819]1546 Parameters:
[1846]1547
[1819]1548 mask: channel mask, created with create_mask.
[1855]1549
[1819]1550 row: calcutate the masklist using the specified row
1551 for unit conversions, default is row=0
1552 only necessary if frequency varies over rows.
[1846]1553
[1819]1554 Returns:
[1846]1555
[1819]1556 [min, max], [min2, max2], ...
1557 Pairs of start/end points (inclusive)specifying
1558 the masked regions
[1846]1559
[1819]1560 """
1561 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1562 raise TypeError("The mask should be list or tuple.")
[2427]1563 if len(mask) <= 0:
1564 raise TypeError("The mask elements should be > 0")
[2348]1565 data = self._getabcissa(row)
1566 if len(data) != len(mask):
[1819]1567 msg = "Number of channels in scantable != number of mask elements"
1568 raise TypeError(msg)
1569 u = self._getcoordinfo()[0]
[1859]1570 if u == "":
1571 u = "channel"
1572 msg = "The current mask window unit is %s" % u
1573 i = self._check_ifs()
1574 if not i:
1575 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1576 if not silent:
1577 asaplog.push(msg)
[2349]1578 masklist = []
[1819]1579 ist, ien = None, None
1580 ist, ien=self.get_mask_indices(mask)
1581 if ist is not None and ien is not None:
1582 for i in xrange(len(ist)):
1583 range=[data[ist[i]],data[ien[i]]]
1584 range.sort()
1585 masklist.append([range[0],range[1]])
1586 return masklist
1587
1588 def get_mask_indices(self, mask=None):
[1846]1589 """\
[1819]1590 Compute and Return lists of mask start indices and mask end indices.
[1855]1591
1592 Parameters:
1593
[1819]1594 mask: channel mask, created with create_mask.
[1846]1595
[1819]1596 Returns:
[1846]1597
[1819]1598 List of mask start indices and that of mask end indices,
1599 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1600
[1819]1601 """
1602 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1603 raise TypeError("The mask should be list or tuple.")
[2427]1604 if len(mask) <= 0:
1605 raise TypeError("The mask elements should be > 0")
[2349]1606 istart = []
1607 iend = []
1608 if mask[0]:
1609 istart.append(0)
[1819]1610 for i in range(len(mask)-1):
1611 if not mask[i] and mask[i+1]:
1612 istart.append(i+1)
1613 elif mask[i] and not mask[i+1]:
1614 iend.append(i)
[2349]1615 if mask[len(mask)-1]:
1616 iend.append(len(mask)-1)
[1819]1617 if len(istart) != len(iend):
1618 raise RuntimeError("Numbers of mask start != mask end.")
1619 for i in range(len(istart)):
1620 if istart[i] > iend[i]:
1621 raise RuntimeError("Mask start index > mask end index")
1622 break
1623 return istart,iend
1624
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        if maskstring == "":
            # an empty expression selects every valid IF (all channels)
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # NOTE(review): assumes at least one row holds this
                        # IF; if none matches, the last row is silently used
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half of one channel width in the current unit
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1762
[2611]1763 @asaplog_post_dec
1764 def parse_idx_selection(self, mode, selexpr):
1765 """
1766 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
1767 BEAMNO, and row number
1768
1769 Parameters:
1770 mode : which column to select.
1771 ['scan',|'if'|'pol'|'beam'|'row']
1772 selexpr : A comma separated selection expression.
1773 examples:
1774 '' = all (returns [])
1775 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
1776 (returns [0,1,4,5,6,9])
1777 Returns:
1778 A List of selected indices
1779 """
1780 if selexpr == "":
1781 return []
1782 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
1783 'b': 'beam', 'r': 'row'}
1784 smode = mode.lower()[0]
1785 if not (smode in valid_modes.keys()):
1786 msg = "Invalid mode '%s'. Valid modes are %s" %\
1787 (mode, str(valid_modes.values()))
1788 asaplog.post()
1789 asaplog.push(msg)
1790 asaplog.post("ERROR")
1791 mode = valid_modes[smode]
1792 minidx = None
1793 maxidx = None
1794 if smode == 'r':
1795 minidx = 0
1796 maxidx = self.nrow()
1797 else:
1798 idx = getattr(self,"get"+mode+"nos")()
1799 minidx = min(idx)
1800 maxidx = max(idx)
1801 del idx
[2867]1802 # split selexpr by "<spaces>,<spaces>"
1803 comma_sep = re.compile('\s*,\s*')
1804 sellist = comma_sep.split(selexpr)
[2611]1805 idxlist = []
1806 for currselstr in sellist:
1807 # single range (may include ~, < or >)
1808 currlist = self._parse_selection(currselstr, typestr='integer',
1809 minval=minidx,maxval=maxidx)
1810 for thelist in currlist:
1811 idxlist += range(thelist[0],thelist[1]+1)
1812 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
1813 asaplog.push(msg)
1814 return idxlist
1815
[2349]1816 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]1817 minval=None, maxval=None):
[2013]1818 """
1819 Parameters:
1820 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1821 typestr : The type of the values in returned list
1822 ('integer' or 'float')
1823 offset : The offset value to subtract from or add to
1824 the boundary value if the selection string
[2611]1825 includes '<' or '>' [Valid only for typestr='float']
[2013]1826 minval, maxval : The minimum/maximum values to set if the
1827 selection string includes '<' or '>'.
1828 The list element is filled with None by default.
1829 Returns:
1830 A list of min/max pair of selections.
1831 Example:
[2611]1832 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1833 --> returns [[0,2],[5,7],[9,9]]
1834 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1835 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
[2013]1836 """
[2867]1837 # split selstr by '<spaces>;<spaces>'
1838 semi_sep = re.compile('\s*;\s*')
1839 selgroups = semi_sep.split(selstr)
[2013]1840 sellists = []
1841 if typestr.lower().startswith('int'):
1842 formatfunc = int
[2611]1843 offset = 1
[2013]1844 else:
1845 formatfunc = float
1846
1847 for currsel in selgroups:
[2867]1848 if currsel.strip() == '*' or len(currsel.strip()) == 0:
1849 minsel = minval
1850 maxsel = maxval
[2013]1851 if currsel.find('~') > 0:
[2611]1852 # val0 <= x <= val1
[2013]1853 minsel = formatfunc(currsel.split('~')[0].strip())
[2867]1854 maxsel = formatfunc(currsel.split('~')[1].strip())
[2611]1855 elif currsel.strip().find('<=') > -1:
1856 bound = currsel.split('<=')
1857 try: # try "x <= val"
1858 minsel = minval
1859 maxsel = formatfunc(bound[1].strip())
1860 except ValueError: # now "val <= x"
1861 minsel = formatfunc(bound[0].strip())
1862 maxsel = maxval
1863 elif currsel.strip().find('>=') > -1:
1864 bound = currsel.split('>=')
1865 try: # try "x >= val"
1866 minsel = formatfunc(bound[1].strip())
1867 maxsel = maxval
1868 except ValueError: # now "val >= x"
1869 minsel = minval
1870 maxsel = formatfunc(bound[0].strip())
1871 elif currsel.strip().find('<') > -1:
1872 bound = currsel.split('<')
1873 try: # try "x < val"
1874 minsel = minval
1875 maxsel = formatfunc(bound[1].strip()) \
1876 - formatfunc(offset)
1877 except ValueError: # now "val < x"
1878 minsel = formatfunc(bound[0].strip()) \
[2013]1879 + formatfunc(offset)
[2611]1880 maxsel = maxval
1881 elif currsel.strip().find('>') > -1:
1882 bound = currsel.split('>')
1883 try: # try "x > val"
1884 minsel = formatfunc(bound[1].strip()) \
1885 + formatfunc(offset)
1886 maxsel = maxval
1887 except ValueError: # now "val > x"
1888 minsel = minval
1889 maxsel = formatfunc(bound[0].strip()) \
1890 - formatfunc(offset)
[2013]1891 else:
1892 minsel = formatfunc(currsel)
1893 maxsel = formatfunc(currsel)
1894 sellists.append([minsel,maxsel])
1895 return sellists
1896
[1819]1897# def get_restfreqs(self):
1898# """
1899# Get the restfrequency(s) stored in this scantable.
1900# The return value(s) are always of unit 'Hz'
1901# Parameters:
1902# none
1903# Returns:
1904# a list of doubles
1905# """
1906# return list(self._getrestfreqs())
1907
1908 def get_restfreqs(self, ids=None):
[1846]1909 """\
[256]1910 Get the restfrequency(s) stored in this scantable.
1911 The return value(s) are always of unit 'Hz'
[1846]1912
[256]1913 Parameters:
[1846]1914
[1819]1915 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1916 be retrieved
[1846]1917
[256]1918 Returns:
[1846]1919
[1819]1920 dictionary containing ids and a list of doubles for each id
[1846]1921
[256]1922 """
[1819]1923 if ids is None:
[2349]1924 rfreqs = {}
[1819]1925 idlist = self.getmolnos()
1926 for i in idlist:
[2349]1927 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1928 return rfreqs
1929 else:
[2349]1930 if type(ids) == list or type(ids) == tuple:
1931 rfreqs = {}
[1819]1932 for i in ids:
[2349]1933 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1934 return rfreqs
1935 else:
1936 return list(self._getrestfreqs(ids))
[102]1937
[2349]1938 @asaplog_post_dec
[931]1939 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1940 """\
[931]1941 Set or replace the restfrequency specified and
[1938]1942 if the 'freqs' argument holds a scalar,
[931]1943 then that rest frequency will be applied to all the selected
1944 data. If the 'freqs' argument holds
1945 a vector, then it MUST be of equal or smaller length than
1946 the number of IFs (and the available restfrequencies will be
1947 replaced by this vector). In this case, *all* data have
1948 the restfrequency set per IF according
1949 to the corresponding value you give in the 'freqs' vector.
[1118]1950 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1951 IF 1 gets restfreq 2e9.
[1846]1952
[1395]1953 You can also specify the frequencies via a linecatalog.
[1153]1954
[931]1955 Parameters:
[1846]1956
[931]1957 freqs: list of rest frequency values or string idenitfiers
[1855]1958
[931]1959 unit: unit for rest frequency (default 'Hz')
[402]1960
[1846]1961
1962 Example::
1963
[1819]1964 # set the given restfrequency for the all currently selected IFs
[931]1965 scan.set_restfreqs(freqs=1.4e9)
[1845]1966 # set restfrequencies for the n IFs (n > 1) in the order of the
1967 # list, i.e
1968 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1969 # len(list_of_restfreqs) == nIF
1970 # for nIF == 1 the following will set multiple restfrequency for
1971 # that IF
[1819]1972 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1973 # set multiple restfrequencies per IF. as a list of lists where
1974 # the outer list has nIF elements, the inner s arbitrary
1975 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1976
[1846]1977 *Note*:
[1845]1978
[931]1979 To do more sophisticate Restfrequency setting, e.g. on a
1980 source and IF basis, use scantable.set_selection() before using
[1846]1981 this function::
[931]1982
[1846]1983 # provided your scantable is called scan
1984 selection = selector()
[2431]1985 selection.set_name('ORION*')
[1846]1986 selection.set_ifs([1])
1987 scan.set_selection(selection)
1988 scan.set_restfreqs(freqs=86.6e9)
1989
[931]1990 """
1991 varlist = vars()
[1157]1992 from asap import linecatalog
1993 # simple value
[1118]1994 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1995 self._setrestfreqs([freqs], [""], unit)
[1157]1996 # list of values
[1118]1997 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1998 # list values are scalars
[1118]1999 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]2000 if len(freqs) == 1:
2001 self._setrestfreqs(freqs, [""], unit)
2002 else:
2003 # allow the 'old' mode of setting mulitple IFs
2004 savesel = self._getselection()
[2599]2005 sel = self.get_selection()
[1845]2006 iflist = self.getifnos()
2007 if len(freqs)>len(iflist):
2008 raise ValueError("number of elements in list of list "
2009 "exeeds the current IF selections")
2010 iflist = self.getifnos()
2011 for i, fval in enumerate(freqs):
2012 sel.set_ifs(iflist[i])
2013 self._setselection(sel)
2014 self._setrestfreqs([fval], [""], unit)
2015 self._setselection(savesel)
2016
2017 # list values are dict, {'value'=, 'name'=)
[1157]2018 elif isinstance(freqs[-1], dict):
[1845]2019 values = []
2020 names = []
2021 for d in freqs:
2022 values.append(d["value"])
2023 names.append(d["name"])
2024 self._setrestfreqs(values, names, unit)
[1819]2025 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]2026 savesel = self._getselection()
[2599]2027 sel = self.get_selection()
[1322]2028 iflist = self.getifnos()
[1819]2029 if len(freqs)>len(iflist):
[1845]2030 raise ValueError("number of elements in list of list exeeds"
2031 " the current IF selections")
2032 for i, fval in enumerate(freqs):
[1322]2033 sel.set_ifs(iflist[i])
[1259]2034 self._setselection(sel)
[1845]2035 self._setrestfreqs(fval, [""], unit)
[1157]2036 self._setselection(savesel)
2037 # freqs are to be taken from a linecatalog
[1153]2038 elif isinstance(freqs, linecatalog):
2039 savesel = self._getselection()
[2599]2040 sel = self.get_selection()
[1153]2041 for i in xrange(freqs.nrow()):
[1322]2042 sel.set_ifs(iflist[i])
[1153]2043 self._setselection(sel)
[1845]2044 self._setrestfreqs([freqs.get_frequency(i)],
2045 [freqs.get_name(i)], "MHz")
[1153]2046 # ensure that we are not iterating past nIF
2047 if i == self.nif()-1: break
2048 self._setselection(savesel)
[931]2049 else:
2050 return
2051 self._add_history("set_restfreqs", varlist)
2052
[2349]2053 @asaplog_post_dec
[1360]2054 def shift_refpix(self, delta):
[1846]2055 """\
[1589]2056 Shift the reference pixel of the Spectra Coordinate by an
2057 integer amount.
[1846]2058
[1589]2059 Parameters:
[1846]2060
[1589]2061 delta: the amount to shift by
[1846]2062
2063 *Note*:
2064
[1589]2065 Be careful using this with broadband data.
[1846]2066
[1360]2067 """
[2349]2068 varlist = vars()
[1731]2069 Scantable.shift_refpix(self, delta)
[2349]2070 s._add_history("shift_refpix", varlist)
[931]2071
[1862]2072 @asaplog_post_dec
[2820]2073 def history(self, filename=None, nrows=-1, start=0):
[1846]2074 """\
[1259]2075 Print the history. Optionally to a file.
[1846]2076
[1348]2077 Parameters:
[1846]2078
[1928]2079 filename: The name of the file to save the history to.
[1846]2080
[1259]2081 """
[2820]2082 n = self._historylength()
2083 if nrows == -1:
2084 nrows = n
2085 if start+nrows > n:
2086 nrows = nrows-start
2087 if n > 1000 and nrows == n:
2088 nrows = 1000
2089 start = n-1000
2090 asaplog.push("Warning: History has {0} entries. Displaying last "
2091 "1000".format(n))
2092 hist = list(self._gethistory(nrows, start))
[794]2093 out = "-"*80
[484]2094 for h in hist:
[2820]2095 if not h.strip():
2096 continue
2097 if h.find("---") >-1:
2098 continue
[489]2099 else:
2100 items = h.split("##")
2101 date = items[0]
2102 func = items[1]
2103 items = items[2:]
[794]2104 out += "\n"+date+"\n"
2105 out += "Function: %s\n Parameters:" % (func)
[489]2106 for i in items:
[1938]2107 if i == '':
2108 continue
[489]2109 s = i.split("=")
[1118]2110 out += "\n %s = %s" % (s[0], s[1])
[2820]2111 out = "\n".join([out, "*"*80])
[1259]2112 if filename is not None:
2113 if filename is "":
2114 filename = 'scantable_history.txt'
2115 filename = os.path.expandvars(os.path.expanduser(filename))
2116 if not os.path.isdir(filename):
2117 data = open(filename, 'w')
2118 data.write(out)
2119 data.close()
2120 else:
2121 msg = "Illegal file name '%s'." % (filename)
[1859]2122 raise IOError(msg)
2123 return page(out)
[2349]2124
[513]2125 #
2126 # Maths business
2127 #
[1862]2128 @asaplog_post_dec
[2818]2129 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2130 avmode="NONE"):
[1846]2131 """\
[2349]2132 Return the (time) weighted average of a scan. Scans will be averaged
2133 only if the source direction (RA/DEC) is within 1' otherwise
[1846]2134
2135 *Note*:
2136
[1070]2137 in channels only - align if necessary
[1846]2138
[513]2139 Parameters:
[1846]2140
[513]2141 mask: an optional mask (only used for 'var' and 'tsys'
2142 weighting)
[1855]2143
[558]2144 scanav: True averages each scan separately
2145 False (default) averages all scans together,
[1855]2146
[1099]2147 weight: Weighting scheme.
2148 'none' (mean no weight)
2149 'var' (1/var(spec) weighted)
2150 'tsys' (1/Tsys**2 weighted)
2151 'tint' (integration time weighted)
2152 'tintsys' (Tint/Tsys**2)
2153 'median' ( median averaging)
[535]2154 The default is 'tint'
[1855]2155
[931]2156 align: align the spectra in velocity before averaging. It takes
2157 the time of the first spectrum as reference time.
[2818]2158 avmode: 'SOURCE' - also select by source name - or
2159 'NONE' (default). Not applicable for scanav=True or
2160 weight=median
[1846]2161
2162 Example::
2163
[513]2164 # time average the scantable without using a mask
[710]2165 newscan = scan.average_time()
[1846]2166
[513]2167 """
2168 varlist = vars()
[1593]2169 weight = weight or 'TINT'
2170 mask = mask or ()
[2818]2171 scanav = (scanav and 'SCAN') or avmode.upper()
[1118]2172 scan = (self, )
[1859]2173
2174 if align:
2175 scan = (self.freq_align(insitu=False), )
[2818]2176 asaplog.push("Note: Alignment is don on a source-by-source basis")
2177 asaplog.push("Note: Averaging (by default) is not")
2178 # we need to set it to SOURCE averaging here
[1859]2179 s = None
2180 if weight.upper() == 'MEDIAN':
2181 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2182 scanav))
2183 else:
2184 s = scantable(self._math._average(scan, mask, weight.upper(),
2185 scanav))
[1099]2186 s._add_history("average_time", varlist)
[513]2187 return s
[710]2188
[1862]2189 @asaplog_post_dec
[876]2190 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]2191 """\
[513]2192 Return a scan where all spectra are converted to either
2193 Jansky or Kelvin depending upon the flux units of the scan table.
2194 By default the function tries to look the values up internally.
2195 If it can't find them (or if you want to over-ride), you must
2196 specify EITHER jyperk OR eta (and D which it will try to look up
2197 also if you don't set it). jyperk takes precedence if you set both.
[1846]2198
[513]2199 Parameters:
[1846]2200
[513]2201 jyperk: the Jy / K conversion factor
[1855]2202
[513]2203 eta: the aperture efficiency
[1855]2204
[1928]2205 d: the geometric diameter (metres)
[1855]2206
[513]2207 insitu: if False a new scantable is returned.
2208 Otherwise, the scaling is done in-situ
2209 The default is taken from .asaprc (False)
[1846]2210
[513]2211 """
2212 if insitu is None: insitu = rcParams['insitu']
[876]2213 self._math._setinsitu(insitu)
[513]2214 varlist = vars()
[1593]2215 jyperk = jyperk or -1.0
2216 d = d or -1.0
2217 eta = eta or -1.0
[876]2218 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2219 s._add_history("convert_flux", varlist)
2220 if insitu: self._assign(s)
2221 else: return s
[513]2222
[1862]2223 @asaplog_post_dec
[876]2224 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]2225 """\
[513]2226 Return a scan after applying a gain-elevation correction.
2227 The correction can be made via either a polynomial or a
2228 table-based interpolation (and extrapolation if necessary).
2229 You specify polynomial coefficients, an ascii table or neither.
2230 If you specify neither, then a polynomial correction will be made
2231 with built in coefficients known for certain telescopes (an error
2232 will occur if the instrument is not known).
2233 The data and Tsys are *divided* by the scaling factors.
[1846]2234
[513]2235 Parameters:
[1846]2236
[513]2237 poly: Polynomial coefficients (default None) to compute a
2238 gain-elevation correction as a function of
2239 elevation (in degrees).
[1855]2240
[513]2241 filename: The name of an ascii file holding correction factors.
2242 The first row of the ascii file must give the column
2243 names and these MUST include columns
[2431]2244 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]2245 by this) somewhere.
2246 The second row must give the data type of the
2247 column. Use 'R' for Real and 'I' for Integer.
2248 An example file would be
2249 (actual factors are arbitrary) :
2250
2251 TIME ELEVATION FACTOR
2252 R R R
2253 0.1 0 0.8
2254 0.2 20 0.85
2255 0.3 40 0.9
2256 0.4 60 0.85
2257 0.5 80 0.8
2258 0.6 90 0.75
[1855]2259
[513]2260 method: Interpolation method when correcting from a table.
[2431]2261 Values are 'nearest', 'linear' (default), 'cubic'
2262 and 'spline'
[1855]2263
[513]2264 insitu: if False a new scantable is returned.
2265 Otherwise, the scaling is done in-situ
2266 The default is taken from .asaprc (False)
[1846]2267
[513]2268 """
2269
2270 if insitu is None: insitu = rcParams['insitu']
[876]2271 self._math._setinsitu(insitu)
[513]2272 varlist = vars()
[1593]2273 poly = poly or ()
[513]2274 from os.path import expandvars
2275 filename = expandvars(filename)
[876]2276 s = scantable(self._math._gainel(self, poly, filename, method))
2277 s._add_history("gain_el", varlist)
[1593]2278 if insitu:
2279 self._assign(s)
2280 else:
2281 return s
[710]2282
[1862]2283 @asaplog_post_dec
[931]2284 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2285 """\
[513]2286 Return a scan where all rows have been aligned in frequency/velocity.
2287 The alignment frequency frame (e.g. LSRK) is that set by function
2288 set_freqframe.
[1846]2289
[513]2290 Parameters:
[1855]2291
[513]2292 reftime: reference time to align at. By default, the time of
2293 the first row of data is used.
[1855]2294
[513]2295 method: Interpolation method for regridding the spectra.
[2431]2296 Choose from 'nearest', 'linear', 'cubic' (default)
2297 and 'spline'
[1855]2298
[513]2299 insitu: if False a new scantable is returned.
2300 Otherwise, the scaling is done in-situ
2301 The default is taken from .asaprc (False)
[1846]2302
[513]2303 """
[931]2304 if insitu is None: insitu = rcParams["insitu"]
[2429]2305 oldInsitu = self._math._insitu()
[876]2306 self._math._setinsitu(insitu)
[513]2307 varlist = vars()
[1593]2308 reftime = reftime or ""
[931]2309 s = scantable(self._math._freq_align(self, reftime, method))
[876]2310 s._add_history("freq_align", varlist)
[2429]2311 self._math._setinsitu(oldInsitu)
[2349]2312 if insitu:
2313 self._assign(s)
2314 else:
2315 return s
[513]2316
[1862]2317 @asaplog_post_dec
[1725]2318 def opacity(self, tau=None, insitu=None):
[1846]2319 """\
[513]2320 Apply an opacity correction. The data
2321 and Tsys are multiplied by the correction factor.
[1846]2322
[513]2323 Parameters:
[1855]2324
[1689]2325 tau: (list of) opacity from which the correction factor is
[513]2326 exp(tau*ZD)
[1689]2327 where ZD is the zenith-distance.
2328 If a list is provided, it has to be of length nIF,
2329 nIF*nPol or 1 and in order of IF/POL, e.g.
2330 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2331 if tau is `None` the opacities are determined from a
2332 model.
[1855]2333
[513]2334 insitu: if False a new scantable is returned.
2335 Otherwise, the scaling is done in-situ
2336 The default is taken from .asaprc (False)
[1846]2337
[513]2338 """
[2349]2339 if insitu is None:
2340 insitu = rcParams['insitu']
[876]2341 self._math._setinsitu(insitu)
[513]2342 varlist = vars()
[1689]2343 if not hasattr(tau, "__len__"):
2344 tau = [tau]
[876]2345 s = scantable(self._math._opacity(self, tau))
2346 s._add_history("opacity", varlist)
[2349]2347 if insitu:
2348 self._assign(s)
2349 else:
2350 return s
[513]2351
[1862]2352 @asaplog_post_dec
[513]2353 def bin(self, width=5, insitu=None):
[1846]2354 """\
[513]2355 Return a scan where all spectra have been binned up.
[1846]2356
[1348]2357 Parameters:
[1846]2358
[513]2359 width: The bin width (default=5) in pixels
[1855]2360
[513]2361 insitu: if False a new scantable is returned.
2362 Otherwise, the scaling is done in-situ
2363 The default is taken from .asaprc (False)
[1846]2364
[513]2365 """
[2349]2366 if insitu is None:
2367 insitu = rcParams['insitu']
[876]2368 self._math._setinsitu(insitu)
[513]2369 varlist = vars()
[876]2370 s = scantable(self._math._bin(self, width))
[1118]2371 s._add_history("bin", varlist)
[1589]2372 if insitu:
2373 self._assign(s)
2374 else:
2375 return s
[513]2376
    @asaplog_post_dec
    def reshape(self, first, last, insitu=None):
        """Resize the band by providing first and last channel.
        This will cut off all channels outside [first, last].

        Parameters:

            first:   index of the first channel to keep

            last:    index of the last channel to keep; a negative value
                     is counted back from the end of the band

            insitu:  if False a new scantable is returned. Otherwise the
                     modification is done in-situ.
                     The default is taken from .asaprc (False)
        """
        if insitu is None:
            insitu = rcParams['insitu']
        varlist = vars()
        if last < 0:
            # NOTE(review): this maps last=-1 to nchan()-2, not nchan()-1
            # as the usual Python negative-index convention would suggest
            # -- confirm this is intended
            last = self.nchan()-1 + last
        s = None
        if insitu:
            s = self
        else:
            s = self.copy()
        s._reshape(first,last)
        s._add_history("reshape", varlist)
        if not insitu:
            return s
2396
2397 @asaplog_post_dec
[513]2398 def resample(self, width=5, method='cubic', insitu=None):
[1846]2399 """\
[1348]2400 Return a scan where all spectra have been binned up.
[1573]2401
[1348]2402 Parameters:
[1846]2403
[513]2404 width: The bin width (default=5) in pixels
[1855]2405
[513]2406 method: Interpolation method when correcting from a table.
[2431]2407 Values are 'nearest', 'linear', 'cubic' (default)
2408 and 'spline'
[1855]2409
[513]2410 insitu: if False a new scantable is returned.
2411 Otherwise, the scaling is done in-situ
2412 The default is taken from .asaprc (False)
[1846]2413
[513]2414 """
[2349]2415 if insitu is None:
2416 insitu = rcParams['insitu']
[876]2417 self._math._setinsitu(insitu)
[513]2418 varlist = vars()
[876]2419 s = scantable(self._math._resample(self, method, width))
[1118]2420 s._add_history("resample", varlist)
[2349]2421 if insitu:
2422 self._assign(s)
2423 else:
2424 return s
[513]2425
[1862]2426 @asaplog_post_dec
[946]2427 def average_pol(self, mask=None, weight='none'):
[1846]2428 """\
[946]2429 Average the Polarisations together.
[1846]2430
[946]2431 Parameters:
[1846]2432
[946]2433 mask: An optional mask defining the region, where the
2434 averaging will be applied. The output will have all
2435 specified points masked.
[1855]2436
[946]2437 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2438 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2439
[946]2440 """
2441 varlist = vars()
[1593]2442 mask = mask or ()
[1010]2443 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2444 s._add_history("average_pol", varlist)
[992]2445 return s
[513]2446
[1862]2447 @asaplog_post_dec
[1145]2448 def average_beam(self, mask=None, weight='none'):
[1846]2449 """\
[1145]2450 Average the Beams together.
[1846]2451
[1145]2452 Parameters:
2453 mask: An optional mask defining the region, where the
2454 averaging will be applied. The output will have all
2455 specified points masked.
[1855]2456
[1145]2457 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2458 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2459
[1145]2460 """
2461 varlist = vars()
[1593]2462 mask = mask or ()
[1145]2463 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2464 s._add_history("average_beam", varlist)
2465 return s
2466
[1586]2467 def parallactify(self, pflag):
[1846]2468 """\
[1843]2469 Set a flag to indicate whether this data should be treated as having
[1617]2470 been 'parallactified' (total phase == 0.0)
[1846]2471
[1617]2472 Parameters:
[1855]2473
[1843]2474 pflag: Bool indicating whether to turn this on (True) or
[1617]2475 off (False)
[1846]2476
[1617]2477 """
[1586]2478 varlist = vars()
2479 self._parallactify(pflag)
2480 self._add_history("parallactify", varlist)
2481
[1862]2482 @asaplog_post_dec
[992]2483 def convert_pol(self, poltype=None):
[1846]2484 """\
[992]2485 Convert the data to a different polarisation type.
[1565]2486 Note that you will need cross-polarisation terms for most conversions.
[1846]2487
[992]2488 Parameters:
[1855]2489
[992]2490 poltype: The new polarisation type. Valid types are:
[2431]2491 'linear', 'circular', 'stokes' and 'linpol'
[1846]2492
[992]2493 """
2494 varlist = vars()
[1859]2495 s = scantable(self._math._convertpol(self, poltype))
[1118]2496 s._add_history("convert_pol", varlist)
[992]2497 return s
2498
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy for interactive comparison/rejection
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive verification state passed to _get_verify_action;
        # 'N' auto-rejects remaining rows (see below). Initial value 'H'
        # semantics are defined by _get_verify_action -- presumably the
        # first prompt includes help text; confirm against that helper.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed vs original spectrum for row r
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                        )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row revert to the default prompt mode
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2591
[2186]2592 @asaplog_post_dec
[2435]2593 def regrid_channel(self, width=5, plot=False, insitu=None):
2594 """\
2595 Regrid the spectra by the specified channel width
2596
2597 Parameters:
2598
2599 width: The channel width (float) of regridded spectra
2600 in the current spectral unit.
2601
2602 plot: [NOT IMPLEMENTED YET]
2603 plot the original and the regridded spectra.
2604 In this each indivual fit has to be approved, by
2605 typing 'y' or 'n'
2606
2607 insitu: if False a new scantable is returned.
2608 Otherwise, the scaling is done in-situ
2609 The default is taken from .asaprc (False)
2610
2611 """
2612 if insitu is None: insitu = rcParams['insitu']
2613 varlist = vars()
2614
2615 if plot:
2616 asaplog.post()
2617 asaplog.push("Verification plot is not implemtnetd yet.")
2618 asaplog.post("WARN")
2619
2620 s = self.copy()
2621 s._regrid_specchan(width)
2622
2623 s._add_history("regrid_channel", varlist)
2624
2625# if plot:
2626# from asap.asapplotter import new_asaplot
2627# theplot = new_asaplot(rcParams['plotter.gui'])
[2535]2628# from matplotlib import rc as rcp
2629# rcp('lines', linewidth=1)
[2435]2630# theplot.set_panels()
2631# ylab=s._get_ordinate_label()
2632# #theplot.palette(0,["#777777","red"])
2633# for r in xrange(s.nrow()):
2634# xsm=s._getabcissa(r)
2635# ysm=s._getspectrum(r)
2636# xorg=orgscan._getabcissa(r)
2637# yorg=orgscan._getspectrum(r)
2638# theplot.clear()
2639# theplot.hold()
2640# theplot.set_axes('ylabel',ylab)
2641# theplot.set_axes('xlabel',s._getabcissalabel(r))
2642# theplot.set_axes('title',s._getsourcename(r))
2643# theplot.set_line(label='Original',color="#777777")
2644# theplot.plot(xorg,yorg)
2645# theplot.set_line(label='Smoothed',color="red")
2646# theplot.plot(xsm,ysm)
2647# ### Ugly part for legend
2648# for i in [0,1]:
2649# theplot.subplots[0]['lines'].append(
2650# [theplot.subplots[0]['axes'].lines[i]]
2651# )
2652# theplot.release()
2653# ### Ugly part for legend
2654# theplot.subplots[0]['lines']=[]
2655# res = raw_input("Accept smoothing ([y]/n): ")
2656# if res.upper() == 'N':
2657# s._setspectrum(yorg, r)
2658# theplot.quit()
2659# del theplot
2660# del orgscan
2661
2662 if insitu: self._assign(s)
2663 else: return s
2664
2665 @asaplog_post_dec
[2186]2666 def _parse_wn(self, wn):
2667 if isinstance(wn, list) or isinstance(wn, tuple):
2668 return wn
2669 elif isinstance(wn, int):
2670 return [ wn ]
2671 elif isinstance(wn, str):
[2277]2672 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2673 val = wn.split('-')
2674 val = [int(val[0]), int(val[1])]
2675 val.sort()
2676 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2677 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2678 val = int(wn[2:])+1
2679 res = [i for i in xrange(val)]
[2277]2680 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2681 val = int(wn[:-2])+1
2682 res = [i for i in xrange(val)]
[2277]2683 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2684 val = int(wn[1:])
2685 res = [i for i in xrange(val)]
[2277]2686 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2687 val = int(wn[:-1])
2688 res = [i for i in xrange(val)]
[2411]2689 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2690 # then interpreted in C++
2691 # side as [a,a+1,...,a_nyq]
2692 # (CAS-3759)
[2186]2693 val = int(wn[2:])
[2411]2694 res = [val, -999]
2695 #res = [i for i in xrange(val, self.nchan()/2+1)]
2696 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2697 # then interpreted in C++
2698 # side as [a,a+1,...,a_nyq]
2699 # (CAS-3759)
[2186]2700 val = int(wn[:-2])
[2411]2701 res = [val, -999]
2702 #res = [i for i in xrange(val, self.nchan()/2+1)]
2703 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2704 # then interpreted in C++
2705 # side as [a+1,a+2,...,a_nyq]
2706 # (CAS-3759)
[2186]2707 val = int(wn[1:])+1
[2411]2708 res = [val, -999]
2709 #res = [i for i in xrange(val, self.nchan()/2+1)]
2710 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2711 # then interpreted in C++
2712 # side as [a+1,a+2,...,a_nyq]
2713 # (CAS-3759)
[2186]2714 val = int(wn[:-1])+1
[2411]2715 res = [val, -999]
2716 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2717
[2186]2718 return res
2719 else:
2720 msg = 'wrong value given for addwn/rejwn'
2721 raise RuntimeError(msg)
2722
[2713]2723 @asaplog_post_dec
[2810]2724 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
[2767]2725 """\
2726 Subtract baseline based on parameters written in Baseline Table.
2727
2728 Parameters:
[2809]2729 insitu: if True, baseline fitting/subtraction is done
[2810]2730 in-situ. If False, a new scantable with
2731 baseline subtracted is returned. Actually,
2732 format of the returned value depends on both
2733 insitu and retfitres (see below).
[2767]2734 The default is taken from .asaprc (False)
[2810]2735 retfitres: if True, the results of baseline fitting (i.e.,
2736 coefficients and rms) are returned.
2737 default is False.
2738 The format of the returned value of this
2739 function varies as follows:
2740 (1) in case insitu=True and retfitres=True:
2741 fitting result.
2742 (2) in case insitu=True and retfitres=False:
2743 None.
2744 (3) in case insitu=False and retfitres=True:
2745 a dictionary containing a new scantable
2746 (with baseline subtracted) and the fitting
2747 results.
2748 (4) in case insitu=False and retfitres=False:
2749 a new scantable (with baseline subtracted).
[2767]2750 inbltable: name of input baseline table. The row number of
2751 scantable and that of inbltable must be
2752 identical.
2753 outbltable: name of output baseline table where baseline
2754 parameters and fitting results recorded.
2755 default is ''(no output).
[2809]2756 overwrite: if True when an existing baseline table is
2757 specified for outbltable, overwrites it.
2758 Otherwise there is no harm.
[2767]2759 default is False.
2760 """
2761
2762 try:
2763 varlist = vars()
[2810]2764 if retfitres is None: retfitres = False
[2767]2765 if inbltable is None: raise ValueError("bltable missing.")
2766 if outbltable is None: outbltable = ''
2767 if overwrite is None: overwrite = False
2768
2769 if insitu is None: insitu = rcParams['insitu']
2770 if insitu:
2771 workscan = self
2772 else:
2773 workscan = self.copy()
2774
2775 sres = workscan._apply_bltable(inbltable,
[2810]2776 retfitres,
[2767]2777 outbltable,
2778 os.path.exists(outbltable),
2779 overwrite)
[2810]2780 if retfitres: res = parse_fitresult(sres)
[2767]2781
2782 workscan._add_history('apply_bltable', varlist)
2783
2784 if insitu:
2785 self._assign(workscan)
[2810]2786 if retfitres:
2787 return res
2788 else:
2789 return None
[2767]2790 else:
[2810]2791 if retfitres:
2792 return {'scantable': workscan, 'fitresults': res}
2793 else:
2794 return workscan
[2767]2795
2796 except RuntimeError, e:
2797 raise_fitting_failure_exception(e)
2798
2799 @asaplog_post_dec
[2810]2800 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
[2767]2801 """\
2802 Subtract baseline based on parameters written in the input list.
2803
2804 Parameters:
[2809]2805 insitu: if True, baseline fitting/subtraction is done
[2810]2806 in-situ. If False, a new scantable with
2807 baseline subtracted is returned. Actually,
2808 format of the returned value depends on both
2809 insitu and retfitres (see below).
[2767]2810 The default is taken from .asaprc (False)
[2810]2811 retfitres: if True, the results of baseline fitting (i.e.,
2812 coefficients and rms) are returned.
2813 default is False.
2814 The format of the returned value of this
2815 function varies as follows:
2816 (1) in case insitu=True and retfitres=True:
2817 fitting result.
2818 (2) in case insitu=True and retfitres=False:
2819 None.
2820 (3) in case insitu=False and retfitres=True:
2821 a dictionary containing a new scantable
2822 (with baseline subtracted) and the fitting
2823 results.
2824 (4) in case insitu=False and retfitres=False:
2825 a new scantable (with baseline subtracted).
[2767]2826 blinfo: baseline parameter set stored in a dictionary
2827 or a list of dictionary. Each dictionary
2828 corresponds to each spectrum and must contain
2829 the following keys and values:
2830 'row': row number,
2831 'blfunc': function name. available ones include
2832 'poly', 'chebyshev', 'cspline' and
2833 'sinusoid',
2834 'order': maximum order of polynomial. needed
2835 if blfunc='poly' or 'chebyshev',
2836 'npiece': number or piecewise polynomial.
2837 needed if blfunc='cspline',
2838 'nwave': a list of sinusoidal wave numbers.
2839 needed if blfunc='sinusoid', and
2840 'masklist': min-max windows for channel mask.
2841 the specified ranges will be used
2842 for fitting.
2843 bltable: name of output baseline table where baseline
2844 parameters and fitting results recorded.
2845 default is ''(no output).
[2809]2846 overwrite: if True when an existing baseline table is
2847 specified for bltable, overwrites it.
2848 Otherwise there is no harm.
[2767]2849 default is False.
2850
2851 Example:
2852 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
2853 'masklist':[[10,350],[352,510]]},
2854 {'row':1, 'blfunc':'cspline', 'npiece':3,
2855 'masklist':[[3,16],[19,404],[407,511]]}
2856 ])
2857
2858 the first spectrum (row=0) will be fitted with polynomial
2859 of order=5 and the next one (row=1) will be fitted with cubic
2860 spline consisting of 3 pieces.
2861 """
2862
2863 try:
2864 varlist = vars()
[2810]2865 if retfitres is None: retfitres = False
[2767]2866 if blinfo is None: blinfo = []
2867 if bltable is None: bltable = ''
2868 if overwrite is None: overwrite = False
2869
2870 if insitu is None: insitu = rcParams['insitu']
2871 if insitu:
2872 workscan = self
2873 else:
2874 workscan = self.copy()
2875
2876 nrow = workscan.nrow()
2877
2878 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
2879
2880 print "in_blinfo=< "+ str(in_blinfo)+" >"
2881
2882 sres = workscan._sub_baseline(in_blinfo,
[2810]2883 retfitres,
[2767]2884 bltable,
2885 os.path.exists(bltable),
2886 overwrite)
[2810]2887 if retfitres: res = parse_fitresult(sres)
[2767]2888
2889 workscan._add_history('sub_baseline', varlist)
2890
2891 if insitu:
2892 self._assign(workscan)
[2810]2893 if retfitres:
2894 return res
2895 else:
2896 return None
[2767]2897 else:
[2810]2898 if retfitres:
2899 return {'scantable': workscan, 'fitresults': res}
2900 else:
2901 return workscan
[2767]2902
2903 except RuntimeError, e:
2904 raise_fitting_failure_exception(e)
2905
2906 @asaplog_post_dec
[2713]2907 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
2908 whichrow=None, uselinefinder=None, edge=None,
2909 threshold=None, chan_avg_limit=None):
2910 """\
2911 Calculates and returns model selection criteria for a specified
2912 baseline model and a given spectrum data.
2913 Available values include Akaike Information Criterion (AIC), the
2914 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
2915 Bayesian Information Criterion (BIC) and the Generalised Cross
2916 Validation (GCV).
[2186]2917
[2713]2918 Parameters:
2919 value: name of model selection criteria to calculate.
2920 available ones include 'aic', 'aicc', 'bic' and
2921 'gcv'. default is 'aicc'.
2922 blfunc: baseline function name. available ones include
2923 'chebyshev', 'cspline' and 'sinusoid'.
2924 default is 'chebyshev'.
2925 order: parameter for basline function. actually stands for
2926 order of polynomial (order) for 'chebyshev',
2927 number of spline pieces (npiece) for 'cspline' and
2928 maximum wave number for 'sinusoid', respectively.
2929 default is 5 (which is also the default order value
2930 for [auto_]chebyshev_baseline()).
2931 mask: an optional mask. default is [].
2932 whichrow: row number. default is 0 (the first row)
2933 uselinefinder: use sd.linefinder() to flag out line regions
2934 default is True.
2935 edge: an optional number of channel to drop at
2936 the edge of spectrum. If only one value is
2937 specified, the same number will be dropped
2938 from both sides of the spectrum. Default
2939 is to keep all channels. Nested tuples
2940 represent individual edge selection for
2941 different IFs (a number of spectral channels
2942 can be different)
2943 default is (0, 0).
2944 threshold: the threshold used by line finder. It is
2945 better to keep it large as only strong lines
2946 affect the baseline solution.
2947 default is 3.
2948 chan_avg_limit: a maximum number of consequtive spectral
2949 channels to average during the search of
2950 weak and broad lines. The default is no
2951 averaging (and no search for weak lines).
2952 If such lines can affect the fitted baseline
2953 (e.g. a high order polynomial is fitted),
2954 increase this parameter (usually values up
2955 to 8 are reasonable). Most users of this
2956 method should find the default value sufficient.
2957 default is 1.
2958
2959 Example:
2960 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
2961 """
2962
2963 try:
2964 varlist = vars()
2965
2966 if value is None: value = 'aicc'
2967 if blfunc is None: blfunc = 'chebyshev'
2968 if order is None: order = 5
2969 if mask is None: mask = []
2970 if whichrow is None: whichrow = 0
2971 if uselinefinder is None: uselinefinder = True
2972 if edge is None: edge = (0, 0)
2973 if threshold is None: threshold = 3
2974 if chan_avg_limit is None: chan_avg_limit = 1
2975
2976 return self._calc_aic(value, blfunc, order, mask,
2977 whichrow, uselinefinder, edge,
2978 threshold, chan_avg_limit)
2979
2980 except RuntimeError, e:
2981 raise_fitting_failure_exception(e)
2982
    @asaplog_post_dec
    def sinusoid_baseline(self, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None,
                          insitu=None,
                          clipthresh=None, clipniter=None,
                          plot=None,
                          getresidual=None,
                          showprogress=None, minnrow=None,
                          outlog=None,
                          blfile=None, csvformat=None,
                          bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.
            bltable:       name of a baseline table where fitting results
                           (coefficients, rms, etc.) are to be written.
                           if given, fitting results will NOT be output to
                           scantable (insitu=True) or None will be
                           returned (insitu=False).
                           (default is "": no table output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill unspecified parameters with their documented defaults
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT-related options are packed into a single
            # comma-separated string for the underlying C++ call
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # the csv flag is passed as a one-character prefix ('T'/'F')
            # on the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask,
                                        fftinfo,
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)
            workscan._add_history('sinusoid_baseline', varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself, hence no scantable is handed back
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3128
3129
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None,
                               insitu=None,
                               clipthresh=None, clipniter=None,
                               edge=None, threshold=None, chan_avg_limit=None,
                               plot=None,
                               getresidual=None,
                               showprogress=None, minnrow=None,
                               outlog=None,
                               blfile=None, csvformat=None,
                               bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [0].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill unspecified parameters with their documented defaults
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT-related options are packed into a single
            # comma-separated string for the underlying C++ call
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # the csv flag is passed as a one-character prefix ('T'/'F')
            # on the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask,
                                             fftinfo,
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile, bltable)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself, hence no scantable is handed back
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3297
    @asaplog_post_dec
    def cspline_baseline(self, mask=None, npiece=None, insitu=None,
                         clipthresh=None, clipniter=None, plot=None,
                         getresidual=None, showprogress=None, minnrow=None,
                         outlog=None, blfile=None, csvformat=None,
                         bltable=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).

        Parameters:
            mask:         An optional mask
            npiece:       Number of pieces. (default is 2)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a cubic spline consisting of 2 pieces
            # (i.e., 1 internal knot),
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill unspecified parameters with their documented defaults
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag is passed as a one-character prefix ('T'/'F')
            # on the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._cspline_baseline(mask, npiece,
                                       clipthresh, clipniter,
                                       getresidual,
                                       pack_progress_params(showprogress,
                                                            minnrow),
                                       outlog, scsvformat+blfile,
                                       bltable)
            workscan._add_history("cspline_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself, hence no scantable is handed back
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1855]3396
    @asaplog_post_dec
    def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            npiece:         Number of pieces. (default is 2)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill unspecified parameters with their documented defaults
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag is passed as a one-character prefix ('T'/'F')
            # on the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_cspline_baseline(mask, npiece,
                                            clipthresh, clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog,
                                            scsvformat+blfile,
                                            bltable)
            workscan._add_history("auto_cspline_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself, hence no scantable is handed back
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[513]3522
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order
            # up to 5, also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill unspecified parameters with their documented defaults
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag is passed as a one-character prefix ('T'/'F')
            # on the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself, hence no scantable is handed back
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3620
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill unspecified parameters with their documented defaults
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag is passed as a one-character prefix ('T'/'F')
            # on the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself, hence no scantable is handed back
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3743
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve documented defaults for all keyword parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer expects a leading 'T'/'F' flag glued onto blfile
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive path: fit row by row with the Python fitter
                # so each fit can be inspected and accepted/rejected
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                        os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" shows help on the first prompt,
                # "Y" (set when the user answers 'A') accepts all remaining
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual (baseline-subtracted) or the fit
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole job to the C++ layer
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable) #

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]3893
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve documented defaults for all keyword parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer expects a leading 'T'/'F' flag glued onto blfile
            scsvformat = 'T' if csvformat else 'F'

            # flatten edge spec into a per-IF [left, right, left, right, ...] list
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: linefinder masks lines, then each row is
                # fitted with the Python fitter and shown for approval
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" shows help on the first prompt,
                # "Y" (set when the user answers 'A') accepts all remaining
                action = "H"
                for r in rows:
                    # edge list holds two entries per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual (baseline-subtracted) or the fit
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole job to the C++ layer
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table, not the scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]4078
4079 def _init_blinfo(self):
4080 """\
4081 Initialise the following three auxiliary members:
4082 blpars : parameters of the best-fit baseline,
4083 masklists : mask data (edge positions of masked channels) and
4084 actualmask : mask data (in boolean list),
4085 to keep for use later (including output to logger/text files).
4086 Used by poly_baseline() and auto_poly_baseline() in case of
4087 'plot=True'.
4088 """
4089 self.blpars = []
4090 self.masklists = []
4091 self.actualmask = []
4092 return
[880]4093
[2012]4094 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4095 """\
4096 Append baseline-fitting related info to blpars, masklist and
4097 actualmask.
4098 """
4099 self.blpars.append(data_blpars)
4100 self.masklists.append(data_masklists)
4101 self.actualmask.append(data_actualmask)
4102 return
4103
[1862]4104 @asaplog_post_dec
[914]4105 def rotate_linpolphase(self, angle):
[1846]4106 """\
[914]4107 Rotate the phase of the complex polarization O=Q+iU correlation.
4108 This is always done in situ in the raw data. So if you call this
4109 function more than once then each call rotates the phase further.
[1846]4110
[914]4111 Parameters:
[1846]4112
[914]4113 angle: The angle (degrees) to rotate (add) by.
[1846]4114
4115 Example::
4116
[914]4117 scan.rotate_linpolphase(2.3)
[1846]4118
[914]4119 """
4120 varlist = vars()
[936]4121 self._math._rotate_linpolphase(self, angle)
[914]4122 self._add_history("rotate_linpolphase", varlist)
4123 return
[710]4124
[1862]4125 @asaplog_post_dec
[914]4126 def rotate_xyphase(self, angle):
[1846]4127 """\
[914]4128 Rotate the phase of the XY correlation. This is always done in situ
4129 in the data. So if you call this function more than once
4130 then each call rotates the phase further.
[1846]4131
[914]4132 Parameters:
[1846]4133
[914]4134 angle: The angle (degrees) to rotate (add) by.
[1846]4135
4136 Example::
4137
[914]4138 scan.rotate_xyphase(2.3)
[1846]4139
[914]4140 """
4141 varlist = vars()
[936]4142 self._math._rotate_xyphase(self, angle)
[914]4143 self._add_history("rotate_xyphase", varlist)
4144 return
4145
[1862]4146 @asaplog_post_dec
[914]4147 def swap_linears(self):
[1846]4148 """\
[1573]4149 Swap the linear polarisations XX and YY, or better the first two
[1348]4150 polarisations as this also works for ciculars.
[914]4151 """
4152 varlist = vars()
[936]4153 self._math._swap_linears(self)
[914]4154 self._add_history("swap_linears", varlist)
4155 return
4156
[1862]4157 @asaplog_post_dec
[914]4158 def invert_phase(self):
[1846]4159 """\
[914]4160 Invert the phase of the complex polarisation
4161 """
4162 varlist = vars()
[936]4163 self._math._invert_phase(self)
[914]4164 self._add_history("invert_phase", varlist)
4165 return
4166
[1862]4167 @asaplog_post_dec
[876]4168 def add(self, offset, insitu=None):
[1846]4169 """\
[513]4170 Return a scan where all spectra have the offset added
[1846]4171
[513]4172 Parameters:
[1846]4173
[513]4174 offset: the offset
[1855]4175
[513]4176 insitu: if False a new scantable is returned.
4177 Otherwise, the scaling is done in-situ
4178 The default is taken from .asaprc (False)
[1846]4179
[513]4180 """
4181 if insitu is None: insitu = rcParams['insitu']
[876]4182 self._math._setinsitu(insitu)
[513]4183 varlist = vars()
[876]4184 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]4185 s._add_history("add", varlist)
[876]4186 if insitu:
4187 self._assign(s)
4188 else:
[513]4189 return s
4190
[1862]4191 @asaplog_post_dec
[1308]4192 def scale(self, factor, tsys=True, insitu=None):
[1846]4193 """\
4194
[1938]4195 Return a scan where all spectra are scaled by the given 'factor'
[1846]4196
[513]4197 Parameters:
[1846]4198
[1819]4199 factor: the scaling factor (float or 1D float list)
[1855]4200
[513]4201 insitu: if False a new scantable is returned.
4202 Otherwise, the scaling is done in-situ
4203 The default is taken from .asaprc (False)
[1855]4204
[513]4205 tsys: if True (default) then apply the operation to Tsys
4206 as well as the data
[1846]4207
[513]4208 """
4209 if insitu is None: insitu = rcParams['insitu']
[876]4210 self._math._setinsitu(insitu)
[513]4211 varlist = vars()
[1819]4212 s = None
4213 import numpy
4214 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]4215 if isinstance(factor[0], list) or isinstance(factor[0],
4216 numpy.ndarray):
[1819]4217 from asapmath import _array2dOp
[2320]4218 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]4219 else:
[2320]4220 s = scantable( self._math._arrayop( self, factor,
4221 "MUL", tsys ) )
[1819]4222 else:
[2320]4223 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]4224 s._add_history("scale", varlist)
[876]4225 if insitu:
4226 self._assign(s)
4227 else:
[513]4228 return s
4229
[2349]4230 @preserve_selection
4231 def set_sourcetype(self, match, matchtype="pattern",
[1504]4232 sourcetype="reference"):
[1846]4233 """\
[1502]4234 Set the type of the source to be an source or reference scan
[1846]4235 using the provided pattern.
4236
[1502]4237 Parameters:
[1846]4238
[1504]4239 match: a Unix style pattern, regular expression or selector
[1855]4240
[1504]4241 matchtype: 'pattern' (default) UNIX style pattern or
4242 'regex' regular expression
[1855]4243
[1502]4244 sourcetype: the type of the source to use (source/reference)
[1846]4245
[1502]4246 """
4247 varlist = vars()
4248 stype = -1
[2480]4249 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
[1502]4250 stype = 1
[2480]4251 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
[1502]4252 stype = 0
[1504]4253 else:
[2480]4254 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
[1504]4255 if matchtype.lower().startswith("p"):
4256 matchtype = "pattern"
4257 elif matchtype.lower().startswith("r"):
4258 matchtype = "regex"
4259 else:
4260 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]4261 sel = selector()
4262 if isinstance(match, selector):
4263 sel = match
4264 else:
[2480]4265 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4266 self.set_selection(sel)
[1502]4267 self._setsourcetype(stype)
[1573]4268 self._add_history("set_sourcetype", varlist)
[1502]4269
[2818]4270
4271 def set_sourcename(self, name):
4272 varlist = vars()
4273 self._setsourcename(name)
4274 self._add_history("set_sourcename", varlist)
4275
[1862]4276 @asaplog_post_dec
[1857]4277 @preserve_selection
[1819]4278 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]4279 """\
[670]4280 This function allows to build quotients automatically.
[1819]4281 It assumes the observation to have the same number of
[670]4282 "ons" and "offs"
[1846]4283
[670]4284 Parameters:
[1846]4285
[710]4286 preserve: you can preserve (default) the continuum or
4287 remove it. The equations used are
[1857]4288
[670]4289 preserve: Output = Toff * (on/off) - Toff
[1857]4290
[1070]4291 remove: Output = Toff * (on/off) - Ton
[1855]4292
[1573]4293 mode: the on/off detection mode
[1348]4294 'paired' (default)
4295 identifies 'off' scans by the
4296 trailing '_R' (Mopra/Parkes) or
4297 '_e'/'_w' (Tid) and matches
4298 on/off pairs from the observing pattern
[1502]4299 'time'
4300 finds the closest off in time
[1348]4301
[1857]4302 .. todo:: verify argument is not implemented
4303
[670]4304 """
[1857]4305 varlist = vars()
[1348]4306 modes = ["time", "paired"]
[670]4307 if not mode in modes:
[876]4308 msg = "please provide valid mode. Valid modes are %s" % (modes)
4309 raise ValueError(msg)
[1348]4310 s = None
4311 if mode.lower() == "paired":
[2840]4312 from asap._asap import srctype
[1857]4313 sel = self.get_selection()
[2840]4314 #sel.set_query("SRCTYPE==psoff")
4315 sel.set_types(srctype.psoff)
[1356]4316 self.set_selection(sel)
[1348]4317 offs = self.copy()
[2840]4318 #sel.set_query("SRCTYPE==pson")
4319 sel.set_types(srctype.pson)
[1356]4320 self.set_selection(sel)
[1348]4321 ons = self.copy()
4322 s = scantable(self._math._quotient(ons, offs, preserve))
4323 elif mode.lower() == "time":
4324 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]4325 s._add_history("auto_quotient", varlist)
[876]4326 return s
[710]4327
[1862]4328 @asaplog_post_dec
[1145]4329 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]4330 """\
[1143]4331 Form a quotient using "off" beams when observing in "MX" mode.
[1846]4332
[1143]4333 Parameters:
[1846]4334
[1145]4335 mask: an optional mask to be used when weight == 'stddev'
[1855]4336
[1143]4337 weight: How to average the off beams. Default is 'median'.
[1855]4338
[1145]4339 preserve: you can preserve (default) the continuum or
[1855]4340 remove it. The equations used are:
[1846]4341
[1855]4342 preserve: Output = Toff * (on/off) - Toff
4343
4344 remove: Output = Toff * (on/off) - Ton
4345
[1217]4346 """
[1593]4347 mask = mask or ()
[1141]4348 varlist = vars()
4349 on = scantable(self._math._mx_extract(self, 'on'))
[1143]4350 preoff = scantable(self._math._mx_extract(self, 'off'))
4351 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]4352 from asapmath import quotient
[1145]4353 q = quotient(on, off, preserve)
[1143]4354 q._add_history("mx_quotient", varlist)
[1217]4355 return q
[513]4356
[1862]4357 @asaplog_post_dec
[718]4358 def freq_switch(self, insitu=None):
[1846]4359 """\
[718]4360 Apply frequency switching to the data.
[1846]4361
[718]4362 Parameters:
[1846]4363
[718]4364 insitu: if False a new scantable is returned.
4365 Otherwise, the swictching is done in-situ
4366 The default is taken from .asaprc (False)
[1846]4367
[718]4368 """
4369 if insitu is None: insitu = rcParams['insitu']
[876]4370 self._math._setinsitu(insitu)
[718]4371 varlist = vars()
[876]4372 s = scantable(self._math._freqswitch(self))
[1118]4373 s._add_history("freq_switch", varlist)
[1856]4374 if insitu:
4375 self._assign(s)
4376 else:
4377 return s
[718]4378
[1862]4379 @asaplog_post_dec
[780]4380 def recalc_azel(self):
[1846]4381 """Recalculate the azimuth and elevation for each position."""
[780]4382 varlist = vars()
[876]4383 self._recalcazel()
[780]4384 self._add_history("recalc_azel", varlist)
4385 return
4386
[1862]4387 @asaplog_post_dec
[513]4388 def __add__(self, other):
[2574]4389 """
4390 implicit on all axes and on Tsys
4391 """
[513]4392 varlist = vars()
[2574]4393 s = self.__op( other, "ADD" )
[513]4394 s._add_history("operator +", varlist)
4395 return s
4396
[1862]4397 @asaplog_post_dec
[513]4398 def __sub__(self, other):
4399 """
4400 implicit on all axes and on Tsys
4401 """
4402 varlist = vars()
[2574]4403 s = self.__op( other, "SUB" )
[513]4404 s._add_history("operator -", varlist)
4405 return s
[710]4406
[1862]4407 @asaplog_post_dec
[513]4408 def __mul__(self, other):
4409 """
4410 implicit on all axes and on Tsys
4411 """
4412 varlist = vars()
[2574]4413 s = self.__op( other, "MUL" ) ;
[513]4414 s._add_history("operator *", varlist)
4415 return s
4416
[710]4417
[1862]4418 @asaplog_post_dec
[513]4419 def __div__(self, other):
4420 """
4421 implicit on all axes and on Tsys
4422 """
4423 varlist = vars()
[2574]4424 s = self.__op( other, "DIV" )
4425 s._add_history("operator /", varlist)
4426 return s
4427
4428 @asaplog_post_dec
4429 def __op( self, other, mode ):
[513]4430 s = None
4431 if isinstance(other, scantable):
[2574]4432 s = scantable(self._math._binaryop(self, other, mode))
[513]4433 elif isinstance(other, float):
4434 if other == 0.0:
[718]4435 raise ZeroDivisionError("Dividing by zero is not recommended")
[2574]4436 s = scantable(self._math._unaryop(self, other, mode, False))
[2144]4437 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]4438 if isinstance(other[0], list) \
4439 or isinstance(other[0], numpy.ndarray):
[2144]4440 from asapmath import _array2dOp
[2574]4441 s = _array2dOp( self, other, mode, False )
[2144]4442 else:
[2574]4443 s = scantable( self._math._arrayop( self, other,
4444 mode, False ) )
[513]4445 else:
[718]4446 raise TypeError("Other input is not a scantable or float value")
[513]4447 return s
4448
[1862]4449 @asaplog_post_dec
[530]4450 def get_fit(self, row=0):
[1846]4451 """\
[530]4452 Print or return the stored fits for a row in the scantable
[1846]4453
[530]4454 Parameters:
[1846]4455
[530]4456 row: the row which the fit has been applied to.
[1846]4457
[530]4458 """
4459 if row > self.nrow():
4460 return
[976]4461 from asap.asapfit import asapfit
[530]4462 fit = asapfit(self._getfit(row))
[1859]4463 asaplog.push( '%s' %(fit) )
4464 return fit.as_dict()
[530]4465
[2349]4466 @preserve_selection
[1483]4467 def flag_nans(self):
[1846]4468 """\
[1483]4469 Utility function to flag NaN values in the scantable.
4470 """
4471 import numpy
4472 basesel = self.get_selection()
4473 for i in range(self.nrow()):
[1589]4474 sel = self.get_row_selector(i)
4475 self.set_selection(basesel+sel)
[1483]4476 nans = numpy.isnan(self._getspectrum(0))
[2877]4477 if numpy.any(nans):
4478 bnans = [ bool(v) for v in nans]
4479 self.flag(bnans)
4480
4481 self.set_selection(basesel)
[1483]4482
[1588]4483 def get_row_selector(self, rowno):
[1992]4484 return selector(rows=[rowno])
[1573]4485
[484]4486 def _add_history(self, funcname, parameters):
[1435]4487 if not rcParams['scantable.history']:
4488 return
[484]4489 # create date
4490 sep = "##"
4491 from datetime import datetime
4492 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
4493 hist = dstr+sep
4494 hist += funcname+sep#cdate+sep
[2349]4495 if parameters.has_key('self'):
4496 del parameters['self']
[1118]4497 for k, v in parameters.iteritems():
[484]4498 if type(v) is dict:
[1118]4499 for k2, v2 in v.iteritems():
[484]4500 hist += k2
4501 hist += "="
[1118]4502 if isinstance(v2, scantable):
[484]4503 hist += 'scantable'
4504 elif k2 == 'mask':
[1118]4505 if isinstance(v2, list) or isinstance(v2, tuple):
[513]4506 hist += str(self._zip_mask(v2))
4507 else:
4508 hist += str(v2)
[484]4509 else:
[513]4510 hist += str(v2)
[484]4511 else:
4512 hist += k
4513 hist += "="
[1118]4514 if isinstance(v, scantable):
[484]4515 hist += 'scantable'
4516 elif k == 'mask':
[1118]4517 if isinstance(v, list) or isinstance(v, tuple):
[513]4518 hist += str(self._zip_mask(v))
4519 else:
4520 hist += str(v)
[484]4521 else:
4522 hist += str(v)
4523 hist += sep
4524 hist = hist[:-2] # remove trailing '##'
4525 self._addhistory(hist)
4526
[710]4527
[484]4528 def _zip_mask(self, mask):
4529 mask = list(mask)
4530 i = 0
4531 segments = []
4532 while mask[i:].count(1):
4533 i += mask[i:].index(1)
4534 if mask[i:].count(0):
4535 j = i + mask[i:].index(0)
4536 else:
[710]4537 j = len(mask)
[1118]4538 segments.append([i, j])
[710]4539 i = j
[484]4540 return segments
[714]4541
[626]4542 def _get_ordinate_label(self):
4543 fu = "("+self.get_fluxunit()+")"
4544 import re
4545 lbl = "Intensity"
[1118]4546 if re.match(".K.", fu):
[626]4547 lbl = "Brightness Temperature "+ fu
[1118]4548 elif re.match(".Jy.", fu):
[626]4549 lbl = "Flux density "+ fu
4550 return lbl
[710]4551
[876]4552 def _check_ifs(self):
[2349]4553# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]4554 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]4555 nchans = filter(lambda t: t > 0, nchans)
[876]4556 return (sum(nchans)/len(nchans) == nchans[0])
[976]4557
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Fill this scantable from a list of input data sets.

        Parameters:

            names:    list of file/directory names; each is expanded for
                      environment variables and '~' and must exist
            unit:     flux unit to set after filling (None leaves it as is)
            average:  if True, time-average the integrations of each input
            opts:     extra options passed to the filler's open()
                      NOTE(review): mutable default argument -- harmless
                      only as long as the filler never mutates it; confirm.
        """
        first = True
        fullnames = []
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type flag: 1 -> disk-based table, 0 -> memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # choose the filler matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge each subsequent file into what was read so far
                tbl = self._math._merge([self, tbl])
            # rebind this instance to the newly filled/merged table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]4598
    def _get_verify_action( self, msg, action=None ):
        """\
        Ask the user (or use a preset 'action') how to proceed at an
        interactive verification step.

        Parameters:

            msg:     prompt text shown to the user
            action:  preset reply; when None or not a string the user
                     is prompted via raw_input

        Returns one of 'Y', 'N', 'A', 'R' (first letter, upper-cased).
        An empty or unrecognised reply defaults to 'Y'; 'H' or '?'
        prints the help text and prompts again.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # after showing help, prompt again (no preset this time)
            return self._get_verify_action(msg)
        else:
            return 'Y'
[2012]4617
[1402]4618 def __getitem__(self, key):
4619 if key < 0:
4620 key += self.nrow()
4621 if key >= self.nrow():
4622 raise IndexError("Row index out of range.")
4623 return self._getspectrum(key)
4624
4625 def __setitem__(self, key, value):
4626 if key < 0:
4627 key += self.nrow()
4628 if key >= self.nrow():
4629 raise IndexError("Row index out of range.")
4630 if not hasattr(value, "__len__") or \
4631 len(value) > self.nchan(self.getif(key)):
4632 raise ValueError("Spectrum length doesn't match.")
4633 return self._setspectrum(value, key)
4634
4635 def __len__(self):
4636 return self.nrow()
4637
4638 def __iter__(self):
4639 for i in range(len(self)):
4640 yield self[i]
Note: See TracBrowser for help on using the repository browser.