source: trunk/python/scantable.py@ 2854

Last change on this file since 2854 was 2844, checked in by Takeshi Nakazato, 11 years ago

New Development: No

JIRA Issue: Yes CAS-5535

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: Removed freq_tolsr option from scantable constructor

Test Programs: sdsave unit test

Put in Release Notes: No

Module(s): sd

Description:

The parameter freq_tolsr has been removed from the scantable constructor,
from both the python layer and the C++ layer.
The code now always behaves as if 'freq_tolsr=False' had been given.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 182.3 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2751]4import re
[2315]5import tempfile
[1948]6import numpy
[1691]7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
[1824]12from asap.env import is_casapy
[876]13from asap._asap import Scantable
[2004]14from asap._asap import filler, msfiller
[1824]15from asap.parameters import rcParams
[1862]16from asap.logging import asaplog, asaplog_post_dec
[1824]17from asap.selector import selector
18from asap.linecatalog import linecatalog
[1600]19from asap.coordinate import coordinate
[1859]20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]21from asap.asapfitter import fitter
[102]22
def preserve_selection(func):
    """Decorator that saves the scantable's selection before calling
    'func' and restores it afterwards, even if 'func' raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        stored_selection = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(stored_selection)
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # a scantable is a casa table directory with its bookkeeping files
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    f = open(filename+'/table.info')
    first_line = f.readline()
    f.close()
    # the type may be blank ("Type = ") in older tables, hence the
    # optional group
    return bool(re.match('^Type = (Scantable)? *$', first_line))
## return (os.path.isdir(filename)
## and not os.path.exists(filename+'/table.f1')
## and os.path.exists(filename+'/table.info'))
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # a MS is also a casa table directory; discriminate on the type
    # string recorded in table.info
    if (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        f = open(filename+'/table.info')
        header = f.readline()
        f.close()
        return header.find('Measurement Set') != -1
    return False
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                           or [[5,5],10], but can NOT be simplified to [5,10].
                           when [5,10] given, it is interpreted as
                           [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # ---- validation (py3-compatible raise syntax instead of the old
    # ---- 'raise E, msg' form; behaviour unchanged) ----
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # ---- canonicalise trivial scalar/short forms ----
    if isinstance(edge, int):
        edge = [edge, edge]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]    # e.g. [1] => [1,1]

    # 'commonedge' means a single [left,right] pair shared by all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten per-IF values, duplicating bare integers
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: push it to the log when running in
    verbose mode, otherwise re-raise it as a RuntimeError."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack a progress flag and a minimum-row threshold into the
    'true,100'-style string expected by the C++ layer."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        # drop entries whose row is out of range (packed as '')
        return [s for s in
                (do_pack_blinfo(item, maxirow) for item in blinfo)
                if s != '']
    # any other type: fall through and return None, as before
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' when the row number is >= maxirow; raises ValueError when
    mandatory keys are missing or clipping/linefinder parameters are
    given only partially.
    """
    def _flatten(val):
        # Render a scalar or a (possibly nested, one level deep) sequence
        # as a comma-separated string.
        if isinstance(val, (list, tuple)):
            parts = []
            for item in val:
                if isinstance(item, (list, tuple)):
                    parts.extend(str(sub) for sub in item)
                else:
                    parts.append(str(item))
            return ",".join(parts)
        return str(val)

    dinfo = {}
    # mandatory keys; 'row' is also needed for the range check below
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        dinfo[key] = _flatten(val)

    if irow >= maxirow:
        return ''

    # optional per-function shape parameters
    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _flatten(blinfo[key])

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}

    # each baseline function requires its own shape parameter
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter']  = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _flatten(blinfo[key])

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh']         = ''
        dinfo['edge']           = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # field order is fixed; parsed again by the C++ layer
    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
[876]303class scantable(Scantable):
[1846]304 """\
305 The ASAP container for scans (single-dish data).
[102]306 """
[1819]307
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the constructor arguments for the history record below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume a filler-readable format
                    # (rpfits/sdfits/NRO); forward NRO-specific options
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list of filler-readable files, read as one scantable
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]394
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive a default from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has its own dedicated writer
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
453
[102]454 def copy(self):
[1846]455 """Return a copy of this scantable.
456
457 *Note*:
458
[1348]459 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]460
461 Example::
462
[102]463 copiedscan = scan.copy()
[1846]464
[102]465 """
[876]466 sd = scantable(Scantable._copy(self))
[113]467 return sd
468
[1093]469 def drop_scan(self, scanid=None):
[1846]470 """\
[1093]471 Return a new scantable where the specified scan number(s) has(have)
472 been dropped.
[1846]473
[1093]474 Parameters:
[1846]475
[1093]476 scanid: a (list of) scan number(s)
[1846]477
[1093]478 """
479 from asap import _is_sequence_or_number as _is_valid
480 from asap import _to_list
481 from asap import unique
482 if not _is_valid(scanid):
[2315]483 raise RuntimeError( 'Please specify a scanno to drop from the'
484 ' scantable' )
[1859]485 scanid = _to_list(scanid)
486 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
487 for sid in scanid: allscans.remove(sid)
488 if len(allscans) == 0:
489 raise ValueError("Can't remove all scans")
490 sel = selector(scans=allscans)
491 return self._select_copy(sel)
[1093]492
[1594]493 def _select_copy(self, selection):
494 orig = self.get_selection()
495 self.set_selection(orig+selection)
496 cp = self.copy()
497 self.set_selection(orig)
498 return cp
499
[102]500 def get_scan(self, scanid=None):
[1855]501 """\
[102]502 Return a specific scan (by scanno) or collection of scans (by
503 source name) in a new scantable.
[1846]504
505 *Note*:
506
[1348]507 See scantable.drop_scan() for the inverse operation.
[1846]508
[102]509 Parameters:
[1846]510
[513]511 scanid: a (list of) scanno or a source name, unix-style
512 patterns are accepted for source name matching, e.g.
513 '*_R' gets all 'ref scans
[1846]514
515 Example::
516
[513]517 # get all scans containing the source '323p459'
518 newscan = scan.get_scan('323p459')
519 # get all 'off' scans
520 refscans = scan.get_scan('*_R')
521 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]522 newscan = scan.get_scan([0, 2, 7, 10])
[1846]523
[102]524 """
525 if scanid is None:
[1859]526 raise RuntimeError( 'Please specify a scan no or name to '
527 'retrieve from the scantable' )
[102]528 try:
[946]529 bsel = self.get_selection()
530 sel = selector()
[102]531 if type(scanid) is str:
[946]532 sel.set_name(scanid)
[1594]533 return self._select_copy(sel)
[102]534 elif type(scanid) is int:
[946]535 sel.set_scans([scanid])
[1594]536 return self._select_copy(sel)
[381]537 elif type(scanid) is list:
[946]538 sel.set_scans(scanid)
[1594]539 return self._select_copy(sel)
[381]540 else:
[718]541 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]542 raise TypeError(msg)
[102]543 except RuntimeError:
[1859]544 raise
[102]545
546 def __str__(self):
[2315]547 tempFile = tempfile.NamedTemporaryFile()
548 Scantable._summary(self, tempFile.name)
549 tempFile.seek(0)
550 asaplog.clear()
551 return tempFile.file.read()
[102]552
[2315]553 @asaplog_post_dec
[976]554 def summary(self, filename=None):
[1846]555 """\
[102]556 Print a summary of the contents of this scantable.
[1846]557
[102]558 Parameters:
[1846]559
[1931]560 filename: the name of a file to write the putput to
[102]561 Default - no file output
[1846]562
[102]563 """
564 if filename is not None:
[256]565 if filename is "":
566 filename = 'scantable_summary.txt'
[415]567 from os.path import expandvars, isdir
[411]568 filename = expandvars(filename)
[2286]569 if isdir(filename):
[718]570 msg = "Illegal file name '%s'." % (filename)
[1859]571 raise IOError(msg)
[2286]572 else:
573 filename = ""
574 Scantable._summary(self, filename)
[710]575
[1512]576 def get_spectrum(self, rowno):
[1471]577 """Return the spectrum for the current row in the scantable as a list.
[1846]578
[1471]579 Parameters:
[1846]580
[1573]581 rowno: the row number to retrieve the spectrum from
[1846]582
[1471]583 """
584 return self._getspectrum(rowno)
[946]585
[1471]586 def get_mask(self, rowno):
587 """Return the mask for the current row in the scantable as a list.
[1846]588
[1471]589 Parameters:
[1846]590
[1573]591 rowno: the row number to retrieve the mask from
[1846]592
[1471]593 """
594 return self._getmask(rowno)
595
596 def set_spectrum(self, spec, rowno):
[1938]597 """Set the spectrum for the current row in the scantable.
[1846]598
[1471]599 Parameters:
[1846]600
[1855]601 spec: the new spectrum
[1846]602
[1855]603 rowno: the row number to set the spectrum for
604
[1471]605 """
[2348]606 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]607 return self._setspectrum(spec, rowno)
608
[1600]609 def get_coordinate(self, rowno):
610 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]611
612 *Note*:
613
[1600]614 * This coordinate is only valid until a scantable method modifies
615 the frequency axis.
616 * This coordinate does contain the original frequency set-up
617 NOT the new frame. The conversions however are done using the user
618 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
619 use scantable.freq_align first. Without it there is no closure,
[1846]620 i.e.::
[1600]621
[1846]622 c = myscan.get_coordinate(0)
623 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
624
[1600]625 Parameters:
[1846]626
[1600]627 rowno: the row number for the spectral coordinate
628
629 """
630 return coordinate(Scantable.get_coordinate(self, rowno))
631
[946]632 def get_selection(self):
[1846]633 """\
[1005]634 Get the selection object currently set on this scantable.
[1846]635
636 Example::
637
[1005]638 sel = scan.get_selection()
639 sel.set_ifs(0) # select IF 0
640 scan.set_selection(sel) # apply modified selection
[1846]641
[946]642 """
643 return selector(self._getselection())
644
[1576]645 def set_selection(self, selection=None, **kw):
[1846]646 """\
[1005]647 Select a subset of the data. All following operations on this scantable
648 are only applied to thi selection.
[1846]649
[1005]650 Parameters:
[1697]651
[1846]652 selection: a selector object (default unset the selection), or
[2431]653 any combination of 'pols', 'ifs', 'beams', 'scans',
654 'cycles', 'name', 'query'
[1697]655
[1846]656 Examples::
[1697]657
[1005]658 sel = selector() # create a selection object
[1118]659 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]660 scan.set_selection(sel) # set the selection
661 scan.summary() # will only print summary of scanno 0 an 3
662 scan.set_selection() # unset the selection
[1697]663 # or the equivalent
664 scan.set_selection(scans=[0,3])
665 scan.summary() # will only print summary of scanno 0 an 3
666 scan.set_selection() # unset the selection
[1846]667
[946]668 """
[1576]669 if selection is None:
670 # reset
671 if len(kw) == 0:
672 selection = selector()
673 else:
674 # try keywords
675 for k in kw:
676 if k not in selector.fields:
[2320]677 raise KeyError("Invalid selection key '%s', "
678 "valid keys are %s" % (k,
679 selector.fields))
[1576]680 selection = selector(**kw)
[946]681 self._setselection(selection)
682
[1819]683 def get_row(self, row=0, insitu=None):
[1846]684 """\
[1819]685 Select a row in the scantable.
686 Return a scantable with single row.
[1846]687
[1819]688 Parameters:
[1846]689
690 row: row no of integration, default is 0.
691 insitu: if False a new scantable is returned. Otherwise, the
692 scaling is done in-situ. The default is taken from .asaprc
693 (False)
694
[1819]695 """
[2349]696 if insitu is None:
697 insitu = rcParams['insitu']
[1819]698 if not insitu:
699 workscan = self.copy()
700 else:
701 workscan = self
702 # Select a row
[2349]703 sel = selector()
[1992]704 sel.set_rows([row])
[1819]705 workscan.set_selection(sel)
706 if not workscan.nrow() == 1:
[2349]707 msg = "Could not identify single row. %d rows selected." \
708 % (workscan.nrow())
[1819]709 raise RuntimeError(msg)
710 if insitu:
711 self._assign(workscan)
712 else:
713 return workscan
714
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abcissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # per-row channel of the extremum, for annotation/abcissa lookup
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]
        # NOTE(review): if 'row' is neither None nor an int, 'rows' is
        # never bound and the loop below raises NameError.

        # build a human-readable report row by row; for '*_abc' stats the
        # return values are collected here as well (qx abcissa values)
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # The report is routed through a scratch file so it reaches the
        # casapy log.  NOTE(review): the /tmp path is predictable
        # (per-user, fixed name) — a tempfile.mkstemp() would be safer;
        # also 'statunit' below is unbound when the table has no rows.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
822
823 def chan2data(self, rowno=0, chan=0):
[1846]824 """\
[1819]825 Returns channel/frequency/velocity and spectral value
826 at an arbitrary row and channel in the scantable.
[1846]827
[1819]828 Parameters:
[1846]829
[1819]830 rowno: a row number in the scantable. Default is the
831 first row, i.e. rowno=0
[1855]832
[1819]833 chan: a channel in the scantable. Default is the first
834 channel, i.e. pos=0
[1846]835
[1819]836 """
837 if isinstance(rowno, int) and isinstance(chan, int):
838 qx = {'unit': self.get_unit(),
839 'value': self._getabcissa(rowno)[chan]}
840 qy = {'unit': self.get_fluxunit(),
841 'value': self._getspectrum(rowno)[chan]}
842 return qx, qy
843
[1118]844 def stddev(self, mask=None):
[1846]845 """\
[135]846 Determine the standard deviation of the current beam/if/pol
847 Takes a 'mask' as an optional parameter to specify which
848 channels should be excluded.
[1846]849
[135]850 Parameters:
[1846]851
[135]852 mask: an optional mask specifying where the standard
853 deviation should be determined.
854
[1846]855 Example::
856
[135]857 scan.set_unit('channel')
[1118]858 msk = scan.create_mask([100, 200], [500, 600])
[135]859 scan.stddev(mask=m)
[1846]860
[135]861 """
[1118]862 return self.stats(stat='stddev', mask=mask);
[135]863
[1003]864
[1259]865 def get_column_names(self):
[1846]866 """\
[1003]867 Return a list of column names, which can be used for selection.
868 """
[1259]869 return list(Scantable.get_column_names(self))
[1003]870
[1730]871 def get_tsys(self, row=-1):
[1846]872 """\
[113]873 Return the System temperatures.
[1846]874
875 Parameters:
876
877 row: the rowno to get the information for. (default all rows)
878
[113]879 Returns:
[1846]880
[876]881 a list of Tsys values for the current selection
[1846]882
[113]883 """
[1730]884 if row > -1:
885 return self._get_column(self._gettsys, row)
[876]886 return self._row_callback(self._gettsys, "Tsys")
[256]887
[2406]888 def get_tsysspectrum(self, row=-1):
889 """\
890 Return the channel dependent system temperatures.
[1730]891
[2406]892 Parameters:
893
894 row: the rowno to get the information for. (default all rows)
895
896 Returns:
897
898 a list of Tsys values for the current selection
899
900 """
901 return self._get_column( self._gettsysspectrum, row )
902
[2791]903 def set_tsys(self, values, row=-1):
904 """\
905 Set the Tsys value(s) of the given 'row' or the whole scantable
906 (selection).
907
908 Parameters:
909
910 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
911 row: the row number to apply Tsys values to.
912 (default all rows)
913
914 """
915
916 if not hasattr(values, "__len__"):
917 values = [values]
918 self._settsys(values, row)
919
[1730]920 def get_weather(self, row=-1):
[1846]921 """\
922 Return the weather informations.
923
924 Parameters:
925
926 row: the rowno to get the information for. (default all rows)
927
928 Returns:
929
930 a dict or list of of dicts of values for the current selection
931
932 """
933
[1730]934 values = self._get_column(self._get_weather, row)
935 if row > -1:
936 return {'temperature': values[0],
937 'pressure': values[1], 'humidity' : values[2],
938 'windspeed' : values[3], 'windaz' : values[4]
939 }
940 else:
941 out = []
942 for r in values:
943
944 out.append({'temperature': r[0],
945 'pressure': r[1], 'humidity' : r[2],
946 'windspeed' : r[3], 'windaz' : r[4]
947 })
948 return out
949
[876]950 def _row_callback(self, callback, label):
951 out = ""
[1118]952 outvec = []
[1590]953 sep = '-'*50
[876]954 for i in range(self.nrow()):
955 tm = self._gettime(i)
956 src = self._getsourcename(i)
[1590]957 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]958 out += 'Time[%s]:\n' % (tm)
[1590]959 if self.nbeam(-1) > 1:
960 out += ' Beam[%d] ' % (self.getbeam(i))
961 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
962 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]963 outvec.append(callback(i))
964 out += '= %3.3f\n' % (outvec[i])
[1590]965 out += sep+'\n'
[1859]966
967 asaplog.push(sep)
968 asaplog.push(" %s" % (label))
969 asaplog.push(sep)
970 asaplog.push(out)
[1861]971 asaplog.post()
[1175]972 return outvec
[256]973
[1947]974 def _get_column(self, callback, row=-1, *args):
[1070]975 """
976 """
977 if row == -1:
[1947]978 return [callback(i, *args) for i in range(self.nrow())]
[1070]979 else:
[1819]980 if 0 <= row < self.nrow():
[1947]981 return callback(row, *args)
[256]982
[1070]983
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time in the selection)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # build a strptime format matching the precision of the strings
        # returned above: strip sub-second/second/minute fields that the
        # low-level formatter omitted for small 'prec'
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]1029
[1348]1030
1031 def get_inttime(self, row=-1):
[1846]1032 """\
[1348]1033 Get a list of integration times for the observations.
1034 Return a time in seconds for each integration in the scantable.
[1846]1035
[1348]1036 Parameters:
[1846]1037
[1348]1038 row: row no of integration. Default -1 return all rows.
[1846]1039
[1348]1040 """
[1573]1041 return self._get_column(self._getinttime, row)
[1348]1042
[1573]1043
[714]1044 def get_sourcename(self, row=-1):
[1846]1045 """\
[794]1046 Get a list source names for the observations.
[714]1047 Return a string for each integration in the scantable.
1048 Parameters:
[1846]1049
[1348]1050 row: row no of integration. Default -1 return all rows.
[1846]1051
[714]1052 """
[1070]1053 return self._get_column(self._getsourcename, row)
[714]1054
[794]1055 def get_elevation(self, row=-1):
[1846]1056 """\
[794]1057 Get a list of elevations for the observations.
1058 Return a float for each integration in the scantable.
[1846]1059
[794]1060 Parameters:
[1846]1061
[1348]1062 row: row no of integration. Default -1 return all rows.
[1846]1063
[794]1064 """
[1070]1065 return self._get_column(self._getelevation, row)
[794]1066
1067 def get_azimuth(self, row=-1):
[1846]1068 """\
[794]1069 Get a list of azimuths for the observations.
1070 Return a float for each integration in the scantable.
[1846]1071
[794]1072 Parameters:
[1348]1073 row: row no of integration. Default -1 return all rows.
[1846]1074
[794]1075 """
[1070]1076 return self._get_column(self._getazimuth, row)
[794]1077
1078 def get_parangle(self, row=-1):
[1846]1079 """\
[794]1080 Get a list of parallactic angles for the observations.
1081 Return a float for each integration in the scantable.
[1846]1082
[794]1083 Parameters:
[1846]1084
[1348]1085 row: row no of integration. Default -1 return all rows.
[1846]1086
[794]1087 """
[1070]1088 return self._get_column(self._getparangle, row)
[794]1089
[1070]1090 def get_direction(self, row=-1):
1091 """
1092 Get a list of Positions on the sky (direction) for the observations.
[1594]1093 Return a string for each integration in the scantable.
[1855]1094
[1070]1095 Parameters:
[1855]1096
[1070]1097 row: row no of integration. Default -1 return all rows
[1855]1098
[1070]1099 """
1100 return self._get_column(self._getdirection, row)
1101
[1391]1102 def get_directionval(self, row=-1):
[1846]1103 """\
[1391]1104 Get a list of Positions on the sky (direction) for the observations.
1105 Return a float for each integration in the scantable.
[1846]1106
[1391]1107 Parameters:
[1846]1108
[1391]1109 row: row no of integration. Default -1 return all rows
[1846]1110
[1391]1111 """
1112 return self._get_column(self._getdirectionvec, row)
1113
[1862]1114 @asaplog_post_dec
[102]1115 def set_unit(self, unit='channel'):
[1846]1116 """\
[102]1117 Set the unit for all following operations on this scantable
[1846]1118
[102]1119 Parameters:
[1846]1120
1121 unit: optional unit, default is 'channel'. Use one of '*Hz',
1122 'km/s', 'channel' or equivalent ''
1123
[102]1124 """
[484]1125 varlist = vars()
[1118]1126 if unit in ['', 'pixel', 'channel']:
[113]1127 unit = ''
1128 inf = list(self._getcoordinfo())
1129 inf[0] = unit
1130 self._setcoordinfo(inf)
[1118]1131 self._add_history("set_unit", varlist)
[113]1132
[1862]1133 @asaplog_post_dec
[484]1134 def set_instrument(self, instr):
[1846]1135 """\
[1348]1136 Set the instrument for subsequent processing.
[1846]1137
[358]1138 Parameters:
[1846]1139
[710]1140 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1141 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1142
[358]1143 """
1144 self._setInstrument(instr)
[1118]1145 self._add_history("set_instument", vars())
[358]1146
[1862]1147 @asaplog_post_dec
[1190]1148 def set_feedtype(self, feedtype):
[1846]1149 """\
[1190]1150 Overwrite the feed type, which might not be set correctly.
[1846]1151
[1190]1152 Parameters:
[1846]1153
[1190]1154 feedtype: 'linear' or 'circular'
[1846]1155
[1190]1156 """
1157 self._setfeedtype(feedtype)
1158 self._add_history("set_feedtype", vars())
1159
[1862]1160 @asaplog_post_dec
[276]1161 def set_doppler(self, doppler='RADIO'):
[1846]1162 """\
[276]1163 Set the doppler for all following operations on this scantable.
[1846]1164
[276]1165 Parameters:
[1846]1166
[276]1167 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1168
[276]1169 """
[484]1170 varlist = vars()
[276]1171 inf = list(self._getcoordinfo())
1172 inf[2] = doppler
1173 self._setcoordinfo(inf)
[1118]1174 self._add_history("set_doppler", vars())
[710]1175
[1862]1176 @asaplog_post_dec
[226]1177 def set_freqframe(self, frame=None):
[1846]1178 """\
[113]1179 Set the frame type of the Spectral Axis.
[1846]1180
[113]1181 Parameters:
[1846]1182
[591]1183 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1184 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1185 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1186
1187 Example::
1188
[113]1189 scan.set_freqframe('BARY')
[1846]1190
[113]1191 """
[1593]1192 frame = frame or rcParams['scantable.freqframe']
[484]1193 varlist = vars()
[1819]1194 # "REST" is not implemented in casacore
1195 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1196 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1197 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1198 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1199
[989]1200 if frame in valid:
[113]1201 inf = list(self._getcoordinfo())
1202 inf[1] = frame
1203 self._setcoordinfo(inf)
[1118]1204 self._add_history("set_freqframe", varlist)
[102]1205 else:
[1118]1206 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1207 raise TypeError(msg)
[710]1208
[1862]1209 @asaplog_post_dec
[989]1210 def set_dirframe(self, frame=""):
[1846]1211 """\
[989]1212 Set the frame type of the Direction on the sky.
[1846]1213
[989]1214 Parameters:
[1846]1215
[989]1216 frame: an optional frame type, default ''. Valid frames are:
1217 'J2000', 'B1950', 'GALACTIC'
[1846]1218
1219 Example:
1220
[989]1221 scan.set_dirframe('GALACTIC')
[1846]1222
[989]1223 """
1224 varlist = vars()
[1859]1225 Scantable.set_dirframe(self, frame)
[1118]1226 self._add_history("set_dirframe", varlist)
[989]1227
[113]1228 def get_unit(self):
[1846]1229 """\
[113]1230 Get the default unit set in this scantable
[1846]1231
[113]1232 Returns:
[1846]1233
[113]1234 A unit string
[1846]1235
[113]1236 """
1237 inf = self._getcoordinfo()
1238 unit = inf[0]
1239 if unit == '': unit = 'channel'
1240 return unit
[102]1241
[1862]1242 @asaplog_post_dec
[158]1243 def get_abcissa(self, rowno=0):
[1846]1244 """\
[158]1245 Get the abcissa in the current coordinate setup for the currently
[113]1246 selected Beam/IF/Pol
[1846]1247
[113]1248 Parameters:
[1846]1249
[226]1250 rowno: an optional row number in the scantable. Default is the
1251 first row, i.e. rowno=0
[1846]1252
[113]1253 Returns:
[1846]1254
[1348]1255 The abcissa values and the format string (as a dictionary)
[1846]1256
[113]1257 """
[256]1258 abc = self._getabcissa(rowno)
[710]1259 lbl = self._getabcissalabel(rowno)
[158]1260 return abc, lbl
[113]1261
[1862]1262 @asaplog_post_dec
[2322]1263 def flag(self, mask=None, unflag=False, row=-1):
[1846]1264 """\
[1001]1265 Flag the selected data using an optional channel mask.
[1846]1266
[1001]1267 Parameters:
[1846]1268
[1001]1269 mask: an optional channel mask, created with create_mask. Default
1270 (no mask) is all channels.
[1855]1271
[1819]1272 unflag: if True, unflag the data
[1846]1273
[2322]1274 row: an optional row number in the scantable.
1275 Default -1 flags all rows
1276
[1001]1277 """
1278 varlist = vars()
[1593]1279 mask = mask or []
[1994]1280 self._flag(row, mask, unflag)
[1001]1281 self._add_history("flag", varlist)
1282
[1862]1283 @asaplog_post_dec
[2322]1284 def flag_row(self, rows=None, unflag=False):
[1846]1285 """\
[1819]1286 Flag the selected data in row-based manner.
[1846]1287
[1819]1288 Parameters:
[1846]1289
[1843]1290 rows: list of row numbers to be flagged. Default is no row
[2322]1291 (must be explicitly specified to execute row-based
1292 flagging).
[1855]1293
[1819]1294 unflag: if True, unflag the data.
[1846]1295
[1819]1296 """
1297 varlist = vars()
[2322]1298 if rows is None:
1299 rows = []
[1859]1300 self._flag_row(rows, unflag)
[1819]1301 self._add_history("flag_row", varlist)
1302
[1862]1303 @asaplog_post_dec
[1819]1304 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1305 """\
[1819]1306 Flag the selected data outside a specified range (in channel-base)
[1846]1307
[1819]1308 Parameters:
[1846]1309
[1819]1310 uthres: upper threshold.
[1855]1311
[1819]1312 dthres: lower threshold
[1846]1313
[2322]1314 clipoutside: True for flagging data outside the range
1315 [dthres:uthres].
[1928]1316 False for flagging data inside the range.
[1855]1317
[1846]1318 unflag: if True, unflag the data.
1319
[1819]1320 """
1321 varlist = vars()
[1859]1322 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1323 self._add_history("clip", varlist)
1324
[1862]1325 @asaplog_post_dec
[1584]1326 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1327 """\
[1192]1328 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1329 Flagged data in the scantable get interpolated over the region.
[1192]1330 No taper is applied.
[1846]1331
[1192]1332 Parameters:
[1846]1333
[1579]1334 start: the start frequency (really a period within the
1335 bandwidth) or period to remove
[1855]1336
[1579]1337 end: the end frequency or period to remove
[1855]1338
[2431]1339 unit: the frequency unit (default 'MHz') or '' for
[1579]1340 explicit lag channels
[1846]1341
1342 *Notes*:
1343
[1579]1344 It is recommended to flag edges of the band or strong
[1348]1345 signals beforehand.
[1846]1346
[1192]1347 """
1348 if insitu is None: insitu = rcParams['insitu']
1349 self._math._setinsitu(insitu)
1350 varlist = vars()
[1579]1351 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1352 if not (unit == "" or base.has_key(unit)):
[1192]1353 raise ValueError("%s is not a valid unit." % unit)
[1859]1354 if unit == "":
1355 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1356 else:
1357 s = scantable(self._math._lag_flag(self, start*base[unit],
1358 end*base[unit], "frequency"))
[1192]1359 s._add_history("lag_flag", varlist)
1360 if insitu:
1361 self._assign(s)
1362 else:
1363 return s
[1001]1364
[1862]1365 @asaplog_post_dec
[2349]1366 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1367 """\
1368 Apply FFT to the spectra.
1369 Flagged data in the scantable get interpolated over the region.
1370
1371 Parameters:
[2186]1372
1373 rowno: The row number(s) to be processed. int, list
[2349]1374 and tuple are accepted. By default (None), FFT
[2186]1375 is applied to the whole data.
1376
1377 mask: Auxiliary channel mask(s). Given as a boolean
1378 list, it is applied to all specified rows.
1379 A list of boolean lists can also be used to
1380 apply different masks. In the latter case, the
1381 length of 'mask' must be the same as that of
[2349]1382 'rowno'. The default is None.
[2177]1383
1384 getrealimag: If True, returns the real and imaginary part
1385 values of the complex results.
1386 If False (the default), returns the amplitude
1387 (absolute value) normalised with Ndata/2 and
1388 phase (argument, in unit of radian).
1389
1390 Returns:
1391
[2186]1392 A list of dictionaries containing the results for each spectrum.
1393 Each dictionary contains two values, the real and the imaginary
1394 parts when getrealimag = True, or the amplitude(absolute value)
1395 and the phase(argument) when getrealimag = False. The key for
1396 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1397 respectively.
1398 """
[2349]1399 if rowno is None:
1400 rowno = []
[2177]1401 if isinstance(rowno, int):
1402 rowno = [rowno]
1403 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1404 raise TypeError("The row number(s) must be int, list or tuple.")
1405 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1406
[2411]1407 usecommonmask = True
1408
1409 if mask is None:
1410 mask = []
1411 if isinstance(mask, list) or isinstance(mask, tuple):
1412 if len(mask) == 0:
1413 mask = [[]]
1414 else:
1415 if isinstance(mask[0], bool):
1416 if len(mask) != self.nchan(self.getif(rowno[0])):
1417 raise ValueError("The spectra and the mask have "
1418 "different length.")
1419 mask = [mask]
1420 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1421 usecommonmask = False
1422 if len(mask) != len(rowno):
1423 raise ValueError("When specifying masks for each "
1424 "spectrum, the numbers of them "
1425 "must be identical.")
1426 for i in xrange(mask):
1427 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1428 raise ValueError("The spectra and the mask have "
1429 "different length.")
1430 else:
1431 raise TypeError("The mask must be a boolean list or "
1432 "a list of boolean list.")
1433 else:
[2349]1434 raise TypeError("The mask must be a boolean list or a list of "
1435 "boolean list.")
[2186]1436
1437 res = []
1438
1439 imask = 0
1440 for whichrow in rowno:
1441 fspec = self._fft(whichrow, mask[imask], getrealimag)
1442 nspec = len(fspec)
[2177]1443
[2349]1444 i = 0
1445 v1 = []
1446 v2 = []
1447 reselem = {"real":[],"imag":[]} if getrealimag \
1448 else {"ampl":[],"phase":[]}
[2177]1449
[2186]1450 while (i < nspec):
1451 v1.append(fspec[i])
1452 v2.append(fspec[i+1])
[2349]1453 i += 2
[2186]1454
[2177]1455 if getrealimag:
[2186]1456 reselem["real"] += v1
1457 reselem["imag"] += v2
[2177]1458 else:
[2186]1459 reselem["ampl"] += v1
1460 reselem["phase"] += v2
[2177]1461
[2186]1462 res.append(reselem)
1463
[2349]1464 if not usecommonmask:
1465 imask += 1
[2186]1466
[2177]1467 return res
1468
1469 @asaplog_post_dec
[113]1470 def create_mask(self, *args, **kwargs):
[1846]1471 """\
[1118]1472 Compute and return a mask based on [min, max] windows.
[189]1473 The specified windows are to be INCLUDED, when the mask is
[113]1474 applied.
[1846]1475
[102]1476 Parameters:
[1846]1477
[1118]1478 [min, max], [min2, max2], ...
[1024]1479 Pairs of start/end points (inclusive)specifying the regions
[102]1480 to be masked
[1855]1481
[189]1482 invert: optional argument. If specified as True,
1483 return an inverted mask, i.e. the regions
1484 specified are EXCLUDED
[1855]1485
[513]1486 row: create the mask using the specified row for
1487 unit conversions, default is row=0
1488 only necessary if frequency varies over rows.
[1846]1489
1490 Examples::
1491
[113]1492 scan.set_unit('channel')
[1846]1493 # a)
[1118]1494 msk = scan.create_mask([400, 500], [800, 900])
[189]1495 # masks everything outside 400 and 500
[113]1496 # and 800 and 900 in the unit 'channel'
1497
[1846]1498 # b)
[1118]1499 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1500 # masks the regions between 400 and 500
[113]1501 # and 800 and 900 in the unit 'channel'
[1846]1502
1503 # c)
1504 #mask only channel 400
[1554]1505 msk = scan.create_mask([400])
[1846]1506
[102]1507 """
[1554]1508 row = kwargs.get("row", 0)
[513]1509 data = self._getabcissa(row)
[113]1510 u = self._getcoordinfo()[0]
[1859]1511 if u == "":
1512 u = "channel"
1513 msg = "The current mask window unit is %s" % u
1514 i = self._check_ifs()
1515 if not i:
1516 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1517 asaplog.push(msg)
[2348]1518 n = len(data)
[1295]1519 msk = _n_bools(n, False)
[710]1520 # test if args is a 'list' or a 'normal *args - UGLY!!!
1521
[2349]1522 ws = (isinstance(args[-1][-1], int)
1523 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1524 for window in ws:
[1554]1525 if len(window) == 1:
1526 window = [window[0], window[0]]
1527 if len(window) == 0 or len(window) > 2:
[2349]1528 raise ValueError("A window needs to be defined as "
1529 "[start(, end)]")
[1545]1530 if window[0] > window[1]:
1531 tmp = window[0]
1532 window[0] = window[1]
1533 window[1] = tmp
[102]1534 for i in range(n):
[1024]1535 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1536 msk[i] = True
[113]1537 if kwargs.has_key('invert'):
1538 if kwargs.get('invert'):
[1295]1539 msk = mask_not(msk)
[102]1540 return msk
[710]1541
[1931]1542 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1543 """\
[1819]1544 Compute and return a list of mask windows, [min, max].
[1846]1545
[1819]1546 Parameters:
[1846]1547
[1819]1548 mask: channel mask, created with create_mask.
[1855]1549
[1819]1550 row: calcutate the masklist using the specified row
1551 for unit conversions, default is row=0
1552 only necessary if frequency varies over rows.
[1846]1553
[1819]1554 Returns:
[1846]1555
[1819]1556 [min, max], [min2, max2], ...
1557 Pairs of start/end points (inclusive)specifying
1558 the masked regions
[1846]1559
[1819]1560 """
1561 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1562 raise TypeError("The mask should be list or tuple.")
[2427]1563 if len(mask) <= 0:
1564 raise TypeError("The mask elements should be > 0")
[2348]1565 data = self._getabcissa(row)
1566 if len(data) != len(mask):
[1819]1567 msg = "Number of channels in scantable != number of mask elements"
1568 raise TypeError(msg)
1569 u = self._getcoordinfo()[0]
[1859]1570 if u == "":
1571 u = "channel"
1572 msg = "The current mask window unit is %s" % u
1573 i = self._check_ifs()
1574 if not i:
1575 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1576 if not silent:
1577 asaplog.push(msg)
[2349]1578 masklist = []
[1819]1579 ist, ien = None, None
1580 ist, ien=self.get_mask_indices(mask)
1581 if ist is not None and ien is not None:
1582 for i in xrange(len(ist)):
1583 range=[data[ist[i]],data[ien[i]]]
1584 range.sort()
1585 masklist.append([range[0],range[1]])
1586 return masklist
1587
1588 def get_mask_indices(self, mask=None):
[1846]1589 """\
[1819]1590 Compute and Return lists of mask start indices and mask end indices.
[1855]1591
1592 Parameters:
1593
[1819]1594 mask: channel mask, created with create_mask.
[1846]1595
[1819]1596 Returns:
[1846]1597
[1819]1598 List of mask start indices and that of mask end indices,
1599 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1600
[1819]1601 """
1602 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1603 raise TypeError("The mask should be list or tuple.")
[2427]1604 if len(mask) <= 0:
1605 raise TypeError("The mask elements should be > 0")
[2349]1606 istart = []
1607 iend = []
1608 if mask[0]:
1609 istart.append(0)
[1819]1610 for i in range(len(mask)-1):
1611 if not mask[i] and mask[i+1]:
1612 istart.append(i+1)
1613 elif mask[i] and not mask[i+1]:
1614 iend.append(i)
[2349]1615 if mask[len(mask)-1]:
1616 iend.append(len(mask)-1)
[1819]1617 if len(istart) != len(iend):
1618 raise RuntimeError("Numbers of mask start != mask end.")
1619 for i in range(len(istart)):
1620 if istart[i] > iend[i]:
1621 raise RuntimeError("Mask start index > mask end index")
1622 break
1623 return istart,iend
1624
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         '' = all IFs (all channels)
                         '<2,4~6,9' = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
            A dictionary of selected (valid) IF and masklist pairs,
            e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        if maskstring == "":
            # empty selection means all valid IFs (all channels)
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # NOTE(review): selspws is re-initialised for every [min,max]
            # pair in spwmasklist, so with ';'-separated IF sub-ranges only
            # the last range survives — confirm this is intended
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row of this IF to read its abcissa;
                        # half a channel width is used as the offset
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # a None element means 'whole spectrum'; a None
                        # boundary inside a pair marks an open '<'/'>' end
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1758
    @asaplog_post_dec
    def parse_idx_selection(self, mode, selexpr):
        """
        Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
        BEAMNO, and row number

        Parameters:
            mode : which column to select.
                   ['scan',|'if'|'pol'|'beam'|'row']
            selexpr : A comma separated selection expression.
                     examples:
                         '' = all (returns [])
                         '<2,4~6,9' = indices less than 2, 4 to 6 and 9
                                      (returns [0,1,4,5,6,9])
        Returns:
        A List of selected indices
        """
        if selexpr == "":
            return []
        # only the first letter of mode is significant
        valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
                       'b': 'beam', 'r': 'row'}
        smode = mode.lower()[0]
        if not (smode in valid_modes.keys()):
            msg = "Invalid mode '%s'. Valid modes are %s" %\
                  (mode, str(valid_modes.values()))
            asaplog.post()
            asaplog.push(msg)
            asaplog.post("ERROR")
        mode = valid_modes[smode]
        minidx = None
        maxidx = None
        if smode == 'r':
            minidx = 0
            # NOTE(review): maxidx = self.nrow() looks one past the last
            # valid row index (rows run 0..nrow()-1) — confirm intended
            maxidx = self.nrow()
        else:
            # e.g. mode 'if' -> self.getifnos(); bounds come from the
            # actually present index values
            idx = getattr(self,"get"+mode+"nos")()
            minidx = min(idx)
            maxidx = max(idx)
            del idx
        sellist = selexpr.split(',')
        idxlist = []
        for currselstr in sellist:
            # single range (may include ~, < or >)
            currlist = self._parse_selection(currselstr, typestr='integer',
                                             minval=minidx,maxval=maxidx)
            for thelist in currlist:
                idxlist += range(thelist[0],thelist[1]+1)
        msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
        asaplog.push(msg)
        return idxlist
1809
[2349]1810 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]1811 minval=None, maxval=None):
[2013]1812 """
1813 Parameters:
1814 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1815 typestr : The type of the values in returned list
1816 ('integer' or 'float')
1817 offset : The offset value to subtract from or add to
1818 the boundary value if the selection string
[2611]1819 includes '<' or '>' [Valid only for typestr='float']
[2013]1820 minval, maxval : The minimum/maximum values to set if the
1821 selection string includes '<' or '>'.
1822 The list element is filled with None by default.
1823 Returns:
1824 A list of min/max pair of selections.
1825 Example:
[2611]1826 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1827 --> returns [[0,2],[5,7],[9,9]]
1828 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1829 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
[2013]1830 """
1831 selgroups = selstr.split(';')
1832 sellists = []
1833 if typestr.lower().startswith('int'):
1834 formatfunc = int
[2611]1835 offset = 1
[2013]1836 else:
1837 formatfunc = float
1838
1839 for currsel in selgroups:
1840 if currsel.find('~') > 0:
[2611]1841 # val0 <= x <= val1
[2013]1842 minsel = formatfunc(currsel.split('~')[0].strip())
1843 maxsel = formatfunc(currsel.split('~')[1].strip())
[2611]1844 elif currsel.strip().find('<=') > -1:
1845 bound = currsel.split('<=')
1846 try: # try "x <= val"
1847 minsel = minval
1848 maxsel = formatfunc(bound[1].strip())
1849 except ValueError: # now "val <= x"
1850 minsel = formatfunc(bound[0].strip())
1851 maxsel = maxval
1852 elif currsel.strip().find('>=') > -1:
1853 bound = currsel.split('>=')
1854 try: # try "x >= val"
1855 minsel = formatfunc(bound[1].strip())
1856 maxsel = maxval
1857 except ValueError: # now "val >= x"
1858 minsel = minval
1859 maxsel = formatfunc(bound[0].strip())
1860 elif currsel.strip().find('<') > -1:
1861 bound = currsel.split('<')
1862 try: # try "x < val"
1863 minsel = minval
1864 maxsel = formatfunc(bound[1].strip()) \
1865 - formatfunc(offset)
1866 except ValueError: # now "val < x"
1867 minsel = formatfunc(bound[0].strip()) \
[2013]1868 + formatfunc(offset)
[2611]1869 maxsel = maxval
1870 elif currsel.strip().find('>') > -1:
1871 bound = currsel.split('>')
1872 try: # try "x > val"
1873 minsel = formatfunc(bound[1].strip()) \
1874 + formatfunc(offset)
1875 maxsel = maxval
1876 except ValueError: # now "val > x"
1877 minsel = minval
1878 maxsel = formatfunc(bound[0].strip()) \
1879 - formatfunc(offset)
[2013]1880 else:
1881 minsel = formatfunc(currsel)
1882 maxsel = formatfunc(currsel)
1883 sellists.append([minsel,maxsel])
1884 return sellists
1885
[1819]1886# def get_restfreqs(self):
1887# """
1888# Get the restfrequency(s) stored in this scantable.
1889# The return value(s) are always of unit 'Hz'
1890# Parameters:
1891# none
1892# Returns:
1893# a list of doubles
1894# """
1895# return list(self._getrestfreqs())
1896
1897 def get_restfreqs(self, ids=None):
[1846]1898 """\
[256]1899 Get the restfrequency(s) stored in this scantable.
1900 The return value(s) are always of unit 'Hz'
[1846]1901
[256]1902 Parameters:
[1846]1903
[1819]1904 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1905 be retrieved
[1846]1906
[256]1907 Returns:
[1846]1908
[1819]1909 dictionary containing ids and a list of doubles for each id
[1846]1910
[256]1911 """
[1819]1912 if ids is None:
[2349]1913 rfreqs = {}
[1819]1914 idlist = self.getmolnos()
1915 for i in idlist:
[2349]1916 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1917 return rfreqs
1918 else:
[2349]1919 if type(ids) == list or type(ids) == tuple:
1920 rfreqs = {}
[1819]1921 for i in ids:
[2349]1922 rfreqs[i] = list(self._getrestfreqs(i))
[1819]1923 return rfreqs
1924 else:
1925 return list(self._getrestfreqs(ids))
[102]1926
[2349]1927 @asaplog_post_dec
[931]1928 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1929 """\
[931]1930 Set or replace the restfrequency specified and
[1938]1931 if the 'freqs' argument holds a scalar,
[931]1932 then that rest frequency will be applied to all the selected
1933 data. If the 'freqs' argument holds
1934 a vector, then it MUST be of equal or smaller length than
1935 the number of IFs (and the available restfrequencies will be
1936 replaced by this vector). In this case, *all* data have
1937 the restfrequency set per IF according
1938 to the corresponding value you give in the 'freqs' vector.
[1118]1939 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1940 IF 1 gets restfreq 2e9.
[1846]1941
[1395]1942 You can also specify the frequencies via a linecatalog.
[1153]1943
[931]1944 Parameters:
[1846]1945
[931]1946 freqs: list of rest frequency values or string idenitfiers
[1855]1947
[931]1948 unit: unit for rest frequency (default 'Hz')
[402]1949
[1846]1950
1951 Example::
1952
[1819]1953 # set the given restfrequency for the all currently selected IFs
[931]1954 scan.set_restfreqs(freqs=1.4e9)
[1845]1955 # set restfrequencies for the n IFs (n > 1) in the order of the
1956 # list, i.e
1957 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1958 # len(list_of_restfreqs) == nIF
1959 # for nIF == 1 the following will set multiple restfrequency for
1960 # that IF
[1819]1961 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1962 # set multiple restfrequencies per IF. as a list of lists where
1963 # the outer list has nIF elements, the inner s arbitrary
1964 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1965
[1846]1966 *Note*:
[1845]1967
[931]1968 To do more sophisticate Restfrequency setting, e.g. on a
1969 source and IF basis, use scantable.set_selection() before using
[1846]1970 this function::
[931]1971
[1846]1972 # provided your scantable is called scan
1973 selection = selector()
[2431]1974 selection.set_name('ORION*')
[1846]1975 selection.set_ifs([1])
1976 scan.set_selection(selection)
1977 scan.set_restfreqs(freqs=86.6e9)
1978
[931]1979 """
1980 varlist = vars()
[1157]1981 from asap import linecatalog
1982 # simple value
[1118]1983 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1984 self._setrestfreqs([freqs], [""], unit)
[1157]1985 # list of values
[1118]1986 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1987 # list values are scalars
[1118]1988 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1989 if len(freqs) == 1:
1990 self._setrestfreqs(freqs, [""], unit)
1991 else:
1992 # allow the 'old' mode of setting mulitple IFs
1993 savesel = self._getselection()
[2599]1994 sel = self.get_selection()
[1845]1995 iflist = self.getifnos()
1996 if len(freqs)>len(iflist):
1997 raise ValueError("number of elements in list of list "
1998 "exeeds the current IF selections")
1999 iflist = self.getifnos()
2000 for i, fval in enumerate(freqs):
2001 sel.set_ifs(iflist[i])
2002 self._setselection(sel)
2003 self._setrestfreqs([fval], [""], unit)
2004 self._setselection(savesel)
2005
2006 # list values are dict, {'value'=, 'name'=)
[1157]2007 elif isinstance(freqs[-1], dict):
[1845]2008 values = []
2009 names = []
2010 for d in freqs:
2011 values.append(d["value"])
2012 names.append(d["name"])
2013 self._setrestfreqs(values, names, unit)
[1819]2014 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]2015 savesel = self._getselection()
[2599]2016 sel = self.get_selection()
[1322]2017 iflist = self.getifnos()
[1819]2018 if len(freqs)>len(iflist):
[1845]2019 raise ValueError("number of elements in list of list exeeds"
2020 " the current IF selections")
2021 for i, fval in enumerate(freqs):
[1322]2022 sel.set_ifs(iflist[i])
[1259]2023 self._setselection(sel)
[1845]2024 self._setrestfreqs(fval, [""], unit)
[1157]2025 self._setselection(savesel)
2026 # freqs are to be taken from a linecatalog
[1153]2027 elif isinstance(freqs, linecatalog):
2028 savesel = self._getselection()
[2599]2029 sel = self.get_selection()
[1153]2030 for i in xrange(freqs.nrow()):
[1322]2031 sel.set_ifs(iflist[i])
[1153]2032 self._setselection(sel)
[1845]2033 self._setrestfreqs([freqs.get_frequency(i)],
2034 [freqs.get_name(i)], "MHz")
[1153]2035 # ensure that we are not iterating past nIF
2036 if i == self.nif()-1: break
2037 self._setselection(savesel)
[931]2038 else:
2039 return
2040 self._add_history("set_restfreqs", varlist)
2041
[2349]2042 @asaplog_post_dec
[1360]2043 def shift_refpix(self, delta):
[1846]2044 """\
[1589]2045 Shift the reference pixel of the Spectra Coordinate by an
2046 integer amount.
[1846]2047
[1589]2048 Parameters:
[1846]2049
[1589]2050 delta: the amount to shift by
[1846]2051
2052 *Note*:
2053
[1589]2054 Be careful using this with broadband data.
[1846]2055
[1360]2056 """
[2349]2057 varlist = vars()
[1731]2058 Scantable.shift_refpix(self, delta)
[2349]2059 s._add_history("shift_refpix", varlist)
[931]2060
[1862]2061 @asaplog_post_dec
[2820]2062 def history(self, filename=None, nrows=-1, start=0):
[1846]2063 """\
[1259]2064 Print the history. Optionally to a file.
[1846]2065
[1348]2066 Parameters:
[1846]2067
[1928]2068 filename: The name of the file to save the history to.
[1846]2069
[1259]2070 """
[2820]2071 n = self._historylength()
2072 if nrows == -1:
2073 nrows = n
2074 if start+nrows > n:
2075 nrows = nrows-start
2076 if n > 1000 and nrows == n:
2077 nrows = 1000
2078 start = n-1000
2079 asaplog.push("Warning: History has {0} entries. Displaying last "
2080 "1000".format(n))
2081 hist = list(self._gethistory(nrows, start))
[794]2082 out = "-"*80
[484]2083 for h in hist:
[2820]2084 if not h.strip():
2085 continue
2086 if h.find("---") >-1:
2087 continue
[489]2088 else:
2089 items = h.split("##")
2090 date = items[0]
2091 func = items[1]
2092 items = items[2:]
[794]2093 out += "\n"+date+"\n"
2094 out += "Function: %s\n Parameters:" % (func)
[489]2095 for i in items:
[1938]2096 if i == '':
2097 continue
[489]2098 s = i.split("=")
[1118]2099 out += "\n %s = %s" % (s[0], s[1])
[2820]2100 out = "\n".join([out, "*"*80])
[1259]2101 if filename is not None:
2102 if filename is "":
2103 filename = 'scantable_history.txt'
2104 filename = os.path.expandvars(os.path.expanduser(filename))
2105 if not os.path.isdir(filename):
2106 data = open(filename, 'w')
2107 data.write(out)
2108 data.close()
2109 else:
2110 msg = "Illegal file name '%s'." % (filename)
[1859]2111 raise IOError(msg)
2112 return page(out)
[2349]2113
[513]2114 #
2115 # Maths business
2116 #
[1862]2117 @asaplog_post_dec
[2818]2118 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2119 avmode="NONE"):
[1846]2120 """\
[2349]2121 Return the (time) weighted average of a scan. Scans will be averaged
2122 only if the source direction (RA/DEC) is within 1' otherwise
[1846]2123
2124 *Note*:
2125
[1070]2126 in channels only - align if necessary
[1846]2127
[513]2128 Parameters:
[1846]2129
[513]2130 mask: an optional mask (only used for 'var' and 'tsys'
2131 weighting)
[1855]2132
[558]2133 scanav: True averages each scan separately
2134 False (default) averages all scans together,
[1855]2135
[1099]2136 weight: Weighting scheme.
2137 'none' (mean no weight)
2138 'var' (1/var(spec) weighted)
2139 'tsys' (1/Tsys**2 weighted)
2140 'tint' (integration time weighted)
2141 'tintsys' (Tint/Tsys**2)
2142 'median' ( median averaging)
[535]2143 The default is 'tint'
[1855]2144
[931]2145 align: align the spectra in velocity before averaging. It takes
2146 the time of the first spectrum as reference time.
[2818]2147 avmode: 'SOURCE' - also select by source name - or
2148 'NONE' (default). Not applicable for scanav=True or
2149 weight=median
[1846]2150
2151 Example::
2152
[513]2153 # time average the scantable without using a mask
[710]2154 newscan = scan.average_time()
[1846]2155
[513]2156 """
2157 varlist = vars()
[1593]2158 weight = weight or 'TINT'
2159 mask = mask or ()
[2818]2160 scanav = (scanav and 'SCAN') or avmode.upper()
[1118]2161 scan = (self, )
[1859]2162
2163 if align:
2164 scan = (self.freq_align(insitu=False), )
[2818]2165 asaplog.push("Note: Alignment is don on a source-by-source basis")
2166 asaplog.push("Note: Averaging (by default) is not")
2167 # we need to set it to SOURCE averaging here
[1859]2168 s = None
2169 if weight.upper() == 'MEDIAN':
2170 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2171 scanav))
2172 else:
2173 s = scantable(self._math._average(scan, mask, weight.upper(),
2174 scanav))
[1099]2175 s._add_history("average_time", varlist)
[513]2176 return s
[710]2177
[1862]2178 @asaplog_post_dec
[876]2179 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]2180 """\
[513]2181 Return a scan where all spectra are converted to either
2182 Jansky or Kelvin depending upon the flux units of the scan table.
2183 By default the function tries to look the values up internally.
2184 If it can't find them (or if you want to over-ride), you must
2185 specify EITHER jyperk OR eta (and D which it will try to look up
2186 also if you don't set it). jyperk takes precedence if you set both.
[1846]2187
[513]2188 Parameters:
[1846]2189
[513]2190 jyperk: the Jy / K conversion factor
[1855]2191
[513]2192 eta: the aperture efficiency
[1855]2193
[1928]2194 d: the geometric diameter (metres)
[1855]2195
[513]2196 insitu: if False a new scantable is returned.
2197 Otherwise, the scaling is done in-situ
2198 The default is taken from .asaprc (False)
[1846]2199
[513]2200 """
2201 if insitu is None: insitu = rcParams['insitu']
[876]2202 self._math._setinsitu(insitu)
[513]2203 varlist = vars()
[1593]2204 jyperk = jyperk or -1.0
2205 d = d or -1.0
2206 eta = eta or -1.0
[876]2207 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2208 s._add_history("convert_flux", varlist)
2209 if insitu: self._assign(s)
2210 else: return s
[513]2211
[1862]2212 @asaplog_post_dec
[876]2213 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]2214 """\
[513]2215 Return a scan after applying a gain-elevation correction.
2216 The correction can be made via either a polynomial or a
2217 table-based interpolation (and extrapolation if necessary).
2218 You specify polynomial coefficients, an ascii table or neither.
2219 If you specify neither, then a polynomial correction will be made
2220 with built in coefficients known for certain telescopes (an error
2221 will occur if the instrument is not known).
2222 The data and Tsys are *divided* by the scaling factors.
[1846]2223
[513]2224 Parameters:
[1846]2225
[513]2226 poly: Polynomial coefficients (default None) to compute a
2227 gain-elevation correction as a function of
2228 elevation (in degrees).
[1855]2229
[513]2230 filename: The name of an ascii file holding correction factors.
2231 The first row of the ascii file must give the column
2232 names and these MUST include columns
[2431]2233 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]2234 by this) somewhere.
2235 The second row must give the data type of the
2236 column. Use 'R' for Real and 'I' for Integer.
2237 An example file would be
2238 (actual factors are arbitrary) :
2239
2240 TIME ELEVATION FACTOR
2241 R R R
2242 0.1 0 0.8
2243 0.2 20 0.85
2244 0.3 40 0.9
2245 0.4 60 0.85
2246 0.5 80 0.8
2247 0.6 90 0.75
[1855]2248
[513]2249 method: Interpolation method when correcting from a table.
[2431]2250 Values are 'nearest', 'linear' (default), 'cubic'
2251 and 'spline'
[1855]2252
[513]2253 insitu: if False a new scantable is returned.
2254 Otherwise, the scaling is done in-situ
2255 The default is taken from .asaprc (False)
[1846]2256
[513]2257 """
2258
2259 if insitu is None: insitu = rcParams['insitu']
[876]2260 self._math._setinsitu(insitu)
[513]2261 varlist = vars()
[1593]2262 poly = poly or ()
[513]2263 from os.path import expandvars
2264 filename = expandvars(filename)
[876]2265 s = scantable(self._math._gainel(self, poly, filename, method))
2266 s._add_history("gain_el", varlist)
[1593]2267 if insitu:
2268 self._assign(s)
2269 else:
2270 return s
[710]2271
[1862]2272 @asaplog_post_dec
[931]2273 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2274 """\
[513]2275 Return a scan where all rows have been aligned in frequency/velocity.
2276 The alignment frequency frame (e.g. LSRK) is that set by function
2277 set_freqframe.
[1846]2278
[513]2279 Parameters:
[1855]2280
[513]2281 reftime: reference time to align at. By default, the time of
2282 the first row of data is used.
[1855]2283
[513]2284 method: Interpolation method for regridding the spectra.
[2431]2285 Choose from 'nearest', 'linear', 'cubic' (default)
2286 and 'spline'
[1855]2287
[513]2288 insitu: if False a new scantable is returned.
2289 Otherwise, the scaling is done in-situ
2290 The default is taken from .asaprc (False)
[1846]2291
[513]2292 """
[931]2293 if insitu is None: insitu = rcParams["insitu"]
[2429]2294 oldInsitu = self._math._insitu()
[876]2295 self._math._setinsitu(insitu)
[513]2296 varlist = vars()
[1593]2297 reftime = reftime or ""
[931]2298 s = scantable(self._math._freq_align(self, reftime, method))
[876]2299 s._add_history("freq_align", varlist)
[2429]2300 self._math._setinsitu(oldInsitu)
[2349]2301 if insitu:
2302 self._assign(s)
2303 else:
2304 return s
[513]2305
[1862]2306 @asaplog_post_dec
[1725]2307 def opacity(self, tau=None, insitu=None):
[1846]2308 """\
[513]2309 Apply an opacity correction. The data
2310 and Tsys are multiplied by the correction factor.
[1846]2311
[513]2312 Parameters:
[1855]2313
[1689]2314 tau: (list of) opacity from which the correction factor is
[513]2315 exp(tau*ZD)
[1689]2316 where ZD is the zenith-distance.
2317 If a list is provided, it has to be of length nIF,
2318 nIF*nPol or 1 and in order of IF/POL, e.g.
2319 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2320 if tau is `None` the opacities are determined from a
2321 model.
[1855]2322
[513]2323 insitu: if False a new scantable is returned.
2324 Otherwise, the scaling is done in-situ
2325 The default is taken from .asaprc (False)
[1846]2326
[513]2327 """
[2349]2328 if insitu is None:
2329 insitu = rcParams['insitu']
[876]2330 self._math._setinsitu(insitu)
[513]2331 varlist = vars()
[1689]2332 if not hasattr(tau, "__len__"):
2333 tau = [tau]
[876]2334 s = scantable(self._math._opacity(self, tau))
2335 s._add_history("opacity", varlist)
[2349]2336 if insitu:
2337 self._assign(s)
2338 else:
2339 return s
[513]2340
[1862]2341 @asaplog_post_dec
[513]2342 def bin(self, width=5, insitu=None):
[1846]2343 """\
[513]2344 Return a scan where all spectra have been binned up.
[1846]2345
[1348]2346 Parameters:
[1846]2347
[513]2348 width: The bin width (default=5) in pixels
[1855]2349
[513]2350 insitu: if False a new scantable is returned.
2351 Otherwise, the scaling is done in-situ
2352 The default is taken from .asaprc (False)
[1846]2353
[513]2354 """
[2349]2355 if insitu is None:
2356 insitu = rcParams['insitu']
[876]2357 self._math._setinsitu(insitu)
[513]2358 varlist = vars()
[876]2359 s = scantable(self._math._bin(self, width))
[1118]2360 s._add_history("bin", varlist)
[1589]2361 if insitu:
2362 self._assign(s)
2363 else:
2364 return s
[513]2365
[1862]2366 @asaplog_post_dec
[2672]2367 def reshape(self, first, last, insitu=None):
2368 """Resize the band by providing first and last channel.
2369 This will cut off all channels outside [first, last].
2370 """
2371 if insitu is None:
2372 insitu = rcParams['insitu']
2373 varlist = vars()
2374 if last < 0:
2375 last = self.nchan()-1 + last
2376 s = None
2377 if insitu:
2378 s = self
2379 else:
2380 s = self.copy()
2381 s._reshape(first,last)
2382 s._add_history("reshape", varlist)
2383 if not insitu:
2384 return s
2385
2386 @asaplog_post_dec
[513]2387 def resample(self, width=5, method='cubic', insitu=None):
[1846]2388 """\
[1348]2389 Return a scan where all spectra have been binned up.
[1573]2390
[1348]2391 Parameters:
[1846]2392
[513]2393 width: The bin width (default=5) in pixels
[1855]2394
[513]2395 method: Interpolation method when correcting from a table.
[2431]2396 Values are 'nearest', 'linear', 'cubic' (default)
2397 and 'spline'
[1855]2398
[513]2399 insitu: if False a new scantable is returned.
2400 Otherwise, the scaling is done in-situ
2401 The default is taken from .asaprc (False)
[1846]2402
[513]2403 """
[2349]2404 if insitu is None:
2405 insitu = rcParams['insitu']
[876]2406 self._math._setinsitu(insitu)
[513]2407 varlist = vars()
[876]2408 s = scantable(self._math._resample(self, method, width))
[1118]2409 s._add_history("resample", varlist)
[2349]2410 if insitu:
2411 self._assign(s)
2412 else:
2413 return s
[513]2414
[1862]2415 @asaplog_post_dec
[946]2416 def average_pol(self, mask=None, weight='none'):
[1846]2417 """\
[946]2418 Average the Polarisations together.
[1846]2419
[946]2420 Parameters:
[1846]2421
[946]2422 mask: An optional mask defining the region, where the
2423 averaging will be applied. The output will have all
2424 specified points masked.
[1855]2425
[946]2426 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2427 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2428
[946]2429 """
2430 varlist = vars()
[1593]2431 mask = mask or ()
[1010]2432 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2433 s._add_history("average_pol", varlist)
[992]2434 return s
[513]2435
[1862]2436 @asaplog_post_dec
[1145]2437 def average_beam(self, mask=None, weight='none'):
[1846]2438 """\
[1145]2439 Average the Beams together.
[1846]2440
[1145]2441 Parameters:
2442 mask: An optional mask defining the region, where the
2443 averaging will be applied. The output will have all
2444 specified points masked.
[1855]2445
[1145]2446 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2447 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2448
[1145]2449 """
2450 varlist = vars()
[1593]2451 mask = mask or ()
[1145]2452 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2453 s._add_history("average_beam", varlist)
2454 return s
2455
[1586]2456 def parallactify(self, pflag):
[1846]2457 """\
[1843]2458 Set a flag to indicate whether this data should be treated as having
[1617]2459 been 'parallactified' (total phase == 0.0)
[1846]2460
[1617]2461 Parameters:
[1855]2462
[1843]2463 pflag: Bool indicating whether to turn this on (True) or
[1617]2464 off (False)
[1846]2465
[1617]2466 """
[1586]2467 varlist = vars()
2468 self._parallactify(pflag)
2469 self._add_history("parallactify", varlist)
2470
[1862]2471 @asaplog_post_dec
[992]2472 def convert_pol(self, poltype=None):
[1846]2473 """\
[992]2474 Convert the data to a different polarisation type.
[1565]2475 Note that you will need cross-polarisation terms for most conversions.
[1846]2476
[992]2477 Parameters:
[1855]2478
[992]2479 poltype: The new polarisation type. Valid types are:
[2431]2480 'linear', 'circular', 'stokes' and 'linpol'
[1846]2481
[992]2482 """
2483 varlist = vars()
[1859]2484 s = scantable(self._math._convertpol(self, poltype))
[1118]2485 s._add_history("convert_pol", varlist)
[992]2486 return s
2487
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy for plotting and for reverting rows the
        # user rejects
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive verification below; it starts as
        # 'H' (passed to _get_verify_action for the first prompt) and is
        # later set to 'N' when the user chooses to reject all remaining
        # rows. NOTE(review): exact 'H'/None semantics live in
        # _get_verify_action — confirm there.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # one verification pass per row: overlay original and smoothed
            # spectrum, then ask the user to accept or reject
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row, stop passing 'H' to the prompt
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2580
[2186]2581 @asaplog_post_dec
[2435]2582 def regrid_channel(self, width=5, plot=False, insitu=None):
2583 """\
2584 Regrid the spectra by the specified channel width
2585
2586 Parameters:
2587
2588 width: The channel width (float) of regridded spectra
2589 in the current spectral unit.
2590
2591 plot: [NOT IMPLEMENTED YET]
2592 plot the original and the regridded spectra.
2593 In this each indivual fit has to be approved, by
2594 typing 'y' or 'n'
2595
2596 insitu: if False a new scantable is returned.
2597 Otherwise, the scaling is done in-situ
2598 The default is taken from .asaprc (False)
2599
2600 """
2601 if insitu is None: insitu = rcParams['insitu']
2602 varlist = vars()
2603
2604 if plot:
2605 asaplog.post()
2606 asaplog.push("Verification plot is not implemtnetd yet.")
2607 asaplog.post("WARN")
2608
2609 s = self.copy()
2610 s._regrid_specchan(width)
2611
2612 s._add_history("regrid_channel", varlist)
2613
2614# if plot:
2615# from asap.asapplotter import new_asaplot
2616# theplot = new_asaplot(rcParams['plotter.gui'])
[2535]2617# from matplotlib import rc as rcp
2618# rcp('lines', linewidth=1)
[2435]2619# theplot.set_panels()
2620# ylab=s._get_ordinate_label()
2621# #theplot.palette(0,["#777777","red"])
2622# for r in xrange(s.nrow()):
2623# xsm=s._getabcissa(r)
2624# ysm=s._getspectrum(r)
2625# xorg=orgscan._getabcissa(r)
2626# yorg=orgscan._getspectrum(r)
2627# theplot.clear()
2628# theplot.hold()
2629# theplot.set_axes('ylabel',ylab)
2630# theplot.set_axes('xlabel',s._getabcissalabel(r))
2631# theplot.set_axes('title',s._getsourcename(r))
2632# theplot.set_line(label='Original',color="#777777")
2633# theplot.plot(xorg,yorg)
2634# theplot.set_line(label='Smoothed',color="red")
2635# theplot.plot(xsm,ysm)
2636# ### Ugly part for legend
2637# for i in [0,1]:
2638# theplot.subplots[0]['lines'].append(
2639# [theplot.subplots[0]['axes'].lines[i]]
2640# )
2641# theplot.release()
2642# ### Ugly part for legend
2643# theplot.subplots[0]['lines']=[]
2644# res = raw_input("Accept smoothing ([y]/n): ")
2645# if res.upper() == 'N':
2646# s._setspectrum(yorg, r)
2647# theplot.quit()
2648# del theplot
2649# del orgscan
2650
2651 if insitu: self._assign(s)
2652 else: return s
2653
2654 @asaplog_post_dec
[2186]2655 def _parse_wn(self, wn):
2656 if isinstance(wn, list) or isinstance(wn, tuple):
2657 return wn
2658 elif isinstance(wn, int):
2659 return [ wn ]
2660 elif isinstance(wn, str):
[2277]2661 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2662 val = wn.split('-')
2663 val = [int(val[0]), int(val[1])]
2664 val.sort()
2665 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2666 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2667 val = int(wn[2:])+1
2668 res = [i for i in xrange(val)]
[2277]2669 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2670 val = int(wn[:-2])+1
2671 res = [i for i in xrange(val)]
[2277]2672 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2673 val = int(wn[1:])
2674 res = [i for i in xrange(val)]
[2277]2675 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2676 val = int(wn[:-1])
2677 res = [i for i in xrange(val)]
[2411]2678 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2679 # then interpreted in C++
2680 # side as [a,a+1,...,a_nyq]
2681 # (CAS-3759)
[2186]2682 val = int(wn[2:])
[2411]2683 res = [val, -999]
2684 #res = [i for i in xrange(val, self.nchan()/2+1)]
2685 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2686 # then interpreted in C++
2687 # side as [a,a+1,...,a_nyq]
2688 # (CAS-3759)
[2186]2689 val = int(wn[:-2])
[2411]2690 res = [val, -999]
2691 #res = [i for i in xrange(val, self.nchan()/2+1)]
2692 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2693 # then interpreted in C++
2694 # side as [a+1,a+2,...,a_nyq]
2695 # (CAS-3759)
[2186]2696 val = int(wn[1:])+1
[2411]2697 res = [val, -999]
2698 #res = [i for i in xrange(val, self.nchan()/2+1)]
2699 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2700 # then interpreted in C++
2701 # side as [a+1,a+2,...,a_nyq]
2702 # (CAS-3759)
[2186]2703 val = int(wn[:-1])+1
[2411]2704 res = [val, -999]
2705 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2706
[2186]2707 return res
2708 else:
2709 msg = 'wrong value given for addwn/rejwn'
2710 raise RuntimeError(msg)
2711
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.
        """

        try:
            varlist = vars()
            # apply the documented defaults; inbltable is mandatory
            if retfitres is None: retfitres = False
            if inbltable is None: raise ValueError("bltable missing.")
            if outbltable is None: outbltable = ''
            if overwrite is None: overwrite = False

            if insitu is None: insitu = rcParams['insitu']
            # operate on self only in in-situ mode, otherwise on a copy
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # the C++ layer does the fit/subtraction; the 4th argument
            # tells it whether outbltable already exists on disk
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # sres is only parsed when fitting results were requested
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            # four-way return contract, see docstring cases (1)-(4)
            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2787
2788 @asaplog_post_dec
[2810]2789 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
[2767]2790 """\
2791 Subtract baseline based on parameters written in the input list.
2792
2793 Parameters:
[2809]2794 insitu: if True, baseline fitting/subtraction is done
[2810]2795 in-situ. If False, a new scantable with
2796 baseline subtracted is returned. Actually,
2797 format of the returned value depends on both
2798 insitu and retfitres (see below).
[2767]2799 The default is taken from .asaprc (False)
[2810]2800 retfitres: if True, the results of baseline fitting (i.e.,
2801 coefficients and rms) are returned.
2802 default is False.
2803 The format of the returned value of this
2804 function varies as follows:
2805 (1) in case insitu=True and retfitres=True:
2806 fitting result.
2807 (2) in case insitu=True and retfitres=False:
2808 None.
2809 (3) in case insitu=False and retfitres=True:
2810 a dictionary containing a new scantable
2811 (with baseline subtracted) and the fitting
2812 results.
2813 (4) in case insitu=False and retfitres=False:
2814 a new scantable (with baseline subtracted).
[2767]2815 blinfo: baseline parameter set stored in a dictionary
2816 or a list of dictionary. Each dictionary
2817 corresponds to each spectrum and must contain
2818 the following keys and values:
2819 'row': row number,
2820 'blfunc': function name. available ones include
2821 'poly', 'chebyshev', 'cspline' and
2822 'sinusoid',
2823 'order': maximum order of polynomial. needed
2824 if blfunc='poly' or 'chebyshev',
2825 'npiece': number or piecewise polynomial.
2826 needed if blfunc='cspline',
2827 'nwave': a list of sinusoidal wave numbers.
2828 needed if blfunc='sinusoid', and
2829 'masklist': min-max windows for channel mask.
2830 the specified ranges will be used
2831 for fitting.
2832 bltable: name of output baseline table where baseline
2833 parameters and fitting results recorded.
2834 default is ''(no output).
[2809]2835 overwrite: if True when an existing baseline table is
2836 specified for bltable, overwrites it.
2837 Otherwise there is no harm.
[2767]2838 default is False.
2839
2840 Example:
2841 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
2842 'masklist':[[10,350],[352,510]]},
2843 {'row':1, 'blfunc':'cspline', 'npiece':3,
2844 'masklist':[[3,16],[19,404],[407,511]]}
2845 ])
2846
2847 the first spectrum (row=0) will be fitted with polynomial
2848 of order=5 and the next one (row=1) will be fitted with cubic
2849 spline consisting of 3 pieces.
2850 """
2851
2852 try:
2853 varlist = vars()
[2810]2854 if retfitres is None: retfitres = False
[2767]2855 if blinfo is None: blinfo = []
2856 if bltable is None: bltable = ''
2857 if overwrite is None: overwrite = False
2858
2859 if insitu is None: insitu = rcParams['insitu']
2860 if insitu:
2861 workscan = self
2862 else:
2863 workscan = self.copy()
2864
2865 nrow = workscan.nrow()
2866
2867 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
2868
2869 print "in_blinfo=< "+ str(in_blinfo)+" >"
2870
2871 sres = workscan._sub_baseline(in_blinfo,
[2810]2872 retfitres,
[2767]2873 bltable,
2874 os.path.exists(bltable),
2875 overwrite)
[2810]2876 if retfitres: res = parse_fitresult(sres)
[2767]2877
2878 workscan._add_history('sub_baseline', varlist)
2879
2880 if insitu:
2881 self._assign(workscan)
[2810]2882 if retfitres:
2883 return res
2884 else:
2885 return None
[2767]2886 else:
[2810]2887 if retfitres:
2888 return {'scantable': workscan, 'fitresults': res}
2889 else:
2890 return workscan
[2767]2891
2892 except RuntimeError, e:
2893 raise_fitting_failure_exception(e)
2894
2895 @asaplog_post_dec
[2713]2896 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
2897 whichrow=None, uselinefinder=None, edge=None,
2898 threshold=None, chan_avg_limit=None):
2899 """\
2900 Calculates and returns model selection criteria for a specified
2901 baseline model and a given spectrum data.
2902 Available values include Akaike Information Criterion (AIC), the
2903 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
2904 Bayesian Information Criterion (BIC) and the Generalised Cross
2905 Validation (GCV).
[2186]2906
[2713]2907 Parameters:
2908 value: name of model selection criteria to calculate.
2909 available ones include 'aic', 'aicc', 'bic' and
2910 'gcv'. default is 'aicc'.
2911 blfunc: baseline function name. available ones include
2912 'chebyshev', 'cspline' and 'sinusoid'.
2913 default is 'chebyshev'.
2914 order: parameter for basline function. actually stands for
2915 order of polynomial (order) for 'chebyshev',
2916 number of spline pieces (npiece) for 'cspline' and
2917 maximum wave number for 'sinusoid', respectively.
2918 default is 5 (which is also the default order value
2919 for [auto_]chebyshev_baseline()).
2920 mask: an optional mask. default is [].
2921 whichrow: row number. default is 0 (the first row)
2922 uselinefinder: use sd.linefinder() to flag out line regions
2923 default is True.
2924 edge: an optional number of channel to drop at
2925 the edge of spectrum. If only one value is
2926 specified, the same number will be dropped
2927 from both sides of the spectrum. Default
2928 is to keep all channels. Nested tuples
2929 represent individual edge selection for
2930 different IFs (a number of spectral channels
2931 can be different)
2932 default is (0, 0).
2933 threshold: the threshold used by line finder. It is
2934 better to keep it large as only strong lines
2935 affect the baseline solution.
2936 default is 3.
2937 chan_avg_limit: a maximum number of consequtive spectral
2938 channels to average during the search of
2939 weak and broad lines. The default is no
2940 averaging (and no search for weak lines).
2941 If such lines can affect the fitted baseline
2942 (e.g. a high order polynomial is fitted),
2943 increase this parameter (usually values up
2944 to 8 are reasonable). Most users of this
2945 method should find the default value sufficient.
2946 default is 1.
2947
2948 Example:
2949 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
2950 """
2951
2952 try:
2953 varlist = vars()
2954
2955 if value is None: value = 'aicc'
2956 if blfunc is None: blfunc = 'chebyshev'
2957 if order is None: order = 5
2958 if mask is None: mask = []
2959 if whichrow is None: whichrow = 0
2960 if uselinefinder is None: uselinefinder = True
2961 if edge is None: edge = (0, 0)
2962 if threshold is None: threshold = 3
2963 if chan_avg_limit is None: chan_avg_limit = 1
2964
2965 return self._calc_aic(value, blfunc, order, mask,
2966 whichrow, uselinefinder, edge,
2967 threshold, chan_avg_limit)
2968
2969 except RuntimeError, e:
2970 raise_fitting_failure_exception(e)
2971
[1862]2972 @asaplog_post_dec
[2771]2973 def sinusoid_baseline(self, mask=None, applyfft=None,
[2269]2974 fftmethod=None, fftthresh=None,
[2771]2975 addwn=None, rejwn=None,
2976 insitu=None,
2977 clipthresh=None, clipniter=None,
2978 plot=None,
2979 getresidual=None,
2980 showprogress=None, minnrow=None,
2981 outlog=None,
[2767]2982 blfile=None, csvformat=None,
2983 bltable=None):
[2047]2984 """\
[2349]2985 Return a scan which has been baselined (all rows) with sinusoidal
2986 functions.
2987
[2047]2988 Parameters:
[2186]2989 mask: an optional mask
2990 applyfft: if True use some method, such as FFT, to find
2991 strongest sinusoidal components in the wavenumber
2992 domain to be used for baseline fitting.
2993 default is True.
2994 fftmethod: method to find the strong sinusoidal components.
2995 now only 'fft' is available and it is the default.
2996 fftthresh: the threshold to select wave numbers to be used for
2997 fitting from the distribution of amplitudes in the
2998 wavenumber domain.
2999 both float and string values accepted.
3000 given a float value, the unit is set to sigma.
3001 for string values, allowed formats include:
[2349]3002 'xsigma' or 'x' (= x-sigma level. e.g.,
3003 '3sigma'), or
[2186]3004 'topx' (= the x strongest ones, e.g. 'top5').
3005 default is 3.0 (unit: sigma).
3006 addwn: the additional wave numbers to be used for fitting.
3007 list or integer value is accepted to specify every
3008 wave numbers. also string value can be used in case
3009 you need to specify wave numbers in a certain range,
3010 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3011 '<a' (= 0,1,...,a-2,a-1),
3012 '>=a' (= a, a+1, ... up to the maximum wave
3013 number corresponding to the Nyquist
3014 frequency for the case of FFT).
[2411]3015 default is [0].
[2186]3016 rejwn: the wave numbers NOT to be used for fitting.
3017 can be set just as addwn but has higher priority:
3018 wave numbers which are specified both in addwn
3019 and rejwn will NOT be used. default is [].
[2771]3020 insitu: if False a new scantable is returned.
3021 Otherwise, the scaling is done in-situ
3022 The default is taken from .asaprc (False)
[2081]3023 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3024 clipniter: maximum number of iteration of 'clipthresh'-sigma
3025 clipping (default is 0)
[2081]3026 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3027 plot the fit and the residual. In this each
3028 indivual fit has to be approved, by typing 'y'
3029 or 'n'
3030 getresidual: if False, returns best-fit values instead of
3031 residual. (default is True)
[2189]3032 showprogress: show progress status for large data.
3033 default is True.
3034 minnrow: minimum number of input spectra to show.
3035 default is 1000.
[2081]3036 outlog: Output the coefficients of the best-fit
3037 function to logger (default is False)
3038 blfile: Name of a text file in which the best-fit
3039 parameter values to be written
[2186]3040 (default is '': no file/logger output)
[2641]3041 csvformat: if True blfile is csv-formatted, default is False.
[2767]3042 bltable: name of a baseline table where fitting results
3043 (coefficients, rms, etc.) are to be written.
3044 if given, fitting results will NOT be output to
3045 scantable (insitu=True) or None will be
3046 returned (insitu=False).
3047 (default is "": no table output)
[2047]3048
3049 Example:
[2349]3050 # return a scan baselined by a combination of sinusoidal curves
3051 # having wave numbers in spectral window up to 10,
[2047]3052 # also with 3-sigma clipping, iteration up to 4 times
[2186]3053 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]3054
3055 Note:
3056 The best-fit parameter values output in logger and/or blfile are now
3057 based on specunit of 'channel'.
[2047]3058 """
3059
[2186]3060 try:
3061 varlist = vars()
[2047]3062
[2186]3063 if insitu is None: insitu = rcParams['insitu']
3064 if insitu:
3065 workscan = self
3066 else:
3067 workscan = self.copy()
3068
[2410]3069 if mask is None: mask = []
[2186]3070 if applyfft is None: applyfft = True
3071 if fftmethod is None: fftmethod = 'fft'
3072 if fftthresh is None: fftthresh = 3.0
[2411]3073 if addwn is None: addwn = [0]
[2186]3074 if rejwn is None: rejwn = []
3075 if clipthresh is None: clipthresh = 3.0
3076 if clipniter is None: clipniter = 0
3077 if plot is None: plot = False
3078 if getresidual is None: getresidual = True
[2189]3079 if showprogress is None: showprogress = True
3080 if minnrow is None: minnrow = 1000
[2186]3081 if outlog is None: outlog = False
3082 if blfile is None: blfile = ''
[2641]3083 if csvformat is None: csvformat = False
[2767]3084 if bltable is None: bltable = ''
[2047]3085
[2767]3086 sapplyfft = 'true' if applyfft else 'false'
3087 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
[2641]3088
[2767]3089 scsvformat = 'T' if csvformat else 'F'
3090
[2081]3091 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2767]3092 workscan._sinusoid_baseline(mask,
3093 fftinfo,
3094 #applyfft, fftmethod.lower(),
3095 #str(fftthresh).lower(),
[2349]3096 workscan._parse_wn(addwn),
[2643]3097 workscan._parse_wn(rejwn),
3098 clipthresh, clipniter,
3099 getresidual,
[2349]3100 pack_progress_params(showprogress,
[2641]3101 minnrow),
[2767]3102 outlog, scsvformat+blfile,
3103 bltable)
[2186]3104 workscan._add_history('sinusoid_baseline', varlist)
[2767]3105
3106 if bltable == '':
3107 if insitu:
3108 self._assign(workscan)
3109 else:
3110 return workscan
[2047]3111 else:
[2767]3112 if not insitu:
3113 return None
[2047]3114
3115 except RuntimeError, e:
[2186]3116 raise_fitting_failure_exception(e)
[2047]3117
3118
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None,
                               insitu=None,
                               clipthresh=None, clipniter=None,
                               edge=None, threshold=None, chan_avg_limit=None,
                               plot=None,
                               getresidual=None,
                               showprogress=None, minnrow=None,
                               outlog=None,
                               blfile=None, csvformat=None,
                               bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [0].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the raw arguments (before defaults are filled in) for
            # the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for unset keywords
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT options are packed into one comma-separated string
            # for the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # blfile is prefixed with a one-character 'T'/'F' csv-format flag
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask,
                                             fftinfo,
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile, bltable)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            # with a baseline table the results are not kept in the
            # scantable: nothing is assigned and None is returned for
            # insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3286
3287 @asaplog_post_dec
[2771]3288 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]3289 clipthresh=None, clipniter=None, plot=None,
3290 getresidual=None, showprogress=None, minnrow=None,
[2767]3291 outlog=None, blfile=None, csvformat=None,
3292 bltable=None):
[1846]3293 """\
[2349]3294 Return a scan which has been baselined (all rows) by cubic spline
3295 function (piecewise cubic polynomial).
3296
[513]3297 Parameters:
[2771]3298 mask: An optional mask
3299 npiece: Number of pieces. (default is 2)
[2189]3300 insitu: If False a new scantable is returned.
3301 Otherwise, the scaling is done in-situ
3302 The default is taken from .asaprc (False)
3303 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3304 clipniter: maximum number of iteration of 'clipthresh'-sigma
3305 clipping (default is 0)
[2189]3306 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3307 plot the fit and the residual. In this each
3308 indivual fit has to be approved, by typing 'y'
3309 or 'n'
3310 getresidual: if False, returns best-fit values instead of
3311 residual. (default is True)
3312 showprogress: show progress status for large data.
3313 default is True.
3314 minnrow: minimum number of input spectra to show.
3315 default is 1000.
3316 outlog: Output the coefficients of the best-fit
3317 function to logger (default is False)
3318 blfile: Name of a text file in which the best-fit
3319 parameter values to be written
3320 (default is "": no file/logger output)
[2641]3321 csvformat: if True blfile is csv-formatted, default is False.
[2767]3322 bltable: name of a baseline table where fitting results
3323 (coefficients, rms, etc.) are to be written.
3324 if given, fitting results will NOT be output to
3325 scantable (insitu=True) or None will be
3326 returned (insitu=False).
3327 (default is "": no table output)
[1846]3328
[2012]3329 Example:
[2349]3330 # return a scan baselined by a cubic spline consisting of 2 pieces
3331 # (i.e., 1 internal knot),
[2012]3332 # also with 3-sigma clipping, iteration up to 4 times
3333 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]3334
3335 Note:
3336 The best-fit parameter values output in logger and/or blfile are now
3337 based on specunit of 'channel'.
[2012]3338 """
3339
[2186]3340 try:
3341 varlist = vars()
3342
3343 if insitu is None: insitu = rcParams['insitu']
3344 if insitu:
3345 workscan = self
3346 else:
3347 workscan = self.copy()
[1855]3348
[2410]3349 if mask is None: mask = []
[2189]3350 if npiece is None: npiece = 2
3351 if clipthresh is None: clipthresh = 3.0
3352 if clipniter is None: clipniter = 0
3353 if plot is None: plot = False
3354 if getresidual is None: getresidual = True
3355 if showprogress is None: showprogress = True
3356 if minnrow is None: minnrow = 1000
3357 if outlog is None: outlog = False
3358 if blfile is None: blfile = ''
[2767]3359 if csvformat is None: csvformat = False
3360 if bltable is None: bltable = ''
[1855]3361
[2767]3362 scsvformat = 'T' if csvformat else 'F'
[2641]3363
[2012]3364 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]3365 workscan._cspline_baseline(mask, npiece,
3366 clipthresh, clipniter,
[2349]3367 getresidual,
3368 pack_progress_params(showprogress,
[2641]3369 minnrow),
[2767]3370 outlog, scsvformat+blfile,
3371 bltable)
[2012]3372 workscan._add_history("cspline_baseline", varlist)
[2767]3373
3374 if bltable == '':
3375 if insitu:
3376 self._assign(workscan)
3377 else:
3378 return workscan
[2012]3379 else:
[2767]3380 if not insitu:
3381 return None
[2012]3382
3383 except RuntimeError, e:
[2186]3384 raise_fitting_failure_exception(e)
[1855]3385
    @asaplog_post_dec
    def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            npiece:         Number of pieces. (default is 2)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the raw arguments (before defaults are filled in) for
            # the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for unset keywords
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # blfile is prefixed with a one-character 'T'/'F' csv-format flag
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_cspline_baseline(mask, npiece,
                                            clipthresh, clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog,
                                            scsvformat+blfile,
                                            bltable)
            workscan._add_history("auto_cspline_baseline", varlist)

            # with a baseline table the results are not kept in the
            # scantable: nothing is assigned and None is returned for
            # insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[513]3511
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order
            # up to 5, also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the raw arguments (before defaults are filled in) for
            # the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for unset keywords
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # blfile is prefixed with a one-character 'T'/'F' csv-format flag
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # with a baseline table the results are not kept in the
            # scantable: nothing is assigned and None is returned for
            # insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3609
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the raw arguments (before defaults are filled in) for
            # the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for unset keywords
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # blfile is prefixed with a one-character 'T'/'F' csv-format flag
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # with a baseline table the results are not kept in the
            # scantable: nothing is assigned and None is returned for
            # insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3732
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unset keyword arguments by their documented defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag travels to the C++ layer as a one-character
            # 'T'/'F' prefix glued onto the blfile name
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive path: fit row by row in Python so each fit
                # can be inspected and accepted or rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                        os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" forces a prompt on the first row,
                # "Y" means 'accept all remaining fits without plotting'
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: the whole per-row loop runs in C++
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable) #

            workscan._add_history("poly_baseline", varlist)

            # NOTE(review): unlike auto_poly_baseline, this method does not
            # special-case a non-empty 'bltable' here — confirm whether the
            # docstring's "None will be returned" promise holds for this path.
            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]3882
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unset keyword arguments by their documented defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag travels to the C++ layer as a one-character
            # 'T'/'F' prefix glued onto the blfile name
            scsvformat = 'T' if csvformat else 'F'

            # expand 'edge' to one (left, right) pair per IF
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run the line finder and fit row by row
                # in Python so the user can accept or reject each fit
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" forces a prompt on the first row,
                # "Y" means 'accept all remaining fits without plotting'
                action = "H"
                for r in rows:
                    # edge list is flat: entries (2*IF, 2*IF+1) belong to this IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the channels the line finder left unmasked
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                    f._p.unmap()
                    f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: the whole per-row loop runs in C++
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # with a baseline table requested, results go to the table only
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]4067
4068 def _init_blinfo(self):
4069 """\
4070 Initialise the following three auxiliary members:
4071 blpars : parameters of the best-fit baseline,
4072 masklists : mask data (edge positions of masked channels) and
4073 actualmask : mask data (in boolean list),
4074 to keep for use later (including output to logger/text files).
4075 Used by poly_baseline() and auto_poly_baseline() in case of
4076 'plot=True'.
4077 """
4078 self.blpars = []
4079 self.masklists = []
4080 self.actualmask = []
4081 return
[880]4082
[2012]4083 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4084 """\
4085 Append baseline-fitting related info to blpars, masklist and
4086 actualmask.
4087 """
4088 self.blpars.append(data_blpars)
4089 self.masklists.append(data_masklists)
4090 self.actualmask.append(data_actualmask)
4091 return
4092
[1862]4093 @asaplog_post_dec
[914]4094 def rotate_linpolphase(self, angle):
[1846]4095 """\
[914]4096 Rotate the phase of the complex polarization O=Q+iU correlation.
4097 This is always done in situ in the raw data. So if you call this
4098 function more than once then each call rotates the phase further.
[1846]4099
[914]4100 Parameters:
[1846]4101
[914]4102 angle: The angle (degrees) to rotate (add) by.
[1846]4103
4104 Example::
4105
[914]4106 scan.rotate_linpolphase(2.3)
[1846]4107
[914]4108 """
4109 varlist = vars()
[936]4110 self._math._rotate_linpolphase(self, angle)
[914]4111 self._add_history("rotate_linpolphase", varlist)
4112 return
[710]4113
[1862]4114 @asaplog_post_dec
[914]4115 def rotate_xyphase(self, angle):
[1846]4116 """\
[914]4117 Rotate the phase of the XY correlation. This is always done in situ
4118 in the data. So if you call this function more than once
4119 then each call rotates the phase further.
[1846]4120
[914]4121 Parameters:
[1846]4122
[914]4123 angle: The angle (degrees) to rotate (add) by.
[1846]4124
4125 Example::
4126
[914]4127 scan.rotate_xyphase(2.3)
[1846]4128
[914]4129 """
4130 varlist = vars()
[936]4131 self._math._rotate_xyphase(self, angle)
[914]4132 self._add_history("rotate_xyphase", varlist)
4133 return
4134
[1862]4135 @asaplog_post_dec
[914]4136 def swap_linears(self):
[1846]4137 """\
[1573]4138 Swap the linear polarisations XX and YY, or better the first two
[1348]4139 polarisations as this also works for ciculars.
[914]4140 """
4141 varlist = vars()
[936]4142 self._math._swap_linears(self)
[914]4143 self._add_history("swap_linears", varlist)
4144 return
4145
[1862]4146 @asaplog_post_dec
[914]4147 def invert_phase(self):
[1846]4148 """\
[914]4149 Invert the phase of the complex polarisation
4150 """
4151 varlist = vars()
[936]4152 self._math._invert_phase(self)
[914]4153 self._add_history("invert_phase", varlist)
4154 return
4155
[1862]4156 @asaplog_post_dec
[876]4157 def add(self, offset, insitu=None):
[1846]4158 """\
[513]4159 Return a scan where all spectra have the offset added
[1846]4160
[513]4161 Parameters:
[1846]4162
[513]4163 offset: the offset
[1855]4164
[513]4165 insitu: if False a new scantable is returned.
4166 Otherwise, the scaling is done in-situ
4167 The default is taken from .asaprc (False)
[1846]4168
[513]4169 """
4170 if insitu is None: insitu = rcParams['insitu']
[876]4171 self._math._setinsitu(insitu)
[513]4172 varlist = vars()
[876]4173 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]4174 s._add_history("add", varlist)
[876]4175 if insitu:
4176 self._assign(s)
4177 else:
[513]4178 return s
4179
    @asaplog_post_dec
    def scale(self, factor, tsys=True, insitu=None):
        """\

        Return a scan where all spectra are scaled by the given 'factor'

        Parameters:

            factor: the scaling factor (float, 1D float list, or a 2D
                    float list/array giving one factor list per row)

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

            tsys: if True (default) then apply the operation to Tsys
                  as well as the data

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = None
        import numpy
        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
            # nested list/array: one factor vector per row, handled by
            # the asapmath helper (which honours insitu itself)
            if isinstance(factor[0], list) or isinstance(factor[0],
                                                         numpy.ndarray):
                from asapmath import _array2dOp
                s = _array2dOp( self, factor, "MUL", tsys, insitu )
            else:
                # flat list/array: one factor per channel
                s = scantable( self._math._arrayop( self, factor,
                                                    "MUL", tsys ) )
        else:
            # scalar factor
            s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
        s._add_history("scale", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
4218
    @preserve_selection
    def set_sourcetype(self, match, matchtype="pattern",
                       sourcetype="reference"):
        """\
        Set the type of the source to be an source or reference scan
        using the provided pattern.

        Parameters:

            match: a Unix style pattern, regular expression or selector

            matchtype: 'pattern' (default) UNIX style pattern or
                       'regex' regular expression

            sourcetype: the type of the source to use (source/reference)

        Raises:

            ValueError: if sourcetype or matchtype is not recognised

        """
        varlist = vars()
        stype = -1
        # map user-friendly names onto the integer codes used below:
        # 1 == reference/off, 0 == source/on
        if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
            stype = 1
        elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
            stype = 0
        else:
            raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
        if matchtype.lower().startswith("p"):
            matchtype = "pattern"
        elif matchtype.lower().startswith("r"):
            matchtype = "regex"
        else:
            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
        sel = selector()
        if isinstance(match, selector):
            sel = match
        else:
            # select rows whose SRCNAME matches the pattern/regex
            sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
        # the previous selection is restored by @preserve_selection
        self.set_selection(sel)
        self._setsourcetype(stype)
        self._add_history("set_sourcetype", varlist)
[1502]4258
[2818]4259
    def set_sourcename(self, name):
        """\
        Set the source name via the underlying table (_setsourcename)
        and record the call in the scantable history.

        Parameters:

            name: the new source name

        """
        varlist = vars()
        self._setsourcename(name)
        self._add_history("set_sourcename", varlist)
4264
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove: Output = Toff * (on/off) - Ton

            mode: the on/off detection mode
                  'paired' (default)
                  identifies 'off' scans by the
                  trailing '_R' (Mopra/Parkes) or
                  '_e'/'_w' (Tid) and matches
                  on/off pairs from the observing pattern
                  'time'
                  finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # split the data into 'off' and 'on' copies by source type;
            # the original selection is restored by @preserve_selection
            from asap._asap import srctype
            sel = self.get_selection()
            #sel.set_query("SRCTYPE==psoff")
            sel.set_types(srctype.psoff)
            self.set_selection(sel)
            offs = self.copy()
            #sel.set_query("SRCTYPE==pson")
            sel.set_types(srctype.pson)
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # let the C++ layer pair each 'on' with the closest 'off' in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]4316
    @asaplog_post_dec
    def mx_quotient(self, mask = None, weight='median', preserve=True):
        """\
        Form a quotient using "off" beams when observing in "MX" mode.

        Parameters:

            mask: an optional mask to be used when weight == 'stddev'

            weight: How to average the off beams. Default is 'median'.

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are:

                          preserve: Output = Toff * (on/off) - Toff

                          remove: Output = Toff * (on/off) - Ton

        """
        # default handled via 'or' to avoid a mutable default argument
        mask = mask or ()
        varlist = vars()
        on = scantable(self._math._mx_extract(self, 'on'))
        preoff = scantable(self._math._mx_extract(self, 'off'))
        # collapse the off beams into a single time-averaged 'off'
        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
        from asapmath import quotient
        q = quotient(on, off, preserve)
        q._add_history("mx_quotient", varlist)
        return q
[513]4345
    @asaplog_post_dec
    def freq_switch(self, insitu=None):
        """\
        Apply frequency switching to the data.

        Parameters:

            insitu: if False a new scantable is returned.
                    Otherwise, the switching is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._freqswitch(self))
        s._add_history("freq_switch", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[718]4367
    @asaplog_post_dec
    def recalc_azel(self):
        """Recalculate the azimuth and elevation for each position."""
        varlist = vars()
        # in-place recalculation via the underlying table implementation
        self._recalcazel()
        self._add_history("recalc_azel", varlist)
        return
4375
[1862]4376 @asaplog_post_dec
[513]4377 def __add__(self, other):
[2574]4378 """
4379 implicit on all axes and on Tsys
4380 """
[513]4381 varlist = vars()
[2574]4382 s = self.__op( other, "ADD" )
[513]4383 s._add_history("operator +", varlist)
4384 return s
4385
[1862]4386 @asaplog_post_dec
[513]4387 def __sub__(self, other):
4388 """
4389 implicit on all axes and on Tsys
4390 """
4391 varlist = vars()
[2574]4392 s = self.__op( other, "SUB" )
[513]4393 s._add_history("operator -", varlist)
4394 return s
[710]4395
[1862]4396 @asaplog_post_dec
[513]4397 def __mul__(self, other):
4398 """
4399 implicit on all axes and on Tsys
4400 """
4401 varlist = vars()
[2574]4402 s = self.__op( other, "MUL" ) ;
[513]4403 s._add_history("operator *", varlist)
4404 return s
4405
[710]4406
[1862]4407 @asaplog_post_dec
[513]4408 def __div__(self, other):
4409 """
4410 implicit on all axes and on Tsys
4411 """
4412 varlist = vars()
[2574]4413 s = self.__op( other, "DIV" )
4414 s._add_history("operator /", varlist)
4415 return s
4416
4417 @asaplog_post_dec
4418 def __op( self, other, mode ):
[513]4419 s = None
4420 if isinstance(other, scantable):
[2574]4421 s = scantable(self._math._binaryop(self, other, mode))
[513]4422 elif isinstance(other, float):
4423 if other == 0.0:
[718]4424 raise ZeroDivisionError("Dividing by zero is not recommended")
[2574]4425 s = scantable(self._math._unaryop(self, other, mode, False))
[2144]4426 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]4427 if isinstance(other[0], list) \
4428 or isinstance(other[0], numpy.ndarray):
[2144]4429 from asapmath import _array2dOp
[2574]4430 s = _array2dOp( self, other, mode, False )
[2144]4431 else:
[2574]4432 s = scantable( self._math._arrayop( self, other,
4433 mode, False ) )
[513]4434 else:
[718]4435 raise TypeError("Other input is not a scantable or float value")
[513]4436 return s
4437
[1862]4438 @asaplog_post_dec
[530]4439 def get_fit(self, row=0):
[1846]4440 """\
[530]4441 Print or return the stored fits for a row in the scantable
[1846]4442
[530]4443 Parameters:
[1846]4444
[530]4445 row: the row which the fit has been applied to.
[1846]4446
[530]4447 """
4448 if row > self.nrow():
4449 return
[976]4450 from asap.asapfit import asapfit
[530]4451 fit = asapfit(self._getfit(row))
[1859]4452 asaplog.push( '%s' %(fit) )
4453 return fit.as_dict()
[530]4454
    @preserve_selection
    def flag_nans(self):
        """\
        Utility function to flag NaN values in the scantable.
        """
        import numpy
        basesel = self.get_selection()
        for i in range(self.nrow()):
            # narrow the selection to a single row so that _getspectrum(0)
            # reads exactly that row's spectrum
            sel = self.get_row_selector(i)
            self.set_selection(basesel+sel)
            nans = numpy.isnan(self._getspectrum(0))
            if numpy.any(nans):
                # convert numpy bools to plain Python bools for flag()
                bnans = [ bool(v) for v in nans]
                self.flag(bnans)
        # the original selection is restored by @preserve_selection
4469
[1588]4470 def get_row_selector(self, rowno):
[1992]4471 return selector(rows=[rowno])
[1573]4472
[484]4473 def _add_history(self, funcname, parameters):
[1435]4474 if not rcParams['scantable.history']:
4475 return
[484]4476 # create date
4477 sep = "##"
4478 from datetime import datetime
4479 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
4480 hist = dstr+sep
4481 hist += funcname+sep#cdate+sep
[2349]4482 if parameters.has_key('self'):
4483 del parameters['self']
[1118]4484 for k, v in parameters.iteritems():
[484]4485 if type(v) is dict:
[1118]4486 for k2, v2 in v.iteritems():
[484]4487 hist += k2
4488 hist += "="
[1118]4489 if isinstance(v2, scantable):
[484]4490 hist += 'scantable'
4491 elif k2 == 'mask':
[1118]4492 if isinstance(v2, list) or isinstance(v2, tuple):
[513]4493 hist += str(self._zip_mask(v2))
4494 else:
4495 hist += str(v2)
[484]4496 else:
[513]4497 hist += str(v2)
[484]4498 else:
4499 hist += k
4500 hist += "="
[1118]4501 if isinstance(v, scantable):
[484]4502 hist += 'scantable'
4503 elif k == 'mask':
[1118]4504 if isinstance(v, list) or isinstance(v, tuple):
[513]4505 hist += str(self._zip_mask(v))
4506 else:
4507 hist += str(v)
[484]4508 else:
4509 hist += str(v)
4510 hist += sep
4511 hist = hist[:-2] # remove trailing '##'
4512 self._addhistory(hist)
4513
[710]4514
[484]4515 def _zip_mask(self, mask):
4516 mask = list(mask)
4517 i = 0
4518 segments = []
4519 while mask[i:].count(1):
4520 i += mask[i:].index(1)
4521 if mask[i:].count(0):
4522 j = i + mask[i:].index(0)
4523 else:
[710]4524 j = len(mask)
[1118]4525 segments.append([i, j])
[710]4526 i = j
[484]4527 return segments
[714]4528
[626]4529 def _get_ordinate_label(self):
4530 fu = "("+self.get_fluxunit()+")"
4531 import re
4532 lbl = "Intensity"
[1118]4533 if re.match(".K.", fu):
[626]4534 lbl = "Brightness Temperature "+ fu
[1118]4535 elif re.match(".Jy.", fu):
[626]4536 lbl = "Flux density "+ fu
4537 return lbl
[710]4538
[876]4539 def _check_ifs(self):
[2349]4540# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]4541 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]4542 nchans = filter(lambda t: t > 0, nchans)
[876]4543 return (sum(nchans)/len(nchans) == nchans[0])
[976]4544
[1862]4545 @asaplog_post_dec
[1916]4546 def _fill(self, names, unit, average, opts={}):
[976]4547 first = True
4548 fullnames = []
4549 for name in names:
4550 name = os.path.expandvars(name)
4551 name = os.path.expanduser(name)
4552 if not os.path.exists(name):
4553 msg = "File '%s' does not exists" % (name)
4554 raise IOError(msg)
4555 fullnames.append(name)
4556 if average:
4557 asaplog.push('Auto averaging integrations')
[1079]4558 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]4559 for name in fullnames:
[1073]4560 tbl = Scantable(stype)
[2004]4561 if is_ms( name ):
4562 r = msfiller( tbl )
4563 else:
4564 r = filler( tbl )
[976]4565 msg = "Importing %s..." % (name)
[1118]4566 asaplog.push(msg, False)
[2349]4567 r.open(name, opts)
[2480]4568 rx = rcParams['scantable.reference']
4569 r.setreferenceexpr(rx)
[1843]4570 r.fill()
[976]4571 if average:
[1118]4572 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]4573 if not first:
4574 tbl = self._math._merge([self, tbl])
4575 Scantable.__init__(self, tbl)
[1843]4576 r.close()
[1118]4577 del r, tbl
[976]4578 first = False
[1861]4579 #flush log
4580 asaplog.post()
[976]4581 if unit is not None:
4582 self.set_fluxunit(unit)
[1824]4583 if not is_casapy():
4584 self.set_freqframe(rcParams['scantable.freqframe'])
[976]4585
    def _get_verify_action( self, msg, action=None ):
        # Interactively ask the user to verify an operation; returns one
        # of 'Y' (yes), 'N' (no), 'A' (accept all remaining) or
        # 'R' (reject all remaining). A pre-supplied valid 'action'
        # string is used without prompting; empty or unrecognised input
        # defaults to 'Y'; 'H'/'?' prints help and re-prompts.
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            # only the first character matters, case-insensitively
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            return self._get_verify_action(msg)
        else:
            return 'Y'
[2012]4604
[1402]4605 def __getitem__(self, key):
4606 if key < 0:
4607 key += self.nrow()
4608 if key >= self.nrow():
4609 raise IndexError("Row index out of range.")
4610 return self._getspectrum(key)
4611
4612 def __setitem__(self, key, value):
4613 if key < 0:
4614 key += self.nrow()
4615 if key >= self.nrow():
4616 raise IndexError("Row index out of range.")
4617 if not hasattr(value, "__len__") or \
4618 len(value) > self.nchan(self.getif(key)):
4619 raise ValueError("Spectrum length doesn't match.")
4620 return self._setspectrum(value, key)
4621
    def __len__(self):
        # number of rows (spectra) in the scantable
        return self.nrow()
4624
4625 def __iter__(self):
4626 for i in range(len(self)):
4627 yield self[i]
Note: See TracBrowser for help on using the repository browser.