source: trunk/python/scantable.py@ 2925

Last change on this file since 2925 was 2910, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: bug fix in sd.scantable.parse_spw_selection() to not include spws with empty channel range.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 205.7 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2751]4import re
[2315]5import tempfile
[1948]6import numpy
[1691]7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
[1824]12from asap.env import is_casapy
[876]13from asap._asap import Scantable
[2004]14from asap._asap import filler, msfiller
[1824]15from asap.parameters import rcParams
[1862]16from asap.logging import asaplog, asaplog_post_dec
[1824]17from asap.selector import selector
18from asap.linecatalog import linecatalog
[1600]19from asap.coordinate import coordinate
[1859]20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]21from asap.asapfitter import fitter
[102]22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before running
    *func* and restores it afterwards, even if *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # a scantable is a casacore table directory whose 'table.info'
    # first line declares "Type = Scantable" (type may be blank)
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    match_pattern = '^Type = (Scantable)? *$'
    return bool(re.match(match_pattern, first_line))
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # an MS is a casacore table directory whose 'table.info' first line
    # mentions 'Measurement Set'
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    return first_line.find('Measurement Set') != -1
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15]
                           (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                           or [[5,5],10], but can NOT be simplified to [5,10].
                           when [5,10] given, it is interpreted as
                           [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation: every element must be an int or a 2-int sequence ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."


    # --- canonicalise scalars and short lists to a 2-element form ---
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # commonedge is True when one pair applies to all IFs (no nested
    # sequences and at most two values)
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF specification: flatten, expanding bare ints to pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure.

    In verbose mode the error is pushed to the log; otherwise it is
    re-raised as a RuntimeError with an explanatory note appended.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress-display options as a '<true|false>,<minnrow>' string."""
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        # rows outside the table pack to '' and are dropped
        return [s for s in (do_pack_blinfo(item, maxirow) for item in blinfo)
                if s != '']
    # any other input type falls through and returns None, as before
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' (to be skipped by the caller) when blinfo['row'] is not
    smaller than maxirow. Raises ValueError when mandatory keys are
    missing or clipping/linefinder parameters are given incompletely.
    """
    dinfo = {}
    # mandatory keys; lists/nested lists are flattened to a
    # comma-separated string
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the scantable are silently skipped
    if irow >= maxirow: return ''

    # optional fit-shape parameters; only the one matching blfunc is used
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    # map fit-function name -> its mandatory shape-parameter key
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both of clipthresh/clipniter or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three of thresh/edge/chan_avg_limit or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # final colon-separated record consumed by the C++ layer
    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.

    Improvement: the manual index-based xrange/append loops are replaced
    with direct iteration and a list comprehension (same output).
    """
    res = []
    for entry in sres:
        (srow, scoeff, srms) = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if string *s* (after stripping) parses as a float.

    Fix: the previous implementation used a bare ``except`` together
    with a ``return`` inside ``finally``, which silently swallowed
    *every* exception (including KeyboardInterrupt/SystemExit). Only
    conversion failures are treated as "not a number" now; a non-string
    argument still raises AttributeError from strip(), as before.
    """
    s = s.strip()
    try:
        float(s)
    except ValueError:
        return False
    return True
313
def is_frequency(s):
    """Return True if string *s* ends with a frequency unit ('Hz'),
    case-insensitively.
    """
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a (number, frequency-with-unit) string pair into two
    floats scaled by the SI prefix carried by *s2* (e.g. 'GHz').

    *s1* is a bare number taken to be in the same unit as *s2*.
    Raises RuntimeError for malformed input.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the char just before the final 'Hz' is either part of the number
    # (no prefix) or an SI prefix letter
    prefix = s2[-3:-2]
    if is_number(prefix):
        return (float(s1), float(s2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
[2882]338
def is_velocity(s):
    """Return True if string *s* ends with a velocity unit ('m/s'),
    case-insensitively.
    """
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a (number, velocity-with-unit) string pair into two
    floats expressed in km/s (the default velocity unit).

    *s1* is a bare number taken to be in the same unit as *s2*.
    Raises RuntimeError for malformed input.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # char before the trailing 'm/s': digit means plain m/s, 'k' means km/s
    prefix = s2[-4:-3]
    if is_number(prefix):
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
[2882]364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert velocity *vel* (km/s) to frequency for *restfreq*,
    using the given doppler convention: 'radio', 'optical', or any
    other value for the relativistic formula.
    """
    # speed of light in km/s
    vel_c = 299792.458

    import math
    r = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - r)
    elif convention == 'optical':
        return restfreq / (1.0 + r)
    else:
        return restfreq * math.sqrt((1.0 - r) / (1.0 + r))
[2882]380
def get_restfreq_in_Hz(s_restfreq):
    """Parse a rest-frequency string such as '1.42GHz', '100 Hz' or
    '1420000000' and return its value in Hz as a float.

    Raises an Exception when the trailing unit is not (SI-prefixed)
    'Hz'; an unknown prefix letter raises ValueError from list.index.

    Improvements: the large block of commented-out prefix-dispatch code
    has been removed (the table lookup replaces it), and the py2-only
    'raise Exception, mesg' statement is written as a call.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # walk backwards collecting the alphabetic unit, then parse the
    # leading numeric part (scientific notation works: the digit before
    # the unit stops the scan)
    for i in range(len(s))[::-1]:
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        unitprefix = unit[0]
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unitprefix)]
        return value*factor
    else:
        mesg = "wrong unit of restfreq."
        raise Exception(mesg)
432
def normalise_restfreq(in_restfreq):
    """Normalise a rest-frequency specification to float Hz.

    Accepts a float/int/long, a unit string ('1.42GHz'), or a 1-D
    list/ndarray whose elements are any of those or dicts with 'name'
    and 'value' entries; dict values are normalised in place of the
    'value' entry. Raises Exception for unsupported types.
    """
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):
        if isinstance(in_restfreq, numpy.ndarray):
            # only flat arrays make sense here
            if len(in_restfreq.shape) > 1:
                mesg = "given in numpy.ndarray, in_restfreq must be 1-D."
                raise Exception, mesg

        res = []
        for i in xrange(len(in_restfreq)):
            elem = in_restfreq[i]
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, int) or isinstance(elem, long):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                # {'name': ..., 'value': ...} entries: normalise 'value'
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    dictelem = {}
                    dictelem["name"]  = elem["name"]
                    dictelem["value"] = float(elem["value"])
                    res.append(dictelem)
                elif isinstance(elem["value"], str):
                    dictelem = {}
                    dictelem["name"]  = elem["name"]
                    dictelem["value"] = get_restfreq_in_Hz(elem["value"])
                    res.append(dictelem)
            else:
                mesg = "restfreq elements must be float, int, or string."
                raise Exception, mesg
        return res
    else:
        mesg = "wrong type of restfreq given."
        raise Exception, mesg
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*; empty string/list means
    'leave the rest frequency unchanged'.
    """
    if (restfreq != '') and (restfreq != []):
        s.set_restfreqs(normalise_restfreq(restfreq))
480
[876]481class scantable(Scantable):
[1846]482 """\
483 The ASAP container for scans (single-dish data).
[102]484 """
[1819]485
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename: the name of an asap table on disk
                      or
                      the name of a rpfits/sdfits/ms file
                      (integrations within scans are auto averaged
                      and the whole file is read) or
                      [advanced] a reference to an existing scantable

            average: average all integrations within a scan on read.
                     The default (True) is taken from .asaprc.

            unit: brightness unit; must be consistent with K or Jy.
                  Over-rides the default selected by the filler
                  (input rpfits/sdfits/ms) or replaces the value
                  in existing scantables

            antenna: for MeasurementSet input data only:
                     Antenna selection. integer (id) or string
                     (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt: Whether to import direction from MS/POINTING
                   table properly or not.
                   This is effective only when filename is MS.
                   The default (True) is to import direction
                   from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": wrap an existing scantable reference
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # other regular file (e.g. rpfits/sdfits/NRO);
                    # 'freqref' is the NRO-specific filler option
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list of file names to fill from in one go
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]572
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name: the name of the outputfile. For format 'ASCII'
                  this is the root file name (data in 'name'.txt
                  and header in 'name'_header.txt)

            format: an optional file format. Default is ASAP.
                    Allowed are:

                    * 'ASAP' (save as ASAP [aips++] Table),
                    * 'SDFITS' (save as SDFITS file)
                    * 'ASCII' (saves as ascii text file)
                    * 'MS2' (saves as an casacore MeasurementSet V2)
                    * 'FITS' (save as image FITS - not readable by
                      class)
                    * 'CLASS' (save as FITS readable by CLASS)

            overwrite: If the file should be overwritten if it exists.
                       The default False is to return with warning
                       without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: fall back to 'scantable.<format>'
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output uses the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
631
[102]632 def copy(self):
[1846]633 """Return a copy of this scantable.
634
635 *Note*:
636
[1348]637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]638
639 Example::
640
[102]641 copiedscan = scan.copy()
[1846]642
[102]643 """
[876]644 sd = scantable(Scantable._copy(self))
[113]645 return sd
646
[1093]647 def drop_scan(self, scanid=None):
[1846]648 """\
[1093]649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
[1846]651
[1093]652 Parameters:
[1846]653
[1093]654 scanid: a (list of) scan number(s)
[1846]655
[1093]656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
[2315]661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
[1859]663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
[1093]670
[1594]671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
[102]678 def get_scan(self, scanid=None):
[1855]679 """\
[102]680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
[1846]682
683 *Note*:
684
[1348]685 See scantable.drop_scan() for the inverse operation.
[1846]686
[102]687 Parameters:
[1846]688
[513]689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
[1846]692
693 Example::
694
[513]695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]700 newscan = scan.get_scan([0, 2, 7, 10])
[1846]701
[102]702 """
703 if scanid is None:
[1859]704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
[102]706 try:
[946]707 bsel = self.get_selection()
708 sel = selector()
[102]709 if type(scanid) is str:
[946]710 sel.set_name(scanid)
[1594]711 return self._select_copy(sel)
[102]712 elif type(scanid) is int:
[946]713 sel.set_scans([scanid])
[1594]714 return self._select_copy(sel)
[381]715 elif type(scanid) is list:
[946]716 sel.set_scans(scanid)
[1594]717 return self._select_copy(sel)
[381]718 else:
[718]719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]720 raise TypeError(msg)
[102]721 except RuntimeError:
[1859]722 raise
[102]723
724 def __str__(self):
[2315]725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
[102]730
[2315]731 @asaplog_post_dec
[976]732 def summary(self, filename=None):
[1846]733 """\
[102]734 Print a summary of the contents of this scantable.
[1846]735
[102]736 Parameters:
[1846]737
[1931]738 filename: the name of a file to write the putput to
[102]739 Default - no file output
[1846]740
[102]741 """
742 if filename is not None:
[256]743 if filename is "":
744 filename = 'scantable_summary.txt'
[415]745 from os.path import expandvars, isdir
[411]746 filename = expandvars(filename)
[2286]747 if isdir(filename):
[718]748 msg = "Illegal file name '%s'." % (filename)
[1859]749 raise IOError(msg)
[2286]750 else:
751 filename = ""
752 Scantable._summary(self, filename)
[710]753
[1512]754 def get_spectrum(self, rowno):
[1471]755 """Return the spectrum for the current row in the scantable as a list.
[1846]756
[1471]757 Parameters:
[1846]758
[1573]759 rowno: the row number to retrieve the spectrum from
[1846]760
[1471]761 """
762 return self._getspectrum(rowno)
[946]763
[1471]764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
[1846]766
[1471]767 Parameters:
[1846]768
[1573]769 rowno: the row number to retrieve the mask from
[1846]770
[1471]771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
[1938]775 """Set the spectrum for the current row in the scantable.
[1846]776
[1471]777 Parameters:
[1846]778
[1855]779 spec: the new spectrum
[1846]780
[1855]781 rowno: the row number to set the spectrum for
782
[1471]783 """
[2348]784 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]785 return self._setspectrum(spec, rowno)
786
[1600]787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]789
790 *Note*:
791
[1600]792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
[1846]798 i.e.::
[1600]799
[1846]800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
[1600]803 Parameters:
[1846]804
[1600]805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
[946]810 def get_selection(self):
[1846]811 """\
[1005]812 Get the selection object currently set on this scantable.
[1846]813
814 Example::
815
[1005]816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
[1846]819
[946]820 """
821 return selector(self._getselection())
822
[1576]823 def set_selection(self, selection=None, **kw):
[1846]824 """\
[1005]825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
[1846]827
[1005]828 Parameters:
[1697]829
[1846]830 selection: a selector object (default unset the selection), or
[2431]831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
[1697]833
[1846]834 Examples::
[1697]835
[1005]836 sel = selector() # create a selection object
[1118]837 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
[1697]841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
[1846]845
[946]846 """
[1576]847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
[2320]855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
[1576]858 selection = selector(**kw)
[946]859 self._setselection(selection)
860
[1819]861 def get_row(self, row=0, insitu=None):
[1846]862 """\
[1819]863 Select a row in the scantable.
864 Return a scantable with single row.
[1846]865
[1819]866 Parameters:
[1846]867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
[1819]873 """
[2349]874 if insitu is None:
875 insitu = rcParams['insitu']
[1819]876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
[2349]881 sel = selector()
[1992]882 sel.set_rows([row])
[1819]883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
[2349]885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
[1819]887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                  'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask: an optional mask specifying where the statistic
                  should be determined.

            form: format string to print statistic values

            row: row number of spectrum to process.
                 (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '..._abc' variants return the abscissa (channel/freq) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report line per row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa variant: collect the channel axis value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the report through a temp file so asaplog gets it whole
        # NOTE(review): 'statunit' is only bound inside the loop above;
        # an empty scantable would hit a NameError here -- confirm.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
[1846]1002 """\
[1819]1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
[1846]1005
[1819]1006 Parameters:
[1846]1007
[1819]1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
[1855]1010
[1819]1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
[1846]1013
[1819]1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
[1118]1022 def stddev(self, mask=None):
[1846]1023 """\
[135]1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
[1846]1027
[135]1028 Parameters:
[1846]1029
[135]1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
[1846]1033 Example::
1034
[135]1035 scan.set_unit('channel')
[1118]1036 msk = scan.create_mask([100, 200], [500, 600])
[135]1037 scan.stddev(mask=m)
[1846]1038
[135]1039 """
[1118]1040 return self.stats(stat='stddev', mask=mask);
[135]1041
[1003]1042
[1259]1043 def get_column_names(self):
[1846]1044 """\
[1003]1045 Return a list of column names, which can be used for selection.
1046 """
[1259]1047 return list(Scantable.get_column_names(self))
[1003]1048
[1730]1049 def get_tsys(self, row=-1):
[1846]1050 """\
[113]1051 Return the System temperatures.
[1846]1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
[113]1057 Returns:
[1846]1058
[876]1059 a list of Tsys values for the current selection
[1846]1060
[113]1061 """
[1730]1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
[876]1064 return self._row_callback(self._gettsys, "Tsys")
[256]1065
[2406]1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
[1730]1069
[2406]1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
[2791]1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
[1730]1098 def get_weather(self, row=-1):
[1846]1099 """\
1100 Return the weather informations.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111
[1730]1112 values = self._get_column(self._get_weather, row)
1113 if row > -1:
1114 return {'temperature': values[0],
1115 'pressure': values[1], 'humidity' : values[2],
1116 'windspeed' : values[3], 'windaz' : values[4]
1117 }
1118 else:
1119 out = []
1120 for r in values:
1121
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
[876]1128 def _row_callback(self, callback, label):
1129 out = ""
[1118]1130 outvec = []
[1590]1131 sep = '-'*50
[876]1132 for i in range(self.nrow()):
1133 tm = self._gettime(i)
1134 src = self._getsourcename(i)
[1590]1135 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]1136 out += 'Time[%s]:\n' % (tm)
[1590]1137 if self.nbeam(-1) > 1:
1138 out += ' Beam[%d] ' % (self.getbeam(i))
1139 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1140 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]1141 outvec.append(callback(i))
1142 out += '= %3.3f\n' % (outvec[i])
[1590]1143 out += sep+'\n'
[1859]1144
1145 asaplog.push(sep)
1146 asaplog.push(" %s" % (label))
1147 asaplog.push(sep)
1148 asaplog.push(out)
[1861]1149 asaplog.post()
[1175]1150 return outvec
[256]1151
[1947]1152 def _get_column(self, callback, row=-1, *args):
[1070]1153 """
1154 """
1155 if row == -1:
[1947]1156 return [callback(i, *args) for i in range(self.nrow())]
[1070]1157 else:
[1819]1158 if 0 <= row < self.nrow():
[1947]1159 return callback(row, *args)
[256]1160
[1070]1161
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:        row no of integration. Default -1 return all rows

            asdatetime: return values as datetime objects rather than strings

            prec:       number of digits shown. Default -1 to automatic
                        calculation.
                        Note this number is equals to the digits of MVTime,
                        i.e., 0<prec<3: dates with hh:: only,
                        <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                        and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time of the
            # requested row(s))
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # Build a strptime format matching the precision: for low prec the
        # time string lacks fractional seconds / seconds / minutes, so strip
        # those components from the full format (note: py2 integer division).
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            # single-row request: _get_column returned a scalar string
            return datetime.strptime(times, format)
[102]1207
[1348]1208
1209 def get_inttime(self, row=-1):
[1846]1210 """\
[1348]1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
[1846]1213
[1348]1214 Parameters:
[1846]1215
[1348]1216 row: row no of integration. Default -1 return all rows.
[1846]1217
[1348]1218 """
[1573]1219 return self._get_column(self._getinttime, row)
[1348]1220
[1573]1221
[714]1222 def get_sourcename(self, row=-1):
[1846]1223 """\
[794]1224 Get a list source names for the observations.
[714]1225 Return a string for each integration in the scantable.
1226 Parameters:
[1846]1227
[1348]1228 row: row no of integration. Default -1 return all rows.
[1846]1229
[714]1230 """
[1070]1231 return self._get_column(self._getsourcename, row)
[714]1232
[794]1233 def get_elevation(self, row=-1):
[1846]1234 """\
[794]1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
[1846]1237
[794]1238 Parameters:
[1846]1239
[1348]1240 row: row no of integration. Default -1 return all rows.
[1846]1241
[794]1242 """
[1070]1243 return self._get_column(self._getelevation, row)
[794]1244
1245 def get_azimuth(self, row=-1):
[1846]1246 """\
[794]1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
[1846]1249
[794]1250 Parameters:
[1348]1251 row: row no of integration. Default -1 return all rows.
[1846]1252
[794]1253 """
[1070]1254 return self._get_column(self._getazimuth, row)
[794]1255
1256 def get_parangle(self, row=-1):
[1846]1257 """\
[794]1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
[1846]1260
[794]1261 Parameters:
[1846]1262
[1348]1263 row: row no of integration. Default -1 return all rows.
[1846]1264
[794]1265 """
[1070]1266 return self._get_column(self._getparangle, row)
[794]1267
[1070]1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
[1594]1271 Return a string for each integration in the scantable.
[1855]1272
[1070]1273 Parameters:
[1855]1274
[1070]1275 row: row no of integration. Default -1 return all rows
[1855]1276
[1070]1277 """
1278 return self._get_column(self._getdirection, row)
1279
[1391]1280 def get_directionval(self, row=-1):
[1846]1281 """\
[1391]1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
[1846]1284
[1391]1285 Parameters:
[1846]1286
[1391]1287 row: row no of integration. Default -1 return all rows
[1846]1288
[1391]1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
[1862]1292 @asaplog_post_dec
[102]1293 def set_unit(self, unit='channel'):
[1846]1294 """\
[102]1295 Set the unit for all following operations on this scantable
[1846]1296
[102]1297 Parameters:
[1846]1298
1299 unit: optional unit, default is 'channel'. Use one of '*Hz',
1300 'km/s', 'channel' or equivalent ''
1301
[102]1302 """
[484]1303 varlist = vars()
[1118]1304 if unit in ['', 'pixel', 'channel']:
[113]1305 unit = ''
1306 inf = list(self._getcoordinfo())
1307 inf[0] = unit
1308 self._setcoordinfo(inf)
[1118]1309 self._add_history("set_unit", varlist)
[113]1310
[1862]1311 @asaplog_post_dec
[484]1312 def set_instrument(self, instr):
[1846]1313 """\
[1348]1314 Set the instrument for subsequent processing.
[1846]1315
[358]1316 Parameters:
[1846]1317
[710]1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1320
[358]1321 """
1322 self._setInstrument(instr)
[1118]1323 self._add_history("set_instument", vars())
[358]1324
[1862]1325 @asaplog_post_dec
[1190]1326 def set_feedtype(self, feedtype):
[1846]1327 """\
[1190]1328 Overwrite the feed type, which might not be set correctly.
[1846]1329
[1190]1330 Parameters:
[1846]1331
[1190]1332 feedtype: 'linear' or 'circular'
[1846]1333
[1190]1334 """
1335 self._setfeedtype(feedtype)
1336 self._add_history("set_feedtype", vars())
1337
[1862]1338 @asaplog_post_dec
[2897]1339 def get_doppler(self):
1340 """\
1341 Get the doppler.
1342 """
1343 return self._getcoordinfo()[2]
1344
1345 @asaplog_post_dec
[276]1346 def set_doppler(self, doppler='RADIO'):
[1846]1347 """\
[276]1348 Set the doppler for all following operations on this scantable.
[1846]1349
[276]1350 Parameters:
[1846]1351
[276]1352 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1353
[276]1354 """
[484]1355 varlist = vars()
[276]1356 inf = list(self._getcoordinfo())
1357 inf[2] = doppler
1358 self._setcoordinfo(inf)
[1118]1359 self._add_history("set_doppler", vars())
[710]1360
[1862]1361 @asaplog_post_dec
[226]1362 def set_freqframe(self, frame=None):
[1846]1363 """\
[113]1364 Set the frame type of the Spectral Axis.
[1846]1365
[113]1366 Parameters:
[1846]1367
[591]1368 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1369 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1370 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1371
1372 Example::
1373
[113]1374 scan.set_freqframe('BARY')
[1846]1375
[113]1376 """
[1593]1377 frame = frame or rcParams['scantable.freqframe']
[484]1378 varlist = vars()
[1819]1379 # "REST" is not implemented in casacore
1380 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1381 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1382 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1383 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1384
[989]1385 if frame in valid:
[113]1386 inf = list(self._getcoordinfo())
1387 inf[1] = frame
1388 self._setcoordinfo(inf)
[1118]1389 self._add_history("set_freqframe", varlist)
[102]1390 else:
[1118]1391 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1392 raise TypeError(msg)
[710]1393
[1862]1394 @asaplog_post_dec
[989]1395 def set_dirframe(self, frame=""):
[1846]1396 """\
[989]1397 Set the frame type of the Direction on the sky.
[1846]1398
[989]1399 Parameters:
[1846]1400
[989]1401 frame: an optional frame type, default ''. Valid frames are:
1402 'J2000', 'B1950', 'GALACTIC'
[1846]1403
1404 Example:
1405
[989]1406 scan.set_dirframe('GALACTIC')
[1846]1407
[989]1408 """
1409 varlist = vars()
[1859]1410 Scantable.set_dirframe(self, frame)
[1118]1411 self._add_history("set_dirframe", varlist)
[989]1412
[113]1413 def get_unit(self):
[1846]1414 """\
[113]1415 Get the default unit set in this scantable
[1846]1416
[113]1417 Returns:
[1846]1418
[113]1419 A unit string
[1846]1420
[113]1421 """
1422 inf = self._getcoordinfo()
1423 unit = inf[0]
1424 if unit == '': unit = 'channel'
1425 return unit
[102]1426
[1862]1427 @asaplog_post_dec
[158]1428 def get_abcissa(self, rowno=0):
[1846]1429 """\
[158]1430 Get the abcissa in the current coordinate setup for the currently
[113]1431 selected Beam/IF/Pol
[1846]1432
[113]1433 Parameters:
[1846]1434
[226]1435 rowno: an optional row number in the scantable. Default is the
1436 first row, i.e. rowno=0
[1846]1437
[113]1438 Returns:
[1846]1439
[1348]1440 The abcissa values and the format string (as a dictionary)
[1846]1441
[113]1442 """
[256]1443 abc = self._getabcissa(rowno)
[710]1444 lbl = self._getabcissalabel(rowno)
[158]1445 return abc, lbl
[113]1446
[1862]1447 @asaplog_post_dec
[2322]1448 def flag(self, mask=None, unflag=False, row=-1):
[1846]1449 """\
[1001]1450 Flag the selected data using an optional channel mask.
[1846]1451
[1001]1452 Parameters:
[1846]1453
[1001]1454 mask: an optional channel mask, created with create_mask. Default
1455 (no mask) is all channels.
[1855]1456
[1819]1457 unflag: if True, unflag the data
[1846]1458
[2322]1459 row: an optional row number in the scantable.
1460 Default -1 flags all rows
1461
[1001]1462 """
1463 varlist = vars()
[1593]1464 mask = mask or []
[1994]1465 self._flag(row, mask, unflag)
[1001]1466 self._add_history("flag", varlist)
1467
[1862]1468 @asaplog_post_dec
[2322]1469 def flag_row(self, rows=None, unflag=False):
[1846]1470 """\
[1819]1471 Flag the selected data in row-based manner.
[1846]1472
[1819]1473 Parameters:
[1846]1474
[1843]1475 rows: list of row numbers to be flagged. Default is no row
[2322]1476 (must be explicitly specified to execute row-based
1477 flagging).
[1855]1478
[1819]1479 unflag: if True, unflag the data.
[1846]1480
[1819]1481 """
1482 varlist = vars()
[2322]1483 if rows is None:
1484 rows = []
[1859]1485 self._flag_row(rows, unflag)
[1819]1486 self._add_history("flag_row", varlist)
1487
[1862]1488 @asaplog_post_dec
[1819]1489 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1490 """\
[1819]1491 Flag the selected data outside a specified range (in channel-base)
[1846]1492
[1819]1493 Parameters:
[1846]1494
[1819]1495 uthres: upper threshold.
[1855]1496
[1819]1497 dthres: lower threshold
[1846]1498
[2322]1499 clipoutside: True for flagging data outside the range
1500 [dthres:uthres].
[1928]1501 False for flagging data inside the range.
[1855]1502
[1846]1503 unflag: if True, unflag the data.
1504
[1819]1505 """
1506 varlist = vars()
[1859]1507 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1508 self._add_history("clip", varlist)
1509
[1862]1510 @asaplog_post_dec
[1584]1511 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1512 """\
[1192]1513 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1514 Flagged data in the scantable get interpolated over the region.
[1192]1515 No taper is applied.
[1846]1516
[1192]1517 Parameters:
[1846]1518
[1579]1519 start: the start frequency (really a period within the
1520 bandwidth) or period to remove
[1855]1521
[1579]1522 end: the end frequency or period to remove
[1855]1523
[2431]1524 unit: the frequency unit (default 'MHz') or '' for
[1579]1525 explicit lag channels
[1846]1526
1527 *Notes*:
1528
[1579]1529 It is recommended to flag edges of the band or strong
[1348]1530 signals beforehand.
[1846]1531
[1192]1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
[1579]1536 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1537 if not (unit == "" or base.has_key(unit)):
[1192]1538 raise ValueError("%s is not a valid unit." % unit)
[1859]1539 if unit == "":
1540 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1541 else:
1542 s = scantable(self._math._lag_flag(self, start*base[unit],
1543 end*base[unit], "frequency"))
[1192]1544 s._add_history("lag_flag", varlist)
1545 if insitu:
1546 self._assign(s)
1547 else:
1548 return s
[1001]1549
[1862]1550 @asaplog_post_dec
[2349]1551 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1552 """\
1553 Apply FFT to the spectra.
1554 Flagged data in the scantable get interpolated over the region.
1555
1556 Parameters:
[2186]1557
1558 rowno: The row number(s) to be processed. int, list
[2349]1559 and tuple are accepted. By default (None), FFT
[2186]1560 is applied to the whole data.
1561
1562 mask: Auxiliary channel mask(s). Given as a boolean
1563 list, it is applied to all specified rows.
1564 A list of boolean lists can also be used to
1565 apply different masks. In the latter case, the
1566 length of 'mask' must be the same as that of
[2349]1567 'rowno'. The default is None.
[2177]1568
1569 getrealimag: If True, returns the real and imaginary part
1570 values of the complex results.
1571 If False (the default), returns the amplitude
1572 (absolute value) normalised with Ndata/2 and
1573 phase (argument, in unit of radian).
1574
1575 Returns:
1576
[2186]1577 A list of dictionaries containing the results for each spectrum.
1578 Each dictionary contains two values, the real and the imaginary
1579 parts when getrealimag = True, or the amplitude(absolute value)
1580 and the phase(argument) when getrealimag = False. The key for
1581 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1582 respectively.
1583 """
[2349]1584 if rowno is None:
1585 rowno = []
[2177]1586 if isinstance(rowno, int):
1587 rowno = [rowno]
1588 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1589 raise TypeError("The row number(s) must be int, list or tuple.")
1590 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1591
[2411]1592 usecommonmask = True
1593
1594 if mask is None:
1595 mask = []
1596 if isinstance(mask, list) or isinstance(mask, tuple):
1597 if len(mask) == 0:
1598 mask = [[]]
1599 else:
1600 if isinstance(mask[0], bool):
1601 if len(mask) != self.nchan(self.getif(rowno[0])):
1602 raise ValueError("The spectra and the mask have "
1603 "different length.")
1604 mask = [mask]
1605 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1606 usecommonmask = False
1607 if len(mask) != len(rowno):
1608 raise ValueError("When specifying masks for each "
1609 "spectrum, the numbers of them "
1610 "must be identical.")
1611 for i in xrange(mask):
1612 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1613 raise ValueError("The spectra and the mask have "
1614 "different length.")
1615 else:
1616 raise TypeError("The mask must be a boolean list or "
1617 "a list of boolean list.")
1618 else:
[2349]1619 raise TypeError("The mask must be a boolean list or a list of "
1620 "boolean list.")
[2186]1621
1622 res = []
1623
1624 imask = 0
1625 for whichrow in rowno:
1626 fspec = self._fft(whichrow, mask[imask], getrealimag)
1627 nspec = len(fspec)
[2177]1628
[2349]1629 i = 0
1630 v1 = []
1631 v2 = []
1632 reselem = {"real":[],"imag":[]} if getrealimag \
1633 else {"ampl":[],"phase":[]}
[2177]1634
[2186]1635 while (i < nspec):
1636 v1.append(fspec[i])
1637 v2.append(fspec[i+1])
[2349]1638 i += 2
[2186]1639
[2177]1640 if getrealimag:
[2186]1641 reselem["real"] += v1
1642 reselem["imag"] += v2
[2177]1643 else:
[2186]1644 reselem["ampl"] += v1
1645 reselem["phase"] += v2
[2177]1646
[2186]1647 res.append(reselem)
1648
[2349]1649 if not usecommonmask:
1650 imask += 1
[2186]1651
[2177]1652 return res
1653
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abscissa of the reference row, used to translate window
        # boundaries (in the current unit) to channel positions
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        # start from an all-False mask and switch selected channels on
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!
        # i.e. accept both create_mask([a, b], [c, d]) and
        # create_mask([[a, b], [c, d]]): if the last element of the last
        # arg is a number, args is already a sequence of windows;
        # otherwise args[0] is the list of windows.

        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value selects exactly that one channel
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            # normalise the window so that window[0] <= window[1]
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
[710]1726
[1931]1727 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1728 """\
[1819]1729 Compute and return a list of mask windows, [min, max].
[1846]1730
[1819]1731 Parameters:
[1846]1732
[1819]1733 mask: channel mask, created with create_mask.
[1855]1734
[1819]1735 row: calcutate the masklist using the specified row
1736 for unit conversions, default is row=0
1737 only necessary if frequency varies over rows.
[1846]1738
[1819]1739 Returns:
[1846]1740
[1819]1741 [min, max], [min2, max2], ...
1742 Pairs of start/end points (inclusive)specifying
1743 the masked regions
[1846]1744
[1819]1745 """
1746 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1747 raise TypeError("The mask should be list or tuple.")
[2427]1748 if len(mask) <= 0:
1749 raise TypeError("The mask elements should be > 0")
[2348]1750 data = self._getabcissa(row)
1751 if len(data) != len(mask):
[1819]1752 msg = "Number of channels in scantable != number of mask elements"
1753 raise TypeError(msg)
1754 u = self._getcoordinfo()[0]
[1859]1755 if u == "":
1756 u = "channel"
1757 msg = "The current mask window unit is %s" % u
1758 i = self._check_ifs()
1759 if not i:
1760 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1761 if not silent:
1762 asaplog.push(msg)
[2349]1763 masklist = []
[1819]1764 ist, ien = None, None
1765 ist, ien=self.get_mask_indices(mask)
1766 if ist is not None and ien is not None:
1767 for i in xrange(len(ist)):
1768 range=[data[ist[i]],data[ien[i]]]
1769 range.sort()
1770 masklist.append([range[0],range[1]])
1771 return masklist
1772
1773 def get_mask_indices(self, mask=None):
[1846]1774 """\
[1819]1775 Compute and Return lists of mask start indices and mask end indices.
[1855]1776
1777 Parameters:
1778
[1819]1779 mask: channel mask, created with create_mask.
[1846]1780
[1819]1781 Returns:
[1846]1782
[1819]1783 List of mask start indices and that of mask end indices,
1784 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1785
[1819]1786 """
1787 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1788 raise TypeError("The mask should be list or tuple.")
[2427]1789 if len(mask) <= 0:
1790 raise TypeError("The mask elements should be > 0")
[2349]1791 istart = []
1792 iend = []
1793 if mask[0]:
1794 istart.append(0)
[1819]1795 for i in range(len(mask)-1):
1796 if not mask[i] and mask[i+1]:
1797 istart.append(i+1)
1798 elif mask[i] and not mask[i+1]:
1799 iend.append(i)
[2349]1800 if mask[len(mask)-1]:
1801 iend.append(len(mask)-1)
[1819]1802 if len(istart) != len(iend):
1803 raise RuntimeError("Numbers of mask start != mask end.")
1804 for i in range(len(istart)):
1805 if istart[i] > iend[i]:
1806 raise RuntimeError("Mask start index > mask end index")
1807 break
1808 return istart,iend
1809
[2013]1810 @asaplog_post_dec
[2882]1811 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
1812 """
1813 Parse MS type spw/channel selection syntax.
1814
1815 Parameters:
1816 selectstring : A string expression of spw and channel selection.
1817 Comma-separated expressions mean different spw -
1818 channel combinations. Spws and channel selections
1819 are partitioned by a colon ':'. In a single
1820 selection expression, you can put multiple values
1821 separated by semicolons ';'. Both for spw and
1822 channel selection, allowed cases include single
1823 value, blank('') or asterisk('*') to specify all
1824 available values, two values connected with a
1825 tilde ('~') to specify an inclusive range. Unit
1826 strings for frequency or velocity can be added to
1827 the tilde-connected values. For channel selection
1828 expression, placing a '<' or a '>' is possible to
1829 specify a semi-infinite interval as well.
1830
1831 examples:
1832 '' or '*' = all spws (all channels)
1833 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
1834 '3:3~45;60' = channels 3 to 45 and 60 in spw 3
1835 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
1836 all channels in spw8
[2884]1837 '1.3~1.5GHz' = all spws that fall in or have at
1838 least some overwrap with frequency
1839 range between 1.3GHz and 1.5GHz.
1840 '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
1841 between the specified
1842 frequency range in spws
1843 that fall in or have
1844 overwrap with the
1845 specified frequency
1846 range.
1847 '1:-200~250km/s' = channels that fall between the
1848 specified velocity range in
1849 spw 1.
[2897]1850 restfreq: the rest frequency.
1851 examples: '115.2712GHz', 115271201800.0
1852 frame: an optional frame type, default 'LSRK'. Valid frames are:
1853 'TOPO', 'LSRD', 'LSRK', 'BARY',
1854 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1855 doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[2882]1856 Returns:
1857 A dictionary of selected (valid) spw and masklist pairs,
1858 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1859 """
1860 if not isinstance(selectstring, str):
1861 asaplog.post()
1862 asaplog.push("Expression of spw/channel selection must be a string.")
1863 asaplog.post("ERROR")
1864
1865 orig_unit = self.get_unit()
1866 self.set_unit('channel')
1867
[2891]1868 if restfreq is not None:
[2892]1869 orig_molids = self._getmolidcol_list()
1870 set_restfreq(self, restfreq)
[2882]1871
[2897]1872 orig_coord = self._getcoordinfo()
[2892]1873
1874 if frame is not None:
1875 orig_frame = orig_coord[1]
1876 self.set_freqframe(frame)
1877
1878 if doppler is not None:
1879 orig_doppler = orig_coord[2]
1880 self.set_doppler(doppler)
[2882]1881
1882 valid_ifs = self.getifnos()
1883
1884 comma_sep = selectstring.split(",")
1885 res = {}
1886
1887 for cms_elem in comma_sep:
1888 colon_sep = cms_elem.split(":")
1889
1890 if (len(colon_sep) > 2):
1891 raise RuntimeError("Invalid selection expression: more than two colons!")
1892
1893 # parse spw expression and store result in spw_list.
1894 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
[2884]1895 # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
1896 # 'a~b*m/s' (where * can be '' or 'k') and also
[2882]1897 # several of the above expressions connected with ';'.
1898
1899 spw_list = []
1900
1901 semicolon_sep = colon_sep[0].split(";")
1902
1903 for scs_elem in semicolon_sep:
1904 scs_elem = scs_elem.strip()
1905
1906 lt_sep = scs_elem.split("<")
1907 gt_sep = scs_elem.split(">")
1908 ti_sep = scs_elem.split("~")
1909
1910 lt_sep_length = len(lt_sep)
1911 gt_sep_length = len(gt_sep)
1912 ti_sep_length = len(ti_sep)
1913
1914 len_product = lt_sep_length * gt_sep_length * ti_sep_length
1915
1916 if (len_product > 2):
1917 # '<', '>' and '~' must not coexist in a single spw expression
1918
1919 raise RuntimeError("Invalid spw selection.")
1920
1921 elif (len_product == 1):
1922 # '', '*', or single spw number.
1923
1924 if (scs_elem == "") or (scs_elem == "*"):
1925 spw_list = valid_ifs[:] # deep copy
1926
1927 else: # single number
[2887]1928 expr = int(scs_elem)
1929 spw_list.append(expr)
1930 if expr not in valid_ifs:
1931 asaplog.push("Invalid spw given. Ignored.")
1932
[2882]1933 else: # (len_product == 2)
[2887]1934 # namely, one of '<', '>' or '~' appears just once.
[2882]1935
1936 if (lt_sep_length == 2): # '<a'
1937 if is_number(lt_sep[1]):
[2886]1938 no_valid_spw = True
[2882]1939 for i in valid_ifs:
1940 if (i < float(lt_sep[1])):
1941 spw_list.append(i)
[2886]1942 no_valid_spw = False
1943
1944 if no_valid_spw:
1945 raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")
[2882]1946
1947 else:
[2886]1948 raise RuntimeError("Invalid spw selection.")
[2882]1949
1950 elif (gt_sep_length == 2): # '>a'
1951 if is_number(gt_sep[1]):
[2886]1952 no_valid_spw = True
[2882]1953 for i in valid_ifs:
1954 if (i > float(gt_sep[1])):
1955 spw_list.append(i)
[2886]1956 no_valid_spw = False
1957
1958 if no_valid_spw:
1959 raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")
[2882]1960
1961 else:
[2886]1962 raise RuntimeError("Invalid spw selection.")
[2882]1963
1964 else: # (ti_sep_length == 2) where both boundaries inclusive
1965 expr0 = ti_sep[0].strip()
1966 expr1 = ti_sep[1].strip()
1967
1968 if is_number(expr0) and is_number(expr1):
1969 # 'a~b'
1970 expr_pmin = min(float(expr0), float(expr1))
1971 expr_pmax = max(float(expr0), float(expr1))
[2887]1972 has_invalid_spw = False
[2886]1973 no_valid_spw = True
1974
[2882]1975 for i in valid_ifs:
1976 if (expr_pmin <= i) and (i <= expr_pmax):
1977 spw_list.append(i)
[2886]1978 no_valid_spw = False
[2887]1979 else:
1980 has_invalid_spw = True
[2886]1981
[2887]1982 if has_invalid_spw:
1983 msg = "Invalid spw is given. Ignored."
1984 asaplog.push(msg)
1985 asaplog.post()
1986
[2886]1987 if no_valid_spw:
1988 raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")
[2887]1989
[2884]1990 elif is_number(expr0) and is_frequency(expr1):
1991 # 'a~b*Hz'
1992 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
[2886]1993 no_valid_spw = True
1994
[2882]1995 for coord in self._get_coordinate_list():
1996 expr_p0 = coord['coord'].to_pixel(expr_f0)
1997 expr_p1 = coord['coord'].to_pixel(expr_f1)
1998 expr_pmin = min(expr_p0, expr_p1)
1999 expr_pmax = max(expr_p0, expr_p1)
2000
2001 spw = coord['if']
2002 pmin = 0.0
2003 pmax = float(self.nchan(spw) - 1)
2004
2005 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
2006 spw_list.append(spw)
[2886]2007 no_valid_spw = False
2008
2009 if no_valid_spw:
2010 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
[2882]2011
[2884]2012 elif is_number(expr0) and is_velocity(expr1):
2013 # 'a~b*m/s'
2014 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
[2882]2015 expr_vmin = min(expr_v0, expr_v1)
2016 expr_vmax = max(expr_v0, expr_v1)
[2886]2017 no_valid_spw = True
2018
[2882]2019 for coord in self._get_coordinate_list():
2020 spw = coord['if']
2021
2022 pmin = 0.0
2023 pmax = float(self.nchan(spw) - 1)
2024
2025 vel0 = coord['coord'].to_velocity(pmin)
2026 vel1 = coord['coord'].to_velocity(pmax)
2027
2028 vmin = min(vel0, vel1)
2029 vmax = max(vel0, vel1)
2030
2031 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
2032 spw_list.append(spw)
[2886]2033 no_valid_spw = False
2034
2035 if no_valid_spw:
2036 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
[2882]2037
2038 else:
2039 # cases such as 'aGHz~bkm/s' are not allowed now
2040 raise RuntimeError("Invalid spw selection.")
2041
[2887]2042 # check spw list and remove invalid ones.
2043 # if no valid spw left, emit ValueError.
2044 if len(spw_list) == 0:
2045 raise ValueError("No valid spw in given range.")
2046
[2882]2047 # parse channel expression and store the result in crange_list.
2048 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
2049 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
2050 # and also several of the above expressions connected with ';'.
2051
2052 for spw in spw_list:
2053 pmin = 0.0
2054 pmax = float(self.nchan(spw) - 1)
[2909]2055
2056 molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]
[2882]2057
2058 if (len(colon_sep) == 1):
2059 # no expression for channel selection,
2060 # which means all channels are to be selected.
2061 crange_list = [[pmin, pmax]]
2062
2063 else: # (len(colon_sep) == 2)
2064 crange_list = []
2065
2066 found = False
2067 for i in self._get_coordinate_list():
2068 if (i['if'] == spw):
2069 coord = i['coord']
2070 found = True
2071 break
2072
[2887]2073 if found:
2074 semicolon_sep = colon_sep[1].split(";")
2075 for scs_elem in semicolon_sep:
2076 scs_elem = scs_elem.strip()
[2882]2077
[2887]2078 ti_sep = scs_elem.split("~")
2079 ti_sep_length = len(ti_sep)
[2882]2080
[2887]2081 if (ti_sep_length > 2):
2082 raise RuntimeError("Invalid channel selection.")
[2882]2083
[2887]2084 elif (ti_sep_length == 1):
2085 if (scs_elem == "") or (scs_elem == "*"):
2086 # '' and '*' for all channels
2087 crange_list = [[pmin, pmax]]
2088 break
2089 elif (is_number(scs_elem)):
2090 # single channel given
2091 crange_list.append([float(scs_elem), float(scs_elem)])
2092 else:
2093 raise RuntimeError("Invalid channel selection.")
[2882]2094
[2887]2095 else: #(ti_sep_length == 2)
2096 expr0 = ti_sep[0].strip()
2097 expr1 = ti_sep[1].strip()
[2882]2098
[2887]2099 if is_number(expr0) and is_number(expr1):
2100 # 'a~b'
2101 expr_pmin = min(float(expr0), float(expr1))
2102 expr_pmax = max(float(expr0), float(expr1))
[2882]2103
[2887]2104 elif is_number(expr0) and is_frequency(expr1):
2105 # 'a~b*Hz'
2106 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
2107 expr_p0 = coord.to_pixel(expr_f0)
2108 expr_p1 = coord.to_pixel(expr_f1)
2109 expr_pmin = min(expr_p0, expr_p1)
2110 expr_pmax = max(expr_p0, expr_p1)
[2882]2111
[2887]2112 elif is_number(expr0) and is_velocity(expr1):
2113 # 'a~b*m/s'
[2909]2114 restf = self.get_restfreqs()[molid][0]
[2887]2115 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
[2897]2116 dppl = self.get_doppler()
2117 expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
2118 expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
[2887]2119 expr_p0 = coord.to_pixel(expr_f0)
2120 expr_p1 = coord.to_pixel(expr_f1)
2121 expr_pmin = min(expr_p0, expr_p1)
2122 expr_pmax = max(expr_p0, expr_p1)
[2882]2123
[2887]2124 else:
2125 # cases such as 'aGHz~bkm/s' are not allowed now
2126 raise RuntimeError("Invalid channel selection.")
[2882]2127
[2887]2128 cmin = max(pmin, expr_pmin)
2129 cmax = min(pmax, expr_pmax)
2130 # if the given range of channel selection has overwrap with
2131 # that of current spw, output the overwrap area.
2132 if (cmin <= cmax):
2133 cmin = float(int(cmin + 0.5))
2134 cmax = float(int(cmax + 0.5))
2135 crange_list.append([cmin, cmax])
[2882]2136
2137 if (len(crange_list) == 0):
2138 crange_list.append([])
2139
[2910]2140 if (len(crange_list[0]) > 0):
2141 if res.has_key(spw):
2142 res[spw].extend(crange_list)
2143 else:
2144 res[spw] = crange_list
[2882]2145
[2887]2146 for spw in res.keys():
2147 if spw not in valid_ifs:
2148 del res[spw]
2149
2150 if len(res) == 0:
2151 raise RuntimeError("No valid spw.")
2152
[2882]2153 # restore original values
[2892]2154 self.set_unit(orig_unit)
[2891]2155 if restfreq is not None:
[2892]2156 self._setmolidcol_list(orig_molids)
2157 if frame is not None:
2158 self.set_freqframe(orig_frame)
2159 if doppler is not None:
2160 self.set_doppler(orig_doppler)
[2882]2161
2162 return res
[2890]2163
[2882]2164 @asaplog_post_dec
2165 def get_first_rowno_by_if(self, ifno):
2166 found = False
2167 for irow in xrange(self.nrow()):
2168 if (self.getif(irow) == ifno):
2169 res = irow
2170 found = True
2171 break
2172
2173 if not found: raise RuntimeError("Invalid IF value.")
2174
2175 return res
2176
2177 @asaplog_post_dec
2178 def _get_coordinate_list(self):
2179 res = []
2180 spws = self.getifnos()
2181 for spw in spws:
2182 elem = {}
2183 elem['if'] = spw
2184 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2185 res.append(elem)
2186
2187 return res
2188
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            # NOTE(review): execution continues after posting ERROR if the
            # log handler does not abort -- confirm asaplog.post("ERROR")
            # raises in this environment
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty selection means "all IFs": synthesize '0, 1, ..., n'
        # from the list repr with brackets stripped
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # linear scan for the first row of this IF; 'ifrow'
                        # keeps its value after the break
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset = half a channel width in the current unit,
                        # used to make '<'/'>' boundaries exclusive
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None for "whole spectrum" -> full range
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2326
[2611]2327 @asaplog_post_dec
2328 def parse_idx_selection(self, mode, selexpr):
2329 """
2330 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2331 BEAMNO, and row number
2332
2333 Parameters:
2334 mode : which column to select.
2335 ['scan',|'if'|'pol'|'beam'|'row']
2336 selexpr : A comma separated selection expression.
2337 examples:
2338 '' = all (returns [])
2339 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2340 (returns [0,1,4,5,6,9])
2341 Returns:
2342 A List of selected indices
2343 """
2344 if selexpr == "":
2345 return []
2346 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2347 'b': 'beam', 'r': 'row'}
2348 smode = mode.lower()[0]
2349 if not (smode in valid_modes.keys()):
2350 msg = "Invalid mode '%s'. Valid modes are %s" %\
2351 (mode, str(valid_modes.values()))
2352 asaplog.post()
2353 asaplog.push(msg)
2354 asaplog.post("ERROR")
2355 mode = valid_modes[smode]
2356 minidx = None
2357 maxidx = None
2358 if smode == 'r':
2359 minidx = 0
2360 maxidx = self.nrow()
2361 else:
2362 idx = getattr(self,"get"+mode+"nos")()
2363 minidx = min(idx)
2364 maxidx = max(idx)
2365 del idx
[2867]2366 # split selexpr by "<spaces>,<spaces>"
2367 comma_sep = re.compile('\s*,\s*')
2368 sellist = comma_sep.split(selexpr)
[2611]2369 idxlist = []
2370 for currselstr in sellist:
2371 # single range (may include ~, < or >)
2372 currlist = self._parse_selection(currselstr, typestr='integer',
2373 minval=minidx,maxval=maxidx)
2374 for thelist in currlist:
2375 idxlist += range(thelist[0],thelist[1]+1)
2376 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2377 asaplog.push(msg)
2378 return idxlist
2379
[2349]2380 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]2381 minval=None, maxval=None):
[2013]2382 """
2383 Parameters:
2384 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2385 typestr : The type of the values in returned list
2386 ('integer' or 'float')
2387 offset : The offset value to subtract from or add to
2388 the boundary value if the selection string
[2611]2389 includes '<' or '>' [Valid only for typestr='float']
[2013]2390 minval, maxval : The minimum/maximum values to set if the
2391 selection string includes '<' or '>'.
2392 The list element is filled with None by default.
2393 Returns:
2394 A list of min/max pair of selections.
2395 Example:
[2611]2396 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2397 --> returns [[0,2],[5,7],[9,9]]
2398 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2399 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
[2013]2400 """
[2867]2401 # split selstr by '<spaces>;<spaces>'
2402 semi_sep = re.compile('\s*;\s*')
2403 selgroups = semi_sep.split(selstr)
[2013]2404 sellists = []
2405 if typestr.lower().startswith('int'):
2406 formatfunc = int
[2611]2407 offset = 1
[2013]2408 else:
2409 formatfunc = float
2410
2411 for currsel in selgroups:
[2867]2412 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2413 minsel = minval
2414 maxsel = maxval
[2013]2415 if currsel.find('~') > 0:
[2611]2416 # val0 <= x <= val1
[2013]2417 minsel = formatfunc(currsel.split('~')[0].strip())
[2867]2418 maxsel = formatfunc(currsel.split('~')[1].strip())
[2611]2419 elif currsel.strip().find('<=') > -1:
2420 bound = currsel.split('<=')
2421 try: # try "x <= val"
2422 minsel = minval
2423 maxsel = formatfunc(bound[1].strip())
2424 except ValueError: # now "val <= x"
2425 minsel = formatfunc(bound[0].strip())
2426 maxsel = maxval
2427 elif currsel.strip().find('>=') > -1:
2428 bound = currsel.split('>=')
2429 try: # try "x >= val"
2430 minsel = formatfunc(bound[1].strip())
2431 maxsel = maxval
2432 except ValueError: # now "val >= x"
2433 minsel = minval
2434 maxsel = formatfunc(bound[0].strip())
2435 elif currsel.strip().find('<') > -1:
2436 bound = currsel.split('<')
2437 try: # try "x < val"
2438 minsel = minval
2439 maxsel = formatfunc(bound[1].strip()) \
2440 - formatfunc(offset)
2441 except ValueError: # now "val < x"
2442 minsel = formatfunc(bound[0].strip()) \
[2013]2443 + formatfunc(offset)
[2611]2444 maxsel = maxval
2445 elif currsel.strip().find('>') > -1:
2446 bound = currsel.split('>')
2447 try: # try "x > val"
2448 minsel = formatfunc(bound[1].strip()) \
2449 + formatfunc(offset)
2450 maxsel = maxval
2451 except ValueError: # now "val > x"
2452 minsel = minval
2453 maxsel = formatfunc(bound[0].strip()) \
2454 - formatfunc(offset)
[2013]2455 else:
2456 minsel = formatfunc(currsel)
2457 maxsel = formatfunc(currsel)
2458 sellists.append([minsel,maxsel])
2459 return sellists
2460
[1819]2461# def get_restfreqs(self):
2462# """
2463# Get the restfrequency(s) stored in this scantable.
2464# The return value(s) are always of unit 'Hz'
2465# Parameters:
2466# none
2467# Returns:
2468# a list of doubles
2469# """
2470# return list(self._getrestfreqs())
2471
2472 def get_restfreqs(self, ids=None):
[1846]2473 """\
[256]2474 Get the restfrequency(s) stored in this scantable.
2475 The return value(s) are always of unit 'Hz'
[1846]2476
[256]2477 Parameters:
[1846]2478
[1819]2479 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2480 be retrieved
[1846]2481
[256]2482 Returns:
[1846]2483
[1819]2484 dictionary containing ids and a list of doubles for each id
[1846]2485
[256]2486 """
[1819]2487 if ids is None:
[2349]2488 rfreqs = {}
[1819]2489 idlist = self.getmolnos()
2490 for i in idlist:
[2349]2491 rfreqs[i] = list(self._getrestfreqs(i))
[1819]2492 return rfreqs
2493 else:
[2349]2494 if type(ids) == list or type(ids) == tuple:
2495 rfreqs = {}
[1819]2496 for i in ids:
[2349]2497 rfreqs[i] = list(self._getrestfreqs(i))
[1819]2498 return rfreqs
2499 else:
2500 return list(self._getrestfreqs(ids))
[102]2501
[2349]2502 @asaplog_post_dec
[931]2503 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]2504 """\
[931]2505 Set or replace the restfrequency specified and
[1938]2506 if the 'freqs' argument holds a scalar,
[931]2507 then that rest frequency will be applied to all the selected
2508 data. If the 'freqs' argument holds
2509 a vector, then it MUST be of equal or smaller length than
2510 the number of IFs (and the available restfrequencies will be
2511 replaced by this vector). In this case, *all* data have
2512 the restfrequency set per IF according
2513 to the corresponding value you give in the 'freqs' vector.
[1118]2514 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]2515 IF 1 gets restfreq 2e9.
[1846]2516
[1395]2517 You can also specify the frequencies via a linecatalog.
[1153]2518
[931]2519 Parameters:
[1846]2520
[931]2521 freqs: list of rest frequency values or string idenitfiers
[1855]2522
[931]2523 unit: unit for rest frequency (default 'Hz')
[402]2524
[1846]2525
2526 Example::
2527
[1819]2528 # set the given restfrequency for the all currently selected IFs
[931]2529 scan.set_restfreqs(freqs=1.4e9)
[1845]2530 # set restfrequencies for the n IFs (n > 1) in the order of the
2531 # list, i.e
2532 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2533 # len(list_of_restfreqs) == nIF
2534 # for nIF == 1 the following will set multiple restfrequency for
2535 # that IF
[1819]2536 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]2537 # set multiple restfrequencies per IF. as a list of lists where
2538 # the outer list has nIF elements, the inner s arbitrary
2539 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]2540
[1846]2541 *Note*:
[1845]2542
[931]2543 To do more sophisticate Restfrequency setting, e.g. on a
2544 source and IF basis, use scantable.set_selection() before using
[1846]2545 this function::
[931]2546
[1846]2547 # provided your scantable is called scan
2548 selection = selector()
[2431]2549 selection.set_name('ORION*')
[1846]2550 selection.set_ifs([1])
2551 scan.set_selection(selection)
2552 scan.set_restfreqs(freqs=86.6e9)
2553
[931]2554 """
2555 varlist = vars()
[1157]2556 from asap import linecatalog
2557 # simple value
[1118]2558 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]2559 self._setrestfreqs([freqs], [""], unit)
[1157]2560 # list of values
[1118]2561 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]2562 # list values are scalars
[1118]2563 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]2564 if len(freqs) == 1:
2565 self._setrestfreqs(freqs, [""], unit)
2566 else:
2567 # allow the 'old' mode of setting mulitple IFs
2568 savesel = self._getselection()
[2599]2569 sel = self.get_selection()
[1845]2570 iflist = self.getifnos()
2571 if len(freqs)>len(iflist):
2572 raise ValueError("number of elements in list of list "
2573 "exeeds the current IF selections")
2574 iflist = self.getifnos()
2575 for i, fval in enumerate(freqs):
2576 sel.set_ifs(iflist[i])
2577 self._setselection(sel)
2578 self._setrestfreqs([fval], [""], unit)
2579 self._setselection(savesel)
2580
2581 # list values are dict, {'value'=, 'name'=)
[1157]2582 elif isinstance(freqs[-1], dict):
[1845]2583 values = []
2584 names = []
2585 for d in freqs:
2586 values.append(d["value"])
2587 names.append(d["name"])
2588 self._setrestfreqs(values, names, unit)
[1819]2589 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]2590 savesel = self._getselection()
[2599]2591 sel = self.get_selection()
[1322]2592 iflist = self.getifnos()
[1819]2593 if len(freqs)>len(iflist):
[1845]2594 raise ValueError("number of elements in list of list exeeds"
2595 " the current IF selections")
2596 for i, fval in enumerate(freqs):
[1322]2597 sel.set_ifs(iflist[i])
[1259]2598 self._setselection(sel)
[1845]2599 self._setrestfreqs(fval, [""], unit)
[1157]2600 self._setselection(savesel)
2601 # freqs are to be taken from a linecatalog
[1153]2602 elif isinstance(freqs, linecatalog):
2603 savesel = self._getselection()
[2599]2604 sel = self.get_selection()
[1153]2605 for i in xrange(freqs.nrow()):
[1322]2606 sel.set_ifs(iflist[i])
[1153]2607 self._setselection(sel)
[1845]2608 self._setrestfreqs([freqs.get_frequency(i)],
2609 [freqs.get_name(i)], "MHz")
[1153]2610 # ensure that we are not iterating past nIF
2611 if i == self.nif()-1: break
2612 self._setselection(savesel)
[931]2613 else:
2614 return
2615 self._add_history("set_restfreqs", varlist)
2616
[2349]2617 @asaplog_post_dec
[1360]2618 def shift_refpix(self, delta):
[1846]2619 """\
[1589]2620 Shift the reference pixel of the Spectra Coordinate by an
2621 integer amount.
[1846]2622
[1589]2623 Parameters:
[1846]2624
[1589]2625 delta: the amount to shift by
[1846]2626
2627 *Note*:
2628
[1589]2629 Be careful using this with broadband data.
[1846]2630
[1360]2631 """
[2349]2632 varlist = vars()
[1731]2633 Scantable.shift_refpix(self, delta)
[2349]2634 s._add_history("shift_refpix", varlist)
[931]2635
[1862]2636 @asaplog_post_dec
[2820]2637 def history(self, filename=None, nrows=-1, start=0):
[1846]2638 """\
[1259]2639 Print the history. Optionally to a file.
[1846]2640
[1348]2641 Parameters:
[1846]2642
[1928]2643 filename: The name of the file to save the history to.
[1846]2644
[1259]2645 """
[2820]2646 n = self._historylength()
2647 if nrows == -1:
2648 nrows = n
2649 if start+nrows > n:
2650 nrows = nrows-start
2651 if n > 1000 and nrows == n:
2652 nrows = 1000
2653 start = n-1000
2654 asaplog.push("Warning: History has {0} entries. Displaying last "
2655 "1000".format(n))
2656 hist = list(self._gethistory(nrows, start))
[794]2657 out = "-"*80
[484]2658 for h in hist:
[2820]2659 if not h.strip():
2660 continue
2661 if h.find("---") >-1:
2662 continue
[489]2663 else:
2664 items = h.split("##")
2665 date = items[0]
2666 func = items[1]
2667 items = items[2:]
[794]2668 out += "\n"+date+"\n"
2669 out += "Function: %s\n Parameters:" % (func)
[489]2670 for i in items:
[1938]2671 if i == '':
2672 continue
[489]2673 s = i.split("=")
[1118]2674 out += "\n %s = %s" % (s[0], s[1])
[2820]2675 out = "\n".join([out, "*"*80])
[1259]2676 if filename is not None:
2677 if filename is "":
2678 filename = 'scantable_history.txt'
2679 filename = os.path.expandvars(os.path.expanduser(filename))
2680 if not os.path.isdir(filename):
2681 data = open(filename, 'w')
2682 data.write(out)
2683 data.close()
2684 else:
2685 msg = "Illegal file name '%s'." % (filename)
[1859]2686 raise IOError(msg)
2687 return page(out)
[2349]2688
[513]2689 #
2690 # Maths business
2691 #
[1862]2692 @asaplog_post_dec
[2818]2693 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2694 avmode="NONE"):
[1846]2695 """\
[2349]2696 Return the (time) weighted average of a scan. Scans will be averaged
2697 only if the source direction (RA/DEC) is within 1' otherwise
[1846]2698
2699 *Note*:
2700
[1070]2701 in channels only - align if necessary
[1846]2702
[513]2703 Parameters:
[1846]2704
[513]2705 mask: an optional mask (only used for 'var' and 'tsys'
2706 weighting)
[1855]2707
[558]2708 scanav: True averages each scan separately
2709 False (default) averages all scans together,
[1855]2710
[1099]2711 weight: Weighting scheme.
2712 'none' (mean no weight)
2713 'var' (1/var(spec) weighted)
2714 'tsys' (1/Tsys**2 weighted)
2715 'tint' (integration time weighted)
2716 'tintsys' (Tint/Tsys**2)
2717 'median' ( median averaging)
[535]2718 The default is 'tint'
[1855]2719
[931]2720 align: align the spectra in velocity before averaging. It takes
2721 the time of the first spectrum as reference time.
[2818]2722 avmode: 'SOURCE' - also select by source name - or
2723 'NONE' (default). Not applicable for scanav=True or
2724 weight=median
[1846]2725
2726 Example::
2727
[513]2728 # time average the scantable without using a mask
[710]2729 newscan = scan.average_time()
[1846]2730
[513]2731 """
2732 varlist = vars()
[1593]2733 weight = weight or 'TINT'
2734 mask = mask or ()
[2818]2735 scanav = (scanav and 'SCAN') or avmode.upper()
[1118]2736 scan = (self, )
[1859]2737
2738 if align:
2739 scan = (self.freq_align(insitu=False), )
[2818]2740 asaplog.push("Note: Alignment is don on a source-by-source basis")
2741 asaplog.push("Note: Averaging (by default) is not")
2742 # we need to set it to SOURCE averaging here
[1859]2743 s = None
2744 if weight.upper() == 'MEDIAN':
2745 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2746 scanav))
2747 else:
2748 s = scantable(self._math._average(scan, mask, weight.upper(),
2749 scanav))
[1099]2750 s._add_history("average_time", varlist)
[513]2751 return s
[710]2752
[1862]2753 @asaplog_post_dec
[876]2754 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]2755 """\
[513]2756 Return a scan where all spectra are converted to either
2757 Jansky or Kelvin depending upon the flux units of the scan table.
2758 By default the function tries to look the values up internally.
2759 If it can't find them (or if you want to over-ride), you must
2760 specify EITHER jyperk OR eta (and D which it will try to look up
2761 also if you don't set it). jyperk takes precedence if you set both.
[1846]2762
[513]2763 Parameters:
[1846]2764
[513]2765 jyperk: the Jy / K conversion factor
[1855]2766
[513]2767 eta: the aperture efficiency
[1855]2768
[1928]2769 d: the geometric diameter (metres)
[1855]2770
[513]2771 insitu: if False a new scantable is returned.
2772 Otherwise, the scaling is done in-situ
2773 The default is taken from .asaprc (False)
[1846]2774
[513]2775 """
2776 if insitu is None: insitu = rcParams['insitu']
[876]2777 self._math._setinsitu(insitu)
[513]2778 varlist = vars()
[1593]2779 jyperk = jyperk or -1.0
2780 d = d or -1.0
2781 eta = eta or -1.0
[876]2782 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2783 s._add_history("convert_flux", varlist)
2784 if insitu: self._assign(s)
2785 else: return s
[513]2786
[1862]2787 @asaplog_post_dec
[876]2788 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]2789 """\
[513]2790 Return a scan after applying a gain-elevation correction.
2791 The correction can be made via either a polynomial or a
2792 table-based interpolation (and extrapolation if necessary).
2793 You specify polynomial coefficients, an ascii table or neither.
2794 If you specify neither, then a polynomial correction will be made
2795 with built in coefficients known for certain telescopes (an error
2796 will occur if the instrument is not known).
2797 The data and Tsys are *divided* by the scaling factors.
[1846]2798
[513]2799 Parameters:
[1846]2800
[513]2801 poly: Polynomial coefficients (default None) to compute a
2802 gain-elevation correction as a function of
2803 elevation (in degrees).
[1855]2804
[513]2805 filename: The name of an ascii file holding correction factors.
2806 The first row of the ascii file must give the column
2807 names and these MUST include columns
[2431]2808 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]2809 by this) somewhere.
2810 The second row must give the data type of the
2811 column. Use 'R' for Real and 'I' for Integer.
2812 An example file would be
2813 (actual factors are arbitrary) :
2814
2815 TIME ELEVATION FACTOR
2816 R R R
2817 0.1 0 0.8
2818 0.2 20 0.85
2819 0.3 40 0.9
2820 0.4 60 0.85
2821 0.5 80 0.8
2822 0.6 90 0.75
[1855]2823
[513]2824 method: Interpolation method when correcting from a table.
[2431]2825 Values are 'nearest', 'linear' (default), 'cubic'
2826 and 'spline'
[1855]2827
[513]2828 insitu: if False a new scantable is returned.
2829 Otherwise, the scaling is done in-situ
2830 The default is taken from .asaprc (False)
[1846]2831
[513]2832 """
2833
2834 if insitu is None: insitu = rcParams['insitu']
[876]2835 self._math._setinsitu(insitu)
[513]2836 varlist = vars()
[1593]2837 poly = poly or ()
[513]2838 from os.path import expandvars
2839 filename = expandvars(filename)
[876]2840 s = scantable(self._math._gainel(self, poly, filename, method))
2841 s._add_history("gain_el", varlist)
[1593]2842 if insitu:
2843 self._assign(s)
2844 else:
2845 return s
[710]2846
[1862]2847 @asaplog_post_dec
[931]2848 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2849 """\
[513]2850 Return a scan where all rows have been aligned in frequency/velocity.
2851 The alignment frequency frame (e.g. LSRK) is that set by function
2852 set_freqframe.
[1846]2853
[513]2854 Parameters:
[1855]2855
[513]2856 reftime: reference time to align at. By default, the time of
2857 the first row of data is used.
[1855]2858
[513]2859 method: Interpolation method for regridding the spectra.
[2431]2860 Choose from 'nearest', 'linear', 'cubic' (default)
2861 and 'spline'
[1855]2862
[513]2863 insitu: if False a new scantable is returned.
2864 Otherwise, the scaling is done in-situ
2865 The default is taken from .asaprc (False)
[1846]2866
[513]2867 """
[931]2868 if insitu is None: insitu = rcParams["insitu"]
[2429]2869 oldInsitu = self._math._insitu()
[876]2870 self._math._setinsitu(insitu)
[513]2871 varlist = vars()
[1593]2872 reftime = reftime or ""
[931]2873 s = scantable(self._math._freq_align(self, reftime, method))
[876]2874 s._add_history("freq_align", varlist)
[2429]2875 self._math._setinsitu(oldInsitu)
[2349]2876 if insitu:
2877 self._assign(s)
2878 else:
2879 return s
[513]2880
[1862]2881 @asaplog_post_dec
[1725]2882 def opacity(self, tau=None, insitu=None):
[1846]2883 """\
[513]2884 Apply an opacity correction. The data
2885 and Tsys are multiplied by the correction factor.
[1846]2886
[513]2887 Parameters:
[1855]2888
[1689]2889 tau: (list of) opacity from which the correction factor is
[513]2890 exp(tau*ZD)
[1689]2891 where ZD is the zenith-distance.
2892 If a list is provided, it has to be of length nIF,
2893 nIF*nPol or 1 and in order of IF/POL, e.g.
2894 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2895 if tau is `None` the opacities are determined from a
2896 model.
[1855]2897
[513]2898 insitu: if False a new scantable is returned.
2899 Otherwise, the scaling is done in-situ
2900 The default is taken from .asaprc (False)
[1846]2901
[513]2902 """
[2349]2903 if insitu is None:
2904 insitu = rcParams['insitu']
[876]2905 self._math._setinsitu(insitu)
[513]2906 varlist = vars()
[1689]2907 if not hasattr(tau, "__len__"):
2908 tau = [tau]
[876]2909 s = scantable(self._math._opacity(self, tau))
2910 s._add_history("opacity", varlist)
[2349]2911 if insitu:
2912 self._assign(s)
2913 else:
2914 return s
[513]2915
[1862]2916 @asaplog_post_dec
[513]2917 def bin(self, width=5, insitu=None):
[1846]2918 """\
[513]2919 Return a scan where all spectra have been binned up.
[1846]2920
[1348]2921 Parameters:
[1846]2922
[513]2923 width: The bin width (default=5) in pixels
[1855]2924
[513]2925 insitu: if False a new scantable is returned.
2926 Otherwise, the scaling is done in-situ
2927 The default is taken from .asaprc (False)
[1846]2928
[513]2929 """
[2349]2930 if insitu is None:
2931 insitu = rcParams['insitu']
[876]2932 self._math._setinsitu(insitu)
[513]2933 varlist = vars()
[876]2934 s = scantable(self._math._bin(self, width))
[1118]2935 s._add_history("bin", varlist)
[1589]2936 if insitu:
2937 self._assign(s)
2938 else:
2939 return s
[513]2940
[1862]2941 @asaplog_post_dec
[2672]2942 def reshape(self, first, last, insitu=None):
2943 """Resize the band by providing first and last channel.
2944 This will cut off all channels outside [first, last].
2945 """
2946 if insitu is None:
2947 insitu = rcParams['insitu']
2948 varlist = vars()
2949 if last < 0:
2950 last = self.nchan()-1 + last
2951 s = None
2952 if insitu:
2953 s = self
2954 else:
2955 s = self.copy()
2956 s._reshape(first,last)
2957 s._add_history("reshape", varlist)
2958 if not insitu:
2959 return s
2960
2961 @asaplog_post_dec
[513]2962 def resample(self, width=5, method='cubic', insitu=None):
[1846]2963 """\
[1348]2964 Return a scan where all spectra have been binned up.
[1573]2965
[1348]2966 Parameters:
[1846]2967
[513]2968 width: The bin width (default=5) in pixels
[1855]2969
[513]2970 method: Interpolation method when correcting from a table.
[2431]2971 Values are 'nearest', 'linear', 'cubic' (default)
2972 and 'spline'
[1855]2973
[513]2974 insitu: if False a new scantable is returned.
2975 Otherwise, the scaling is done in-situ
2976 The default is taken from .asaprc (False)
[1846]2977
[513]2978 """
[2349]2979 if insitu is None:
2980 insitu = rcParams['insitu']
[876]2981 self._math._setinsitu(insitu)
[513]2982 varlist = vars()
[876]2983 s = scantable(self._math._resample(self, method, width))
[1118]2984 s._add_history("resample", varlist)
[2349]2985 if insitu:
2986 self._assign(s)
2987 else:
2988 return s
[513]2989
[1862]2990 @asaplog_post_dec
[946]2991 def average_pol(self, mask=None, weight='none'):
[1846]2992 """\
[946]2993 Average the Polarisations together.
[1846]2994
[946]2995 Parameters:
[1846]2996
[946]2997 mask: An optional mask defining the region, where the
2998 averaging will be applied. The output will have all
2999 specified points masked.
[1855]3000
[946]3001 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3002 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]3003
[946]3004 """
3005 varlist = vars()
[1593]3006 mask = mask or ()
[1010]3007 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]3008 s._add_history("average_pol", varlist)
[992]3009 return s
[513]3010
[1862]3011 @asaplog_post_dec
[1145]3012 def average_beam(self, mask=None, weight='none'):
[1846]3013 """\
[1145]3014 Average the Beams together.
[1846]3015
[1145]3016 Parameters:
3017 mask: An optional mask defining the region, where the
3018 averaging will be applied. The output will have all
3019 specified points masked.
[1855]3020
[1145]3021 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3022 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]3023
[1145]3024 """
3025 varlist = vars()
[1593]3026 mask = mask or ()
[1145]3027 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3028 s._add_history("average_beam", varlist)
3029 return s
3030
[1586]3031 def parallactify(self, pflag):
[1846]3032 """\
[1843]3033 Set a flag to indicate whether this data should be treated as having
[1617]3034 been 'parallactified' (total phase == 0.0)
[1846]3035
[1617]3036 Parameters:
[1855]3037
[1843]3038 pflag: Bool indicating whether to turn this on (True) or
[1617]3039 off (False)
[1846]3040
[1617]3041 """
[1586]3042 varlist = vars()
3043 self._parallactify(pflag)
3044 self._add_history("parallactify", varlist)
3045
[1862]3046 @asaplog_post_dec
[992]3047 def convert_pol(self, poltype=None):
[1846]3048 """\
[992]3049 Convert the data to a different polarisation type.
[1565]3050 Note that you will need cross-polarisation terms for most conversions.
[1846]3051
[992]3052 Parameters:
[1855]3053
[992]3054 poltype: The new polarisation type. Valid types are:
[2431]3055 'linear', 'circular', 'stokes' and 'linpol'
[1846]3056
[992]3057 """
3058 varlist = vars()
[1859]3059 s = scantable(self._math._convertpol(self, poltype))
[1118]3060 s._add_history("convert_pol", varlist)
[992]3061 return s
3062
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defaults to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polynomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each individual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a pristine copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive verification state: 'N' = reject all remaining rows
        # without plotting; other values are passed to _get_verify_action
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed (xsm/ysm) vs original (xorg/yorg) data per row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # NOTE(review): after the first row the prompt default is
                # reset to None — confirm _get_verify_action treats None
                # as "no preset default"
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]3155
[2186]3156 @asaplog_post_dec
[2435]3157 def regrid_channel(self, width=5, plot=False, insitu=None):
3158 """\
3159 Regrid the spectra by the specified channel width
3160
3161 Parameters:
3162
3163 width: The channel width (float) of regridded spectra
3164 in the current spectral unit.
3165
3166 plot: [NOT IMPLEMENTED YET]
3167 plot the original and the regridded spectra.
3168 In this each indivual fit has to be approved, by
3169 typing 'y' or 'n'
3170
3171 insitu: if False a new scantable is returned.
3172 Otherwise, the scaling is done in-situ
3173 The default is taken from .asaprc (False)
3174
3175 """
3176 if insitu is None: insitu = rcParams['insitu']
3177 varlist = vars()
3178
3179 if plot:
3180 asaplog.post()
3181 asaplog.push("Verification plot is not implemtnetd yet.")
3182 asaplog.post("WARN")
3183
3184 s = self.copy()
3185 s._regrid_specchan(width)
3186
3187 s._add_history("regrid_channel", varlist)
3188
3189# if plot:
3190# from asap.asapplotter import new_asaplot
3191# theplot = new_asaplot(rcParams['plotter.gui'])
[2535]3192# from matplotlib import rc as rcp
3193# rcp('lines', linewidth=1)
[2435]3194# theplot.set_panels()
3195# ylab=s._get_ordinate_label()
3196# #theplot.palette(0,["#777777","red"])
3197# for r in xrange(s.nrow()):
3198# xsm=s._getabcissa(r)
3199# ysm=s._getspectrum(r)
3200# xorg=orgscan._getabcissa(r)
3201# yorg=orgscan._getspectrum(r)
3202# theplot.clear()
3203# theplot.hold()
3204# theplot.set_axes('ylabel',ylab)
3205# theplot.set_axes('xlabel',s._getabcissalabel(r))
3206# theplot.set_axes('title',s._getsourcename(r))
3207# theplot.set_line(label='Original',color="#777777")
3208# theplot.plot(xorg,yorg)
3209# theplot.set_line(label='Smoothed',color="red")
3210# theplot.plot(xsm,ysm)
3211# ### Ugly part for legend
3212# for i in [0,1]:
3213# theplot.subplots[0]['lines'].append(
3214# [theplot.subplots[0]['axes'].lines[i]]
3215# )
3216# theplot.release()
3217# ### Ugly part for legend
3218# theplot.subplots[0]['lines']=[]
3219# res = raw_input("Accept smoothing ([y]/n): ")
3220# if res.upper() == 'N':
3221# s._setspectrum(yorg, r)
3222# theplot.quit()
3223# del theplot
3224# del orgscan
3225
3226 if insitu: self._assign(s)
3227 else: return s
3228
3229 @asaplog_post_dec
[2186]3230 def _parse_wn(self, wn):
3231 if isinstance(wn, list) or isinstance(wn, tuple):
3232 return wn
3233 elif isinstance(wn, int):
3234 return [ wn ]
3235 elif isinstance(wn, str):
[2277]3236 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]3237 val = wn.split('-')
3238 val = [int(val[0]), int(val[1])]
3239 val.sort()
3240 res = [i for i in xrange(val[0], val[1]+1)]
[2277]3241 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]3242 val = int(wn[2:])+1
3243 res = [i for i in xrange(val)]
[2277]3244 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]3245 val = int(wn[:-2])+1
3246 res = [i for i in xrange(val)]
[2277]3247 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]3248 val = int(wn[1:])
3249 res = [i for i in xrange(val)]
[2277]3250 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]3251 val = int(wn[:-1])
3252 res = [i for i in xrange(val)]
[2411]3253 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3254 # then interpreted in C++
3255 # side as [a,a+1,...,a_nyq]
3256 # (CAS-3759)
[2186]3257 val = int(wn[2:])
[2411]3258 res = [val, -999]
3259 #res = [i for i in xrange(val, self.nchan()/2+1)]
3260 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3261 # then interpreted in C++
3262 # side as [a,a+1,...,a_nyq]
3263 # (CAS-3759)
[2186]3264 val = int(wn[:-2])
[2411]3265 res = [val, -999]
3266 #res = [i for i in xrange(val, self.nchan()/2+1)]
3267 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3268 # then interpreted in C++
3269 # side as [a+1,a+2,...,a_nyq]
3270 # (CAS-3759)
[2186]3271 val = int(wn[1:])+1
[2411]3272 res = [val, -999]
3273 #res = [i for i in xrange(val, self.nchan()/2+1)]
3274 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3275 # then interpreted in C++
3276 # side as [a+1,a+2,...,a_nyq]
3277 # (CAS-3759)
[2186]3278 val = int(wn[:-1])+1
[2411]3279 res = [val, -999]
3280 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]3281
[2186]3282 return res
3283 else:
3284 msg = 'wrong value given for addwn/rejwn'
3285 raise RuntimeError(msg)
3286
[2713]3287 @asaplog_post_dec
[2810]3288 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
[2767]3289 """\
3290 Subtract baseline based on parameters written in Baseline Table.
3291
3292 Parameters:
[2809]3293 insitu: if True, baseline fitting/subtraction is done
[2810]3294 in-situ. If False, a new scantable with
3295 baseline subtracted is returned. Actually,
3296 format of the returned value depends on both
3297 insitu and retfitres (see below).
[2767]3298 The default is taken from .asaprc (False)
[2810]3299 retfitres: if True, the results of baseline fitting (i.e.,
3300 coefficients and rms) are returned.
3301 default is False.
3302 The format of the returned value of this
3303 function varies as follows:
3304 (1) in case insitu=True and retfitres=True:
3305 fitting result.
3306 (2) in case insitu=True and retfitres=False:
3307 None.
3308 (3) in case insitu=False and retfitres=True:
3309 a dictionary containing a new scantable
3310 (with baseline subtracted) and the fitting
3311 results.
3312 (4) in case insitu=False and retfitres=False:
3313 a new scantable (with baseline subtracted).
[2767]3314 inbltable: name of input baseline table. The row number of
3315 scantable and that of inbltable must be
3316 identical.
3317 outbltable: name of output baseline table where baseline
3318 parameters and fitting results recorded.
3319 default is ''(no output).
[2809]3320 overwrite: if True when an existing baseline table is
3321 specified for outbltable, overwrites it.
3322 Otherwise there is no harm.
[2767]3323 default is False.
3324 """
3325
3326 try:
3327 varlist = vars()
[2810]3328 if retfitres is None: retfitres = False
[2767]3329 if inbltable is None: raise ValueError("bltable missing.")
3330 if outbltable is None: outbltable = ''
3331 if overwrite is None: overwrite = False
3332
3333 if insitu is None: insitu = rcParams['insitu']
3334 if insitu:
3335 workscan = self
3336 else:
3337 workscan = self.copy()
3338
3339 sres = workscan._apply_bltable(inbltable,
[2810]3340 retfitres,
[2767]3341 outbltable,
3342 os.path.exists(outbltable),
3343 overwrite)
[2810]3344 if retfitres: res = parse_fitresult(sres)
[2767]3345
3346 workscan._add_history('apply_bltable', varlist)
3347
3348 if insitu:
3349 self._assign(workscan)
[2810]3350 if retfitres:
3351 return res
3352 else:
3353 return None
[2767]3354 else:
[2810]3355 if retfitres:
3356 return {'scantable': workscan, 'fitresults': res}
3357 else:
3358 return workscan
[2767]3359
3360 except RuntimeError, e:
3361 raise_fitting_failure_exception(e)
3362
3363 @asaplog_post_dec
[2810]3364 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
[2767]3365 """\
3366 Subtract baseline based on parameters written in the input list.
3367
3368 Parameters:
[2809]3369 insitu: if True, baseline fitting/subtraction is done
[2810]3370 in-situ. If False, a new scantable with
3371 baseline subtracted is returned. Actually,
3372 format of the returned value depends on both
3373 insitu and retfitres (see below).
[2767]3374 The default is taken from .asaprc (False)
[2810]3375 retfitres: if True, the results of baseline fitting (i.e.,
3376 coefficients and rms) are returned.
3377 default is False.
3378 The format of the returned value of this
3379 function varies as follows:
3380 (1) in case insitu=True and retfitres=True:
3381 fitting result.
3382 (2) in case insitu=True and retfitres=False:
3383 None.
3384 (3) in case insitu=False and retfitres=True:
3385 a dictionary containing a new scantable
3386 (with baseline subtracted) and the fitting
3387 results.
3388 (4) in case insitu=False and retfitres=False:
3389 a new scantable (with baseline subtracted).
[2767]3390 blinfo: baseline parameter set stored in a dictionary
3391 or a list of dictionary. Each dictionary
3392 corresponds to each spectrum and must contain
3393 the following keys and values:
3394 'row': row number,
3395 'blfunc': function name. available ones include
3396 'poly', 'chebyshev', 'cspline' and
3397 'sinusoid',
3398 'order': maximum order of polynomial. needed
3399 if blfunc='poly' or 'chebyshev',
3400 'npiece': number or piecewise polynomial.
3401 needed if blfunc='cspline',
3402 'nwave': a list of sinusoidal wave numbers.
3403 needed if blfunc='sinusoid', and
3404 'masklist': min-max windows for channel mask.
3405 the specified ranges will be used
3406 for fitting.
3407 bltable: name of output baseline table where baseline
3408 parameters and fitting results recorded.
3409 default is ''(no output).
[2809]3410 overwrite: if True when an existing baseline table is
3411 specified for bltable, overwrites it.
3412 Otherwise there is no harm.
[2767]3413 default is False.
3414
3415 Example:
3416 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3417 'masklist':[[10,350],[352,510]]},
3418 {'row':1, 'blfunc':'cspline', 'npiece':3,
3419 'masklist':[[3,16],[19,404],[407,511]]}
3420 ])
3421
3422 the first spectrum (row=0) will be fitted with polynomial
3423 of order=5 and the next one (row=1) will be fitted with cubic
3424 spline consisting of 3 pieces.
3425 """
3426
3427 try:
3428 varlist = vars()
[2810]3429 if retfitres is None: retfitres = False
[2767]3430 if blinfo is None: blinfo = []
3431 if bltable is None: bltable = ''
3432 if overwrite is None: overwrite = False
3433
3434 if insitu is None: insitu = rcParams['insitu']
3435 if insitu:
3436 workscan = self
3437 else:
3438 workscan = self.copy()
3439
3440 nrow = workscan.nrow()
3441
3442 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3443
3444 print "in_blinfo=< "+ str(in_blinfo)+" >"
3445
3446 sres = workscan._sub_baseline(in_blinfo,
[2810]3447 retfitres,
[2767]3448 bltable,
3449 os.path.exists(bltable),
3450 overwrite)
[2810]3451 if retfitres: res = parse_fitresult(sres)
[2767]3452
3453 workscan._add_history('sub_baseline', varlist)
3454
3455 if insitu:
3456 self._assign(workscan)
[2810]3457 if retfitres:
3458 return res
3459 else:
3460 return None
[2767]3461 else:
[2810]3462 if retfitres:
3463 return {'scantable': workscan, 'fitresults': res}
3464 else:
3465 return workscan
[2767]3466
3467 except RuntimeError, e:
3468 raise_fitting_failure_exception(e)
3469
3470 @asaplog_post_dec
[2713]3471 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3472 whichrow=None, uselinefinder=None, edge=None,
3473 threshold=None, chan_avg_limit=None):
3474 """\
3475 Calculates and returns model selection criteria for a specified
3476 baseline model and a given spectrum data.
3477 Available values include Akaike Information Criterion (AIC), the
3478 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3479 Bayesian Information Criterion (BIC) and the Generalised Cross
3480 Validation (GCV).
[2186]3481
[2713]3482 Parameters:
3483 value: name of model selection criteria to calculate.
3484 available ones include 'aic', 'aicc', 'bic' and
3485 'gcv'. default is 'aicc'.
3486 blfunc: baseline function name. available ones include
3487 'chebyshev', 'cspline' and 'sinusoid'.
3488 default is 'chebyshev'.
3489 order: parameter for basline function. actually stands for
3490 order of polynomial (order) for 'chebyshev',
3491 number of spline pieces (npiece) for 'cspline' and
3492 maximum wave number for 'sinusoid', respectively.
3493 default is 5 (which is also the default order value
3494 for [auto_]chebyshev_baseline()).
3495 mask: an optional mask. default is [].
3496 whichrow: row number. default is 0 (the first row)
3497 uselinefinder: use sd.linefinder() to flag out line regions
3498 default is True.
3499 edge: an optional number of channel to drop at
3500 the edge of spectrum. If only one value is
3501 specified, the same number will be dropped
3502 from both sides of the spectrum. Default
3503 is to keep all channels. Nested tuples
3504 represent individual edge selection for
3505 different IFs (a number of spectral channels
3506 can be different)
3507 default is (0, 0).
3508 threshold: the threshold used by line finder. It is
3509 better to keep it large as only strong lines
3510 affect the baseline solution.
3511 default is 3.
3512 chan_avg_limit: a maximum number of consequtive spectral
3513 channels to average during the search of
3514 weak and broad lines. The default is no
3515 averaging (and no search for weak lines).
3516 If such lines can affect the fitted baseline
3517 (e.g. a high order polynomial is fitted),
3518 increase this parameter (usually values up
3519 to 8 are reasonable). Most users of this
3520 method should find the default value sufficient.
3521 default is 1.
3522
3523 Example:
3524 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3525 """
3526
3527 try:
3528 varlist = vars()
3529
3530 if value is None: value = 'aicc'
3531 if blfunc is None: blfunc = 'chebyshev'
3532 if order is None: order = 5
3533 if mask is None: mask = []
3534 if whichrow is None: whichrow = 0
3535 if uselinefinder is None: uselinefinder = True
3536 if edge is None: edge = (0, 0)
3537 if threshold is None: threshold = 3
3538 if chan_avg_limit is None: chan_avg_limit = 1
3539
3540 return self._calc_aic(value, blfunc, order, mask,
3541 whichrow, uselinefinder, edge,
3542 threshold, chan_avg_limit)
3543
3544 except RuntimeError, e:
3545 raise_fitting_failure_exception(e)
3546
    @asaplog_post_dec
    def sinusoid_baseline(self, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None,
                          insitu=None,
                          clipthresh=None, clipniter=None,
                          plot=None,
                          getresidual=None,
                          showprogress=None, minnrow=None,
                          outlog=None,
                          blfile=None, csvformat=None,
                          bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            mask:        an optional mask
            applyfft:    if True use some method, such as FFT, to find
                         strongest sinusoidal components in the wavenumber
                         domain to be used for baseline fitting.
                         default is True.
            fftmethod:   method to find the strong sinusoidal components.
                         now only 'fft' is available and it is the default.
            fftthresh:   the threshold to select wave numbers to be used for
                         fitting from the distribution of amplitudes in the
                         wavenumber domain.
                         both float and string values accepted.
                         given a float value, the unit is set to sigma.
                         for string values, allowed formats include:
                             'xsigma' or 'x' (= x-sigma level. e.g.,
                             '3sigma'), or
                             'topx' (= the x strongest ones, e.g. 'top5').
                         default is 3.0 (unit: sigma).
            addwn:       the additional wave numbers to be used for fitting.
                         list or integer value is accepted to specify every
                         wave numbers. also string value can be used in case
                         you need to specify wave numbers in a certain range,
                         e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                               '<a' (= 0,1,...,a-2,a-1),
                               '>=a' (= a, a+1, ... up to the maximum wave
                                      number corresponding to the Nyquist
                                      frequency for the case of FFT).
                         default is [0].
            rejwn:       the wave numbers NOT to be used for fitting.
                         can be set just as addwn but has higher priority:
                         wave numbers which are specified both in addwn
                         and rejwn will NOT be used. default is [].
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
            clipthresh:  Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:   maximum number of iteration of 'clipthresh'-sigma
                         clipping (default is 0)
            plot:        *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                         plot the fit and the residual. In this each
                         indivual fit has to be approved, by typing 'y'
                         or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                         default is True.
            minnrow:     minimum number of input spectra to show.
                         default is 1000.
            outlog:      Output the coefficients of the best-fit
                         function to logger (default is False)
            blfile:      Name of a text file in which the best-fit
                         parameter values to be written
                         (default is '': no file/logger output)
            csvformat:   if True blfile is csv-formatted, default is False.
            bltable:     name of a baseline table where fitting results
                         (coefficients, rms, etc.) are to be written.
                         if given, fitting results will NOT be output to
                         scantable (insitu=True) or None will be
                         returned (insitu=False).
                         (default is "": no table output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace every unspecified parameter by its documented default
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # pack the FFT options into the single comma-separated string
            # expected by the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # csv flag is passed as a one-character prefix of the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask,
                                        fftinfo,
                                        #applyfft, fftmethod.lower(),
                                        #str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)
            workscan._add_history('sinusoid_baseline', varlist)

            # when a baseline table is written, results are NOT propagated
            # back to the scantable (see the bltable parameter above)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3692
3693
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None,
                               insitu=None,
                               clipthresh=None, clipniter=None,
                               edge=None, threshold=None, chan_avg_limit=None,
                               plot=None,
                               getresidual=None,
                               showprogress=None, minnrow=None,
                               outlog=None,
                               blfile=None, csvformat=None,
                               bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [0].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace every unspecified parameter by its documented default
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # pack the FFT options into the single comma-separated string
            # expected by the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # csv flag is passed as a one-character prefix of the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask,
                                             fftinfo,
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile, bltable)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            # when a baseline table is written, results are NOT propagated
            # back to the scantable (see the bltable parameter above)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3861
3862 @asaplog_post_dec
[2771]3863 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]3864 clipthresh=None, clipniter=None, plot=None,
3865 getresidual=None, showprogress=None, minnrow=None,
[2767]3866 outlog=None, blfile=None, csvformat=None,
3867 bltable=None):
[1846]3868 """\
[2349]3869 Return a scan which has been baselined (all rows) by cubic spline
3870 function (piecewise cubic polynomial).
3871
[513]3872 Parameters:
[2771]3873 mask: An optional mask
3874 npiece: Number of pieces. (default is 2)
[2189]3875 insitu: If False a new scantable is returned.
3876 Otherwise, the scaling is done in-situ
3877 The default is taken from .asaprc (False)
3878 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3879 clipniter: maximum number of iteration of 'clipthresh'-sigma
3880 clipping (default is 0)
[2189]3881 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3882 plot the fit and the residual. In this each
3883 indivual fit has to be approved, by typing 'y'
3884 or 'n'
3885 getresidual: if False, returns best-fit values instead of
3886 residual. (default is True)
3887 showprogress: show progress status for large data.
3888 default is True.
3889 minnrow: minimum number of input spectra to show.
3890 default is 1000.
3891 outlog: Output the coefficients of the best-fit
3892 function to logger (default is False)
3893 blfile: Name of a text file in which the best-fit
3894 parameter values to be written
3895 (default is "": no file/logger output)
[2641]3896 csvformat: if True blfile is csv-formatted, default is False.
[2767]3897 bltable: name of a baseline table where fitting results
3898 (coefficients, rms, etc.) are to be written.
3899 if given, fitting results will NOT be output to
3900 scantable (insitu=True) or None will be
3901 returned (insitu=False).
3902 (default is "": no table output)
[1846]3903
[2012]3904 Example:
[2349]3905 # return a scan baselined by a cubic spline consisting of 2 pieces
3906 # (i.e., 1 internal knot),
[2012]3907 # also with 3-sigma clipping, iteration up to 4 times
3908 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]3909
3910 Note:
3911 The best-fit parameter values output in logger and/or blfile are now
3912 based on specunit of 'channel'.
[2012]3913 """
3914
[2186]3915 try:
3916 varlist = vars()
3917
3918 if insitu is None: insitu = rcParams['insitu']
3919 if insitu:
3920 workscan = self
3921 else:
3922 workscan = self.copy()
[1855]3923
[2410]3924 if mask is None: mask = []
[2189]3925 if npiece is None: npiece = 2
3926 if clipthresh is None: clipthresh = 3.0
3927 if clipniter is None: clipniter = 0
3928 if plot is None: plot = False
3929 if getresidual is None: getresidual = True
3930 if showprogress is None: showprogress = True
3931 if minnrow is None: minnrow = 1000
3932 if outlog is None: outlog = False
3933 if blfile is None: blfile = ''
[2767]3934 if csvformat is None: csvformat = False
3935 if bltable is None: bltable = ''
[1855]3936
[2767]3937 scsvformat = 'T' if csvformat else 'F'
[2641]3938
[2012]3939 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]3940 workscan._cspline_baseline(mask, npiece,
3941 clipthresh, clipniter,
[2349]3942 getresidual,
3943 pack_progress_params(showprogress,
[2641]3944 minnrow),
[2767]3945 outlog, scsvformat+blfile,
3946 bltable)
[2012]3947 workscan._add_history("cspline_baseline", varlist)
[2767]3948
3949 if bltable == '':
3950 if insitu:
3951 self._assign(workscan)
3952 else:
3953 return workscan
[2012]3954 else:
[2767]3955 if not insitu:
3956 return None
[2012]3957
3958 except RuntimeError, e:
[2186]3959 raise_fitting_failure_exception(e)
[1855]3960
[2186]3961 @asaplog_post_dec
[2771]3962 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]3963 clipthresh=None, clipniter=None,
3964 edge=None, threshold=None, chan_avg_limit=None,
3965 getresidual=None, plot=None,
3966 showprogress=None, minnrow=None, outlog=None,
[2767]3967 blfile=None, csvformat=None, bltable=None):
[2012]3968 """\
3969 Return a scan which has been baselined (all rows) by cubic spline
3970 function (piecewise cubic polynomial).
3971 Spectral lines are detected first using linefinder and masked out
3972 to avoid them affecting the baseline solution.
3973
3974 Parameters:
[2771]3975 mask: an optional mask retreived from scantable
3976 npiece: Number of pieces. (default is 2)
[2189]3977 insitu: if False a new scantable is returned.
3978 Otherwise, the scaling is done in-situ
3979 The default is taken from .asaprc (False)
3980 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3981 clipniter: maximum number of iteration of 'clipthresh'-sigma
3982 clipping (default is 0)
[2189]3983 edge: an optional number of channel to drop at
3984 the edge of spectrum. If only one value is
3985 specified, the same number will be dropped
3986 from both sides of the spectrum. Default
3987 is to keep all channels. Nested tuples
3988 represent individual edge selection for
3989 different IFs (a number of spectral channels
3990 can be different)
3991 threshold: the threshold used by line finder. It is
3992 better to keep it large as only strong lines
3993 affect the baseline solution.
3994 chan_avg_limit: a maximum number of consequtive spectral
3995 channels to average during the search of
3996 weak and broad lines. The default is no
3997 averaging (and no search for weak lines).
3998 If such lines can affect the fitted baseline
3999 (e.g. a high order polynomial is fitted),
4000 increase this parameter (usually values up
4001 to 8 are reasonable). Most users of this
4002 method should find the default value sufficient.
4003 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4004 plot the fit and the residual. In this each
4005 indivual fit has to be approved, by typing 'y'
4006 or 'n'
4007 getresidual: if False, returns best-fit values instead of
4008 residual. (default is True)
4009 showprogress: show progress status for large data.
4010 default is True.
4011 minnrow: minimum number of input spectra to show.
4012 default is 1000.
4013 outlog: Output the coefficients of the best-fit
4014 function to logger (default is False)
4015 blfile: Name of a text file in which the best-fit
4016 parameter values to be written
4017 (default is "": no file/logger output)
[2641]4018 csvformat: if True blfile is csv-formatted, default is False.
[2767]4019 bltable: name of a baseline table where fitting results
4020 (coefficients, rms, etc.) are to be written.
4021 if given, fitting results will NOT be output to
4022 scantable (insitu=True) or None will be
4023 returned (insitu=False).
4024 (default is "": no table output)
[1846]4025
[1907]4026 Example:
[2012]4027 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]4028
4029 Note:
4030 The best-fit parameter values output in logger and/or blfile are now
4031 based on specunit of 'channel'.
[2012]4032 """
[1846]4033
[2186]4034 try:
4035 varlist = vars()
[2012]4036
[2186]4037 if insitu is None: insitu = rcParams['insitu']
4038 if insitu:
4039 workscan = self
[1391]4040 else:
[2186]4041 workscan = self.copy()
4042
[2410]4043 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4044 if mask is None: mask = []
[2186]4045 if npiece is None: npiece = 2
4046 if clipthresh is None: clipthresh = 3.0
4047 if clipniter is None: clipniter = 0
4048 if edge is None: edge = (0, 0)
4049 if threshold is None: threshold = 3
4050 if chan_avg_limit is None: chan_avg_limit = 1
4051 if plot is None: plot = False
4052 if getresidual is None: getresidual = True
[2189]4053 if showprogress is None: showprogress = True
4054 if minnrow is None: minnrow = 1000
[2186]4055 if outlog is None: outlog = False
4056 if blfile is None: blfile = ''
[2641]4057 if csvformat is None: csvformat = False
[2767]4058 if bltable is None: bltable = ''
[1819]4059
[2767]4060 scsvformat = 'T' if csvformat else 'F'
[2641]4061
[2277]4062 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4063 workscan._auto_cspline_baseline(mask, npiece,
4064 clipthresh, clipniter,
[2269]4065 normalise_edge_param(edge),
4066 threshold,
4067 chan_avg_limit, getresidual,
4068 pack_progress_params(showprogress,
4069 minnrow),
[2767]4070 outlog,
4071 scsvformat+blfile,
4072 bltable)
[2012]4073 workscan._add_history("auto_cspline_baseline", varlist)
[2767]4074
4075 if bltable == '':
4076 if insitu:
4077 self._assign(workscan)
4078 else:
4079 return workscan
[1856]4080 else:
[2767]4081 if not insitu:
4082 return None
[2012]4083
4084 except RuntimeError, e:
[2186]4085 raise_fitting_failure_exception(e)
[513]4086
[1931]4087 @asaplog_post_dec
[2771]4088 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
[2645]4089 clipthresh=None, clipniter=None, plot=None,
4090 getresidual=None, showprogress=None, minnrow=None,
[2767]4091 outlog=None, blfile=None, csvformat=None,
4092 bltable=None):
[2645]4093 """\
4094 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4095
4096 Parameters:
[2771]4097 mask: An optional mask
4098 order: the maximum order of Chebyshev polynomial (default is 5)
[2645]4099 insitu: If False a new scantable is returned.
4100 Otherwise, the scaling is done in-situ
4101 The default is taken from .asaprc (False)
4102 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4103 clipniter: maximum number of iteration of 'clipthresh'-sigma
4104 clipping (default is 0)
4105 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4106 plot the fit and the residual. In this each
4107 indivual fit has to be approved, by typing 'y'
4108 or 'n'
4109 getresidual: if False, returns best-fit values instead of
4110 residual. (default is True)
4111 showprogress: show progress status for large data.
4112 default is True.
4113 minnrow: minimum number of input spectra to show.
4114 default is 1000.
4115 outlog: Output the coefficients of the best-fit
4116 function to logger (default is False)
4117 blfile: Name of a text file in which the best-fit
4118 parameter values to be written
4119 (default is "": no file/logger output)
4120 csvformat: if True blfile is csv-formatted, default is False.
[2767]4121 bltable: name of a baseline table where fitting results
4122 (coefficients, rms, etc.) are to be written.
4123 if given, fitting results will NOT be output to
4124 scantable (insitu=True) or None will be
4125 returned (insitu=False).
4126 (default is "": no table output)
[2645]4127
4128 Example:
4129 # return a scan baselined by a cubic spline consisting of 2 pieces
4130 # (i.e., 1 internal knot),
4131 # also with 3-sigma clipping, iteration up to 4 times
4132 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4133
4134 Note:
4135 The best-fit parameter values output in logger and/or blfile are now
4136 based on specunit of 'channel'.
4137 """
4138
4139 try:
4140 varlist = vars()
4141
4142 if insitu is None: insitu = rcParams['insitu']
4143 if insitu:
4144 workscan = self
4145 else:
4146 workscan = self.copy()
4147
4148 if mask is None: mask = []
4149 if order is None: order = 5
4150 if clipthresh is None: clipthresh = 3.0
4151 if clipniter is None: clipniter = 0
4152 if plot is None: plot = False
4153 if getresidual is None: getresidual = True
4154 if showprogress is None: showprogress = True
4155 if minnrow is None: minnrow = 1000
4156 if outlog is None: outlog = False
4157 if blfile is None: blfile = ''
[2767]4158 if csvformat is None: csvformat = False
4159 if bltable is None: bltable = ''
[2645]4160
[2767]4161 scsvformat = 'T' if csvformat else 'F'
[2645]4162
4163 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4164 workscan._chebyshev_baseline(mask, order,
4165 clipthresh, clipniter,
[2645]4166 getresidual,
4167 pack_progress_params(showprogress,
4168 minnrow),
[2767]4169 outlog, scsvformat+blfile,
4170 bltable)
[2645]4171 workscan._add_history("chebyshev_baseline", varlist)
[2767]4172
4173 if bltable == '':
4174 if insitu:
4175 self._assign(workscan)
4176 else:
4177 return workscan
[2645]4178 else:
[2767]4179 if not insitu:
4180 return None
[2645]4181
4182 except RuntimeError, e:
4183 raise_fitting_failure_exception(e)
4184
4185 @asaplog_post_dec
[2771]4186 def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
[2645]4187 clipthresh=None, clipniter=None,
4188 edge=None, threshold=None, chan_avg_limit=None,
4189 getresidual=None, plot=None,
4190 showprogress=None, minnrow=None, outlog=None,
[2767]4191 blfile=None, csvformat=None, bltable=None):
[2645]4192 """\
4193 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4194 Spectral lines are detected first using linefinder and masked out
4195 to avoid them affecting the baseline solution.
4196
4197 Parameters:
[2771]4198 mask: an optional mask retreived from scantable
4199 order: the maximum order of Chebyshev polynomial (default is 5)
[2645]4200 insitu: if False a new scantable is returned.
4201 Otherwise, the scaling is done in-situ
4202 The default is taken from .asaprc (False)
4203 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4204 clipniter: maximum number of iteration of 'clipthresh'-sigma
4205 clipping (default is 0)
4206 edge: an optional number of channel to drop at
4207 the edge of spectrum. If only one value is
4208 specified, the same number will be dropped
4209 from both sides of the spectrum. Default
4210 is to keep all channels. Nested tuples
4211 represent individual edge selection for
4212 different IFs (a number of spectral channels
4213 can be different)
4214 threshold: the threshold used by line finder. It is
4215 better to keep it large as only strong lines
4216 affect the baseline solution.
4217 chan_avg_limit: a maximum number of consequtive spectral
4218 channels to average during the search of
4219 weak and broad lines. The default is no
4220 averaging (and no search for weak lines).
4221 If such lines can affect the fitted baseline
4222 (e.g. a high order polynomial is fitted),
4223 increase this parameter (usually values up
4224 to 8 are reasonable). Most users of this
4225 method should find the default value sufficient.
4226 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4227 plot the fit and the residual. In this each
4228 indivual fit has to be approved, by typing 'y'
4229 or 'n'
4230 getresidual: if False, returns best-fit values instead of
4231 residual. (default is True)
4232 showprogress: show progress status for large data.
4233 default is True.
4234 minnrow: minimum number of input spectra to show.
4235 default is 1000.
4236 outlog: Output the coefficients of the best-fit
4237 function to logger (default is False)
4238 blfile: Name of a text file in which the best-fit
4239 parameter values to be written
4240 (default is "": no file/logger output)
4241 csvformat: if True blfile is csv-formatted, default is False.
[2767]4242 bltable: name of a baseline table where fitting results
4243 (coefficients, rms, etc.) are to be written.
4244 if given, fitting results will NOT be output to
4245 scantable (insitu=True) or None will be
4246 returned (insitu=False).
4247 (default is "": no table output)
[2645]4248
4249 Example:
4250 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4251
4252 Note:
4253 The best-fit parameter values output in logger and/or blfile are now
4254 based on specunit of 'channel'.
4255 """
4256
4257 try:
4258 varlist = vars()
4259
4260 if insitu is None: insitu = rcParams['insitu']
4261 if insitu:
4262 workscan = self
4263 else:
4264 workscan = self.copy()
4265
4266 if mask is None: mask = []
4267 if order is None: order = 5
4268 if clipthresh is None: clipthresh = 3.0
4269 if clipniter is None: clipniter = 0
4270 if edge is None: edge = (0, 0)
4271 if threshold is None: threshold = 3
4272 if chan_avg_limit is None: chan_avg_limit = 1
4273 if plot is None: plot = False
4274 if getresidual is None: getresidual = True
4275 if showprogress is None: showprogress = True
4276 if minnrow is None: minnrow = 1000
4277 if outlog is None: outlog = False
4278 if blfile is None: blfile = ''
4279 if csvformat is None: csvformat = False
[2767]4280 if bltable is None: bltable = ''
[2645]4281
[2767]4282 scsvformat = 'T' if csvformat else 'F'
[2645]4283
4284 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4285 workscan._auto_chebyshev_baseline(mask, order,
4286 clipthresh, clipniter,
[2645]4287 normalise_edge_param(edge),
4288 threshold,
4289 chan_avg_limit, getresidual,
4290 pack_progress_params(showprogress,
4291 minnrow),
[2767]4292 outlog, scsvformat+blfile,
4293 bltable)
[2645]4294 workscan._add_history("auto_chebyshev_baseline", varlist)
[2767]4295
4296 if bltable == '':
4297 if insitu:
4298 self._assign(workscan)
4299 else:
4300 return workscan
[2645]4301 else:
[2767]4302 if not insitu:
4303 return None
[2645]4304
4305 except RuntimeError, e:
4306 raise_fitting_failure_exception(e)
4307
4308 @asaplog_post_dec
[2771]4309 def poly_baseline(self, mask=None, order=None, insitu=None,
[2767]4310 clipthresh=None, clipniter=None, plot=None,
[2269]4311 getresidual=None, showprogress=None, minnrow=None,
[2767]4312 outlog=None, blfile=None, csvformat=None,
4313 bltable=None):
[1907]4314 """\
4315 Return a scan which has been baselined (all rows) by a polynomial.
4316 Parameters:
[2771]4317 mask: an optional mask
4318 order: the order of the polynomial (default is 0)
[2189]4319 insitu: if False a new scantable is returned.
4320 Otherwise, the scaling is done in-situ
4321 The default is taken from .asaprc (False)
[2767]4322 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4323 clipniter: maximum number of iteration of 'clipthresh'-sigma
4324 clipping (default is 0)
[2189]4325 plot: plot the fit and the residual. In this each
4326 indivual fit has to be approved, by typing 'y'
4327 or 'n'
4328 getresidual: if False, returns best-fit values instead of
4329 residual. (default is True)
4330 showprogress: show progress status for large data.
4331 default is True.
4332 minnrow: minimum number of input spectra to show.
4333 default is 1000.
4334 outlog: Output the coefficients of the best-fit
4335 function to logger (default is False)
4336 blfile: Name of a text file in which the best-fit
4337 parameter values to be written
4338 (default is "": no file/logger output)
[2641]4339 csvformat: if True blfile is csv-formatted, default is False.
[2767]4340 bltable: name of a baseline table where fitting results
4341 (coefficients, rms, etc.) are to be written.
4342 if given, fitting results will NOT be output to
4343 scantable (insitu=True) or None will be
4344 returned (insitu=False).
4345 (default is "": no table output)
[2012]4346
[1907]4347 Example:
4348 # return a scan baselined by a third order polynomial,
4349 # not using a mask
4350 bscan = scan.poly_baseline(order=3)
4351 """
[1931]4352
[2186]4353 try:
4354 varlist = vars()
[1931]4355
[2269]4356 if insitu is None:
4357 insitu = rcParams["insitu"]
[2186]4358 if insitu:
4359 workscan = self
4360 else:
4361 workscan = self.copy()
[1907]4362
[2410]4363 if mask is None: mask = []
[2189]4364 if order is None: order = 0
[2767]4365 if clipthresh is None: clipthresh = 3.0
4366 if clipniter is None: clipniter = 0
[2189]4367 if plot is None: plot = False
4368 if getresidual is None: getresidual = True
4369 if showprogress is None: showprogress = True
4370 if minnrow is None: minnrow = 1000
4371 if outlog is None: outlog = False
[2767]4372 if blfile is None: blfile = ''
[2641]4373 if csvformat is None: csvformat = False
[2767]4374 if bltable is None: bltable = ''
[1907]4375
[2767]4376 scsvformat = 'T' if csvformat else 'F'
[2641]4377
[2012]4378 if plot:
[2269]4379 outblfile = (blfile != "") and \
[2349]4380 os.path.exists(os.path.expanduser(
4381 os.path.expandvars(blfile))
4382 )
[2269]4383 if outblfile:
4384 blf = open(blfile, "a")
[2012]4385
[1907]4386 f = fitter()
4387 f.set_function(lpoly=order)
[2186]4388
4389 rows = xrange(workscan.nrow())
4390 #if len(rows) > 0: workscan._init_blinfo()
[2610]4391
4392 action = "H"
[1907]4393 for r in rows:
4394 f.x = workscan._getabcissa(r)
4395 f.y = workscan._getspectrum(r)
[2541]4396 if mask:
4397 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4398 else: # mask=None
4399 f.mask = workscan._getmask(r)
4400
[1907]4401 f.data = None
4402 f.fit()
[2541]4403
[2610]4404 if action != "Y": # skip plotting when accepting all
4405 f.plot(residual=True)
4406 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4407 #if accept_fit.upper() == "N":
4408 # #workscan._append_blinfo(None, None, None)
4409 # continue
4410 accept_fit = self._get_verify_action("Accept fit?",action)
4411 if r == 0: action = None
[1907]4412 if accept_fit.upper() == "N":
4413 continue
[2610]4414 elif accept_fit.upper() == "R":
4415 break
4416 elif accept_fit.upper() == "A":
4417 action = "Y"
[2012]4418
4419 blpars = f.get_parameters()
4420 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4421 #workscan._append_blinfo(blpars, masklist, f.mask)
[2269]4422 workscan._setspectrum((f.fitter.getresidual()
4423 if getresidual else f.fitter.getfit()), r)
[1907]4424
[2012]4425 if outblfile:
4426 rms = workscan.get_rms(f.mask, r)
[2269]4427 dataout = \
4428 workscan.format_blparams_row(blpars["params"],
4429 blpars["fixed"],
4430 rms, str(masklist),
[2641]4431 r, True, csvformat)
[2012]4432 blf.write(dataout)
4433
[1907]4434 f._p.unmap()
4435 f._p = None
[2012]4436
[2349]4437 if outblfile:
4438 blf.close()
[1907]4439 else:
[2767]4440 workscan._poly_baseline(mask, order,
4441 clipthresh, clipniter, #
4442 getresidual,
[2269]4443 pack_progress_params(showprogress,
4444 minnrow),
[2767]4445 outlog, scsvformat+blfile,
4446 bltable) #
[1907]4447
4448 workscan._add_history("poly_baseline", varlist)
4449
4450 if insitu:
4451 self._assign(workscan)
4452 else:
4453 return workscan
4454
[1919]4455 except RuntimeError, e:
[2186]4456 raise_fitting_failure_exception(e)
[1907]4457
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # snapshot of the raw arguments for the history entry below
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for unset parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # 'T'/'F' flag prepended to blfile for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            # expand edge into a flat per-IF list (two entries per IF)
            edge = normalise_edge_param(edge)

            if plot:
                # interactive per-row fitting with the Python fitter;
                # NOTE(review): results are appended to blfile only if the
                # file already exists - confirm this is intentional
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' drives the interactive accept loop: "H" shows
                # help on the first prompt, "Y" accepts all remaining fits
                action = "H"
                for r in rows:
                    # per-IF edge entries are stored pairwise in 'edge'
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    # mask out detected lines before fitting the baseline
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: fitting is done entirely in C++
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is given, results go there and the
            # scantable is neither assigned nor returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]4642
4643 def _init_blinfo(self):
4644 """\
4645 Initialise the following three auxiliary members:
4646 blpars : parameters of the best-fit baseline,
4647 masklists : mask data (edge positions of masked channels) and
4648 actualmask : mask data (in boolean list),
4649 to keep for use later (including output to logger/text files).
4650 Used by poly_baseline() and auto_poly_baseline() in case of
4651 'plot=True'.
4652 """
4653 self.blpars = []
4654 self.masklists = []
4655 self.actualmask = []
4656 return
[880]4657
[2012]4658 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4659 """\
4660 Append baseline-fitting related info to blpars, masklist and
4661 actualmask.
4662 """
4663 self.blpars.append(data_blpars)
4664 self.masklists.append(data_masklists)
4665 self.actualmask.append(data_actualmask)
4666 return
4667
[1862]4668 @asaplog_post_dec
[914]4669 def rotate_linpolphase(self, angle):
[1846]4670 """\
[914]4671 Rotate the phase of the complex polarization O=Q+iU correlation.
4672 This is always done in situ in the raw data. So if you call this
4673 function more than once then each call rotates the phase further.
[1846]4674
[914]4675 Parameters:
[1846]4676
[914]4677 angle: The angle (degrees) to rotate (add) by.
[1846]4678
4679 Example::
4680
[914]4681 scan.rotate_linpolphase(2.3)
[1846]4682
[914]4683 """
4684 varlist = vars()
[936]4685 self._math._rotate_linpolphase(self, angle)
[914]4686 self._add_history("rotate_linpolphase", varlist)
4687 return
[710]4688
[1862]4689 @asaplog_post_dec
[914]4690 def rotate_xyphase(self, angle):
[1846]4691 """\
[914]4692 Rotate the phase of the XY correlation. This is always done in situ
4693 in the data. So if you call this function more than once
4694 then each call rotates the phase further.
[1846]4695
[914]4696 Parameters:
[1846]4697
[914]4698 angle: The angle (degrees) to rotate (add) by.
[1846]4699
4700 Example::
4701
[914]4702 scan.rotate_xyphase(2.3)
[1846]4703
[914]4704 """
4705 varlist = vars()
[936]4706 self._math._rotate_xyphase(self, angle)
[914]4707 self._add_history("rotate_xyphase", varlist)
4708 return
4709
[1862]4710 @asaplog_post_dec
[914]4711 def swap_linears(self):
[1846]4712 """\
[1573]4713 Swap the linear polarisations XX and YY, or better the first two
[1348]4714 polarisations as this also works for ciculars.
[914]4715 """
4716 varlist = vars()
[936]4717 self._math._swap_linears(self)
[914]4718 self._add_history("swap_linears", varlist)
4719 return
4720
[1862]4721 @asaplog_post_dec
[914]4722 def invert_phase(self):
[1846]4723 """\
[914]4724 Invert the phase of the complex polarisation
4725 """
4726 varlist = vars()
[936]4727 self._math._invert_phase(self)
[914]4728 self._add_history("invert_phase", varlist)
4729 return
4730
[1862]4731 @asaplog_post_dec
[876]4732 def add(self, offset, insitu=None):
[1846]4733 """\
[513]4734 Return a scan where all spectra have the offset added
[1846]4735
[513]4736 Parameters:
[1846]4737
[513]4738 offset: the offset
[1855]4739
[513]4740 insitu: if False a new scantable is returned.
4741 Otherwise, the scaling is done in-situ
4742 The default is taken from .asaprc (False)
[1846]4743
[513]4744 """
4745 if insitu is None: insitu = rcParams['insitu']
[876]4746 self._math._setinsitu(insitu)
[513]4747 varlist = vars()
[876]4748 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]4749 s._add_history("add", varlist)
[876]4750 if insitu:
4751 self._assign(s)
4752 else:
[513]4753 return s
4754
[1862]4755 @asaplog_post_dec
[1308]4756 def scale(self, factor, tsys=True, insitu=None):
[1846]4757 """\
4758
[1938]4759 Return a scan where all spectra are scaled by the given 'factor'
[1846]4760
[513]4761 Parameters:
[1846]4762
[1819]4763 factor: the scaling factor (float or 1D float list)
[1855]4764
[513]4765 insitu: if False a new scantable is returned.
4766 Otherwise, the scaling is done in-situ
4767 The default is taken from .asaprc (False)
[1855]4768
[513]4769 tsys: if True (default) then apply the operation to Tsys
4770 as well as the data
[1846]4771
[513]4772 """
4773 if insitu is None: insitu = rcParams['insitu']
[876]4774 self._math._setinsitu(insitu)
[513]4775 varlist = vars()
[1819]4776 s = None
4777 import numpy
4778 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]4779 if isinstance(factor[0], list) or isinstance(factor[0],
4780 numpy.ndarray):
[1819]4781 from asapmath import _array2dOp
[2320]4782 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]4783 else:
[2320]4784 s = scantable( self._math._arrayop( self, factor,
4785 "MUL", tsys ) )
[1819]4786 else:
[2320]4787 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]4788 s._add_history("scale", varlist)
[876]4789 if insitu:
4790 self._assign(s)
4791 else:
[513]4792 return s
4793
[2349]4794 @preserve_selection
4795 def set_sourcetype(self, match, matchtype="pattern",
[1504]4796 sourcetype="reference"):
[1846]4797 """\
[1502]4798 Set the type of the source to be an source or reference scan
[1846]4799 using the provided pattern.
4800
[1502]4801 Parameters:
[1846]4802
[1504]4803 match: a Unix style pattern, regular expression or selector
[1855]4804
[1504]4805 matchtype: 'pattern' (default) UNIX style pattern or
4806 'regex' regular expression
[1855]4807
[1502]4808 sourcetype: the type of the source to use (source/reference)
[1846]4809
[1502]4810 """
4811 varlist = vars()
4812 stype = -1
[2480]4813 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
[1502]4814 stype = 1
[2480]4815 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
[1502]4816 stype = 0
[1504]4817 else:
[2480]4818 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
[1504]4819 if matchtype.lower().startswith("p"):
4820 matchtype = "pattern"
4821 elif matchtype.lower().startswith("r"):
4822 matchtype = "regex"
4823 else:
4824 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]4825 sel = selector()
4826 if isinstance(match, selector):
4827 sel = match
4828 else:
[2480]4829 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4830 self.set_selection(sel)
[1502]4831 self._setsourcetype(stype)
[1573]4832 self._add_history("set_sourcetype", varlist)
[1502]4833
[2818]4834
4835 def set_sourcename(self, name):
4836 varlist = vars()
4837 self._setsourcename(name)
4838 self._add_history("set_sourcename", varlist)
4839
[1862]4840 @asaplog_post_dec
[1857]4841 @preserve_selection
[1819]4842 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]4843 """\
[670]4844 This function allows to build quotients automatically.
[1819]4845 It assumes the observation to have the same number of
[670]4846 "ons" and "offs"
[1846]4847
[670]4848 Parameters:
[1846]4849
[710]4850 preserve: you can preserve (default) the continuum or
4851 remove it. The equations used are
[1857]4852
[670]4853 preserve: Output = Toff * (on/off) - Toff
[1857]4854
[1070]4855 remove: Output = Toff * (on/off) - Ton
[1855]4856
[1573]4857 mode: the on/off detection mode
[1348]4858 'paired' (default)
4859 identifies 'off' scans by the
4860 trailing '_R' (Mopra/Parkes) or
4861 '_e'/'_w' (Tid) and matches
4862 on/off pairs from the observing pattern
[1502]4863 'time'
4864 finds the closest off in time
[1348]4865
[1857]4866 .. todo:: verify argument is not implemented
4867
[670]4868 """
[1857]4869 varlist = vars()
[1348]4870 modes = ["time", "paired"]
[670]4871 if not mode in modes:
[876]4872 msg = "please provide valid mode. Valid modes are %s" % (modes)
4873 raise ValueError(msg)
[1348]4874 s = None
4875 if mode.lower() == "paired":
[2840]4876 from asap._asap import srctype
[1857]4877 sel = self.get_selection()
[2840]4878 #sel.set_query("SRCTYPE==psoff")
4879 sel.set_types(srctype.psoff)
[1356]4880 self.set_selection(sel)
[1348]4881 offs = self.copy()
[2840]4882 #sel.set_query("SRCTYPE==pson")
4883 sel.set_types(srctype.pson)
[1356]4884 self.set_selection(sel)
[1348]4885 ons = self.copy()
4886 s = scantable(self._math._quotient(ons, offs, preserve))
4887 elif mode.lower() == "time":
4888 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]4889 s._add_history("auto_quotient", varlist)
[876]4890 return s
[710]4891
[1862]4892 @asaplog_post_dec
[1145]4893 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]4894 """\
[1143]4895 Form a quotient using "off" beams when observing in "MX" mode.
[1846]4896
[1143]4897 Parameters:
[1846]4898
[1145]4899 mask: an optional mask to be used when weight == 'stddev'
[1855]4900
[1143]4901 weight: How to average the off beams. Default is 'median'.
[1855]4902
[1145]4903 preserve: you can preserve (default) the continuum or
[1855]4904 remove it. The equations used are:
[1846]4905
[1855]4906 preserve: Output = Toff * (on/off) - Toff
4907
4908 remove: Output = Toff * (on/off) - Ton
4909
[1217]4910 """
[1593]4911 mask = mask or ()
[1141]4912 varlist = vars()
4913 on = scantable(self._math._mx_extract(self, 'on'))
[1143]4914 preoff = scantable(self._math._mx_extract(self, 'off'))
4915 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]4916 from asapmath import quotient
[1145]4917 q = quotient(on, off, preserve)
[1143]4918 q._add_history("mx_quotient", varlist)
[1217]4919 return q
[513]4920
[1862]4921 @asaplog_post_dec
[718]4922 def freq_switch(self, insitu=None):
[1846]4923 """\
[718]4924 Apply frequency switching to the data.
[1846]4925
[718]4926 Parameters:
[1846]4927
[718]4928 insitu: if False a new scantable is returned.
4929 Otherwise, the swictching is done in-situ
4930 The default is taken from .asaprc (False)
[1846]4931
[718]4932 """
4933 if insitu is None: insitu = rcParams['insitu']
[876]4934 self._math._setinsitu(insitu)
[718]4935 varlist = vars()
[876]4936 s = scantable(self._math._freqswitch(self))
[1118]4937 s._add_history("freq_switch", varlist)
[1856]4938 if insitu:
4939 self._assign(s)
4940 else:
4941 return s
[718]4942
[1862]4943 @asaplog_post_dec
[780]4944 def recalc_azel(self):
[1846]4945 """Recalculate the azimuth and elevation for each position."""
[780]4946 varlist = vars()
[876]4947 self._recalcazel()
[780]4948 self._add_history("recalc_azel", varlist)
4949 return
4950
[1862]4951 @asaplog_post_dec
[513]4952 def __add__(self, other):
[2574]4953 """
4954 implicit on all axes and on Tsys
4955 """
[513]4956 varlist = vars()
[2574]4957 s = self.__op( other, "ADD" )
[513]4958 s._add_history("operator +", varlist)
4959 return s
4960
[1862]4961 @asaplog_post_dec
[513]4962 def __sub__(self, other):
4963 """
4964 implicit on all axes and on Tsys
4965 """
4966 varlist = vars()
[2574]4967 s = self.__op( other, "SUB" )
[513]4968 s._add_history("operator -", varlist)
4969 return s
[710]4970
[1862]4971 @asaplog_post_dec
[513]4972 def __mul__(self, other):
4973 """
4974 implicit on all axes and on Tsys
4975 """
4976 varlist = vars()
[2574]4977 s = self.__op( other, "MUL" ) ;
[513]4978 s._add_history("operator *", varlist)
4979 return s
4980
[710]4981
[1862]4982 @asaplog_post_dec
[513]4983 def __div__(self, other):
4984 """
4985 implicit on all axes and on Tsys
4986 """
4987 varlist = vars()
[2574]4988 s = self.__op( other, "DIV" )
4989 s._add_history("operator /", varlist)
4990 return s
4991
4992 @asaplog_post_dec
4993 def __op( self, other, mode ):
[513]4994 s = None
4995 if isinstance(other, scantable):
[2574]4996 s = scantable(self._math._binaryop(self, other, mode))
[513]4997 elif isinstance(other, float):
4998 if other == 0.0:
[718]4999 raise ZeroDivisionError("Dividing by zero is not recommended")
[2574]5000 s = scantable(self._math._unaryop(self, other, mode, False))
[2144]5001 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]5002 if isinstance(other[0], list) \
5003 or isinstance(other[0], numpy.ndarray):
[2144]5004 from asapmath import _array2dOp
[2574]5005 s = _array2dOp( self, other, mode, False )
[2144]5006 else:
[2574]5007 s = scantable( self._math._arrayop( self, other,
5008 mode, False ) )
[513]5009 else:
[718]5010 raise TypeError("Other input is not a scantable or float value")
[513]5011 return s
5012
[1862]5013 @asaplog_post_dec
[530]5014 def get_fit(self, row=0):
[1846]5015 """\
[530]5016 Print or return the stored fits for a row in the scantable
[1846]5017
[530]5018 Parameters:
[1846]5019
[530]5020 row: the row which the fit has been applied to.
[1846]5021
[530]5022 """
5023 if row > self.nrow():
5024 return
[976]5025 from asap.asapfit import asapfit
[530]5026 fit = asapfit(self._getfit(row))
[1859]5027 asaplog.push( '%s' %(fit) )
5028 return fit.as_dict()
[530]5029
[2349]5030 @preserve_selection
[1483]5031 def flag_nans(self):
[1846]5032 """\
[1483]5033 Utility function to flag NaN values in the scantable.
5034 """
5035 import numpy
5036 basesel = self.get_selection()
5037 for i in range(self.nrow()):
[1589]5038 sel = self.get_row_selector(i)
5039 self.set_selection(basesel+sel)
[1483]5040 nans = numpy.isnan(self._getspectrum(0))
[2877]5041 if numpy.any(nans):
5042 bnans = [ bool(v) for v in nans]
5043 self.flag(bnans)
5044
5045 self.set_selection(basesel)
[1483]5046
    def get_row_selector(self, rowno):
        """Return a selector restricted to the single row 'rowno'."""
        return selector(rows=[rowno])
[1573]5049
[484]5050 def _add_history(self, funcname, parameters):
[1435]5051 if not rcParams['scantable.history']:
5052 return
[484]5053 # create date
5054 sep = "##"
5055 from datetime import datetime
5056 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5057 hist = dstr+sep
5058 hist += funcname+sep#cdate+sep
[2349]5059 if parameters.has_key('self'):
5060 del parameters['self']
[1118]5061 for k, v in parameters.iteritems():
[484]5062 if type(v) is dict:
[1118]5063 for k2, v2 in v.iteritems():
[484]5064 hist += k2
5065 hist += "="
[1118]5066 if isinstance(v2, scantable):
[484]5067 hist += 'scantable'
5068 elif k2 == 'mask':
[1118]5069 if isinstance(v2, list) or isinstance(v2, tuple):
[513]5070 hist += str(self._zip_mask(v2))
5071 else:
5072 hist += str(v2)
[484]5073 else:
[513]5074 hist += str(v2)
[484]5075 else:
5076 hist += k
5077 hist += "="
[1118]5078 if isinstance(v, scantable):
[484]5079 hist += 'scantable'
5080 elif k == 'mask':
[1118]5081 if isinstance(v, list) or isinstance(v, tuple):
[513]5082 hist += str(self._zip_mask(v))
5083 else:
5084 hist += str(v)
[484]5085 else:
5086 hist += str(v)
5087 hist += sep
5088 hist = hist[:-2] # remove trailing '##'
5089 self._addhistory(hist)
5090
[710]5091
[484]5092 def _zip_mask(self, mask):
5093 mask = list(mask)
5094 i = 0
5095 segments = []
5096 while mask[i:].count(1):
5097 i += mask[i:].index(1)
5098 if mask[i:].count(0):
5099 j = i + mask[i:].index(0)
5100 else:
[710]5101 j = len(mask)
[1118]5102 segments.append([i, j])
[710]5103 i = j
[484]5104 return segments
[714]5105
[626]5106 def _get_ordinate_label(self):
5107 fu = "("+self.get_fluxunit()+")"
5108 import re
5109 lbl = "Intensity"
[1118]5110 if re.match(".K.", fu):
[626]5111 lbl = "Brightness Temperature "+ fu
[1118]5112 elif re.match(".Jy.", fu):
[626]5113 lbl = "Flux density "+ fu
5114 return lbl
[710]5115
[876]5116 def _check_ifs(self):
[2349]5117# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]5118 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]5119 nchans = filter(lambda t: t > 0, nchans)
[876]5120 return (sum(nchans)/len(nchans) == nchans[0])
[976]5121
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more datasets into this scantable, in place.

        Each name is filled into a fresh table (via the MS filler or the
        generic filler) and successively merged into self; this instance
        is re-initialised with the merged result after every file.

        Parameters:

            names:   list of file/directory names to import; '~' and
                     environment variables are expanded first

            unit:    flux unit to set after filling, or None to leave
                     it unchanged

            average: if True, time-average the integrations of each
                     scan after filling

            opts:    dict of extra options handed to the filler's open()

        Raises:

            IOError: if any of the given names does not exist

        """
        first = True
        fullnames = []
        for name in names:
            # expand $VARS and ~ before testing existence
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 1 = disk-based table, 0 = memory table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # choose the filler by inspecting the dataset on disk
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            # pattern used to identify reference (off) scans by name
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge what self already holds with the new table
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]5162
    def _get_verify_action( self, msg, action=None ):
        """\
        Return a one-letter verification action code for interactive
        verification loops: 'Y' (yes), 'N' (no), 'A' (accept all) or
        'R' (reject all).

        Parameters:

            msg:     prompt text shown to the user

            action:  a preset answer; if empty/non-string, the user is
                     asked on the console instead

        Note: 'H'/'?' prints a help text and re-prompts; any other
        unrecognised input silently defaults to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            # no usable preset -> ask interactively (Python 2 raw_input)
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare Enter means the default answer
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt after showing the help text
            return self._get_verify_action(msg)
        else:
            return 'Y'
[2012]5181
[1402]5182 def __getitem__(self, key):
5183 if key < 0:
5184 key += self.nrow()
5185 if key >= self.nrow():
5186 raise IndexError("Row index out of range.")
5187 return self._getspectrum(key)
5188
5189 def __setitem__(self, key, value):
5190 if key < 0:
5191 key += self.nrow()
5192 if key >= self.nrow():
5193 raise IndexError("Row index out of range.")
5194 if not hasattr(value, "__len__") or \
5195 len(value) > self.nchan(self.getif(key)):
5196 raise ValueError("Spectrum length doesn't match.")
5197 return self._setspectrum(value, key)
5198
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
5201
5202 def __iter__(self):
5203 for i in range(len(self)):
5204 yield self[i]
Note: See TracBrowser for help on using the repository browser.