source: trunk/python/scantable.py@ 2984

Last change on this file since 2984 was 2984, checked in by WataruKawasaki, 10 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd.scantable

Description: a minor clean-up in do_pack_blinfo() and sub_baseline().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 208.2 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2751]4import re
[2315]5import tempfile
[1948]6import numpy
[1691]7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
[1824]12from asap.env import is_casapy
[876]13from asap._asap import Scantable
[2004]14from asap._asap import filler, msfiller
[1824]15from asap.parameters import rcParams
[1862]16from asap.logging import asaplog, asaplog_post_dec
[1824]17from asap.selector import selector
18from asap.linecatalog import linecatalog
[1600]19from asap.coordinate import coordinate
[1859]20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]21from asap.asapfitter import fitter
[102]22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs,
    even when the wrapped call raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always reinstate the selection that was active on entry
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info_path = filename + '/table.info'
    looks_like_table = ( os.path.isdir(filename)
                         and os.path.exists(info_path)
                         and os.path.exists(filename + '/table.dat') )
    if not looks_like_table:
        return False
    f = open(info_path)
    first_line = f.readline()
    f.close()
    # a scantable announces itself on the first line of table.info
    match_pattern = '^Type = (Scantable)? *$'
    return re.match(match_pattern, first_line) is not None
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename + '/table.info')
             and os.path.exists(filename + '/table.dat') ):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    # an MS identifies itself on the first line of table.info
    return first_line.find('Measurement Set') != -1
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.

    Raises ValueError when edge (or one of its elements) is neither an
    integer nor a pair of integers.
    """
    from asap import _is_sequence_or_number as _is_valid

    # --- validation -----------------------------------------------------
    # NOTE: the error messages used to be written with backslash
    # continuations *inside* the string literals, which embedded long runs
    # of spaces in the raised message; they are now single clean strings.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- canonicalise the trivial shapes --------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # a flat pair of integers applies commonly to all IFs; any nested
    # element (or length > 2) means per-IF values
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten: single ints are duplicated, pairs are appended as-is
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a failed fit: push it to the log in verbose mode,
    otherwise re-raise as a RuntimeError carrying the original error."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-display options into one comma-separated string
    (boolean is lowercased, e.g. 'true,1000')."""
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of packed strings (see do_pack_blinfo).
    Empty results (out-of-range rows) are dropped.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        packed_list = []
        for item in blinfo:
            packed = do_pack_blinfo(item, maxirow)
            if packed != '':
                packed_list.append(packed)
        return packed_list
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into a
    single packed string.

    The result is a colon-separated record whose fields are, in order:
    row, blfunc, function parameter (order/npiece/nwave), masklist,
    clipthresh, clipniter, use_linefinder, thresh, edge, chan_avg_limit.
    Multi-valued fields are themselves comma-separated.

    Returns '' when blinfo['row'] is out of range (>= maxirow).
    Raises ValueError when a mandatory key is missing or the clipping /
    linefinder parameter sets are incomplete.
    """
    dinfo = {}
    # mandatory keys; sequences (possibly nested one level, e.g. a
    # masklist of [start,end] pairs) are flattened to comma-separated
    # strings
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # out-of-range rows yield an empty record which the caller
    # (pack_blinfo) silently drops
    if irow >= maxirow: return ''

    # optional per-function parameters; only the one matching 'blfunc'
    # is actually required (checked below via fspec_keys)
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    # which parameter key each baseline function requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[dinfo['blfunc']]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters must be given together; when
    # neither is present clipping is disabled via the defaults below
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # field order must match what the C++ side expects to unpack
    slblinfo = [dinfo['row'], dinfo['blfunc'], dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
280
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    parsed = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        parsed.append({'row': int(srow),
                       'coeff': [float(c) for c in scoeff.split(",")],
                       'rms': float(srms)})
    return parsed
301
def is_number(s):
    """Return True if the stripped string *s* parses as a float.

    Parameters:

        s: the string to test

    """
    # strip outside the try so a non-string argument still raises
    # (AttributeError), matching the original behaviour
    s = s.strip()
    try:
        float(s)
        return True
    except Exception:
        # not parseable as a number (e.g. '', 'abc')
        return False
312
def is_frequency(s):
    """Return True if the stripped string ends in 'hz'
    (case-insensitive), i.e. looks like a frequency with unit."""
    return s.strip()[-2:].lower() == "hz"
316
def get_freq_by_string(s1, s2):
    """Convert a numeric string *s1* and a frequency string *s2*
    (value + optionally SI-prefixed 'Hz' unit) into a pair of floats,
    both scaled by the unit factor taken from *s2*.

    Raises RuntimeError when the inputs do not look like a number and
    a frequency, respectively.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    value_str = s1.strip()
    freq_str = s2.strip()

    # the character just before 'Hz' is either a digit (no SI prefix)
    # or a single SI prefix letter
    unit_prefix = freq_str[-3:-2]
    if is_number(unit_prefix):
        return (float(value_str), float(freq_str[:-2]))
    scale = factor_list[prefix_list.index(unit_prefix)]
    return (float(value_str) * scale, float(freq_str[:-3]) * scale)
[2882]337
def is_velocity(s):
    """Return True if the stripped string ends in 'm/s'
    (case-insensitive), i.e. looks like a velocity with unit."""
    return s.strip()[-3:].lower() == "m/s"
341
def get_velocity_by_string(s1, s2):
    """Convert a numeric string *s1* and a velocity string *s2*
    (value + 'm/s' or 'km/s' unit) into a pair of floats in km/s,
    both scaled by the unit factor taken from *s2*.

    Raises RuntimeError when the inputs do not look like a number and
    a velocity, respectively.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s; the '.' entry stands
    # for "no prefix" (plain m/s)
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    value_str = s1.strip()
    vel_str = s2.strip()

    unit_prefix = vel_str[-4:-3]
    if is_number(unit_prefix):
        # a digit right before 'm/s' means the unit is plain m/s
        return (float(value_str) * 1e-3, float(vel_str[:-3]) * 1e-3)
    scale = factor_list[prefix_list.index(unit_prefix)]
    return (float(value_str) * scale, float(vel_str[:-4]) * scale)
[2882]363
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity to the corresponding sky frequency.

    Parameters:

        restfreq: rest frequency (same unit as the returned value)
        vel:      velocity in km/s
        doppler:  velocity convention; 'radio', 'optical', or anything
                  else for the relativistic formula

    """
    import math

    # speed of light in km/s
    vel_c = 299792.458
    beta = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - beta)
    elif convention == 'optical':
        return restfreq / (1.0 + beta)
    # any other value falls through to the relativistic convention
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
[2882]379
def get_restfreq_in_Hz(s_restfreq):
    """\
    Convert a rest-frequency string such as '100.0GHz' or '1420 MHz'
    into a float value in Hz.

    Parameters:

        s_restfreq: numeric string with an optional, possibly
                    SI-prefixed, 'Hz' unit suffix ('aHz' .. 'EHz');
                    embedded spaces are ignored. A bare number is
                    taken to be in Hz already.

    Raises:

        Exception: if the unit suffix is not a recognised Hz variant.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ","")

    # scan from the right: collect the trailing alphabetic unit
    # characters; everything before them is the numeric value
    for i in range(len(s))[::-1]:
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        # single-character SI prefix in front of 'Hz'
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value*factor
    else:
        raise Exception("wrong unit of restfreq.")
431
def normalise_restfreq(in_restfreq):
    """\
    Normalise a rest-frequency specification into plain Hz values.

    Accepts a float/int/long (returned as float), a string with unit
    (converted via get_restfreq_in_Hz), or a 1-D list/numpy.ndarray
    whose elements may be numbers, strings, or {'name','value'} dicts;
    sequence input is returned as a list with every value converted.

    Raises Exception on any other type, on a multi-dimensional array,
    or on an unsupported element type.
    """
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):
        if isinstance(in_restfreq, numpy.ndarray):
            if len(in_restfreq.shape) > 1:
                mesg = "given in numpy.ndarray, in_restfreq must be 1-D."
                raise Exception, mesg

        res = []
        for i in xrange(len(in_restfreq)):
            elem = in_restfreq[i]
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, int) or isinstance(elem, long):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                # dict form: {'name': <line name>, 'value': <frequency>};
                # only 'value' is normalised, 'name' is passed through.
                # NOTE(review): a dict 'value' of an unsupported type is
                # silently skipped here rather than raising — confirm
                # whether that is intended.
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    dictelem = {}
                    dictelem["name"] = elem["name"]
                    dictelem["value"] = float(elem["value"])
                    res.append(dictelem)
                elif isinstance(elem["value"], str):
                    dictelem = {}
                    dictelem["name"] = elem["name"]
                    dictelem["value"] = get_restfreq_in_Hz(elem["value"])
                    res.append(dictelem)
            else:
                mesg = "restfreq elements must be float, int, or string."
                raise Exception, mesg
        return res
    else:
        mesg = "wrong type of restfreq given."
        raise Exception, mesg
474
def set_restfreq(s, restfreq):
    """Apply *restfreq* (normalised to Hz) to scantable *s*.
    An empty string or empty list means "leave unchanged" and is a no-op."""
    if (restfreq == '') or (restfreq == []):
        return
    s.set_restfreqs(normalise_restfreq(restfreq))
479
[876]480class scantable(Scantable):
[1846]481 """\
482 The ASAP container for scans (single-dish data).
[102]483 """
[1819]484
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the call arguments for the history record below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": make a reference to an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: handled by the generic/NRO filler
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    # NOTE(review): parallactify is applied again below at
                    # the end of this else-branch — confirm the double call
                    # on this path is intended.
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names is passed to the filler as-is
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]571
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # default output format comes from the rc file
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive a default from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has a dedicated writer in the C++ layer
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
630
[102]631 def copy(self):
[1846]632 """Return a copy of this scantable.
633
634 *Note*:
635
[1348]636 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]637
638 Example::
639
[102]640 copiedscan = scan.copy()
[1846]641
[102]642 """
[876]643 sd = scantable(Scantable._copy(self))
[113]644 return sd
645
[1093]646 def drop_scan(self, scanid=None):
[1846]647 """\
[1093]648 Return a new scantable where the specified scan number(s) has(have)
649 been dropped.
[1846]650
[1093]651 Parameters:
[1846]652
[1093]653 scanid: a (list of) scan number(s)
[1846]654
[1093]655 """
656 from asap import _is_sequence_or_number as _is_valid
657 from asap import _to_list
658 from asap import unique
659 if not _is_valid(scanid):
[2315]660 raise RuntimeError( 'Please specify a scanno to drop from the'
661 ' scantable' )
[1859]662 scanid = _to_list(scanid)
663 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
664 for sid in scanid: allscans.remove(sid)
665 if len(allscans) == 0:
666 raise ValueError("Can't remove all scans")
667 sel = selector(scans=allscans)
668 return self._select_copy(sel)
[1093]669
[1594]670 def _select_copy(self, selection):
671 orig = self.get_selection()
672 self.set_selection(orig+selection)
673 cp = self.copy()
674 self.set_selection(orig)
675 return cp
676
[102]677 def get_scan(self, scanid=None):
[1855]678 """\
[102]679 Return a specific scan (by scanno) or collection of scans (by
680 source name) in a new scantable.
[1846]681
682 *Note*:
683
[1348]684 See scantable.drop_scan() for the inverse operation.
[1846]685
[102]686 Parameters:
[1846]687
[513]688 scanid: a (list of) scanno or a source name, unix-style
689 patterns are accepted for source name matching, e.g.
690 '*_R' gets all 'ref scans
[1846]691
692 Example::
693
[513]694 # get all scans containing the source '323p459'
695 newscan = scan.get_scan('323p459')
696 # get all 'off' scans
697 refscans = scan.get_scan('*_R')
698 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]699 newscan = scan.get_scan([0, 2, 7, 10])
[1846]700
[102]701 """
702 if scanid is None:
[1859]703 raise RuntimeError( 'Please specify a scan no or name to '
704 'retrieve from the scantable' )
[102]705 try:
[946]706 bsel = self.get_selection()
707 sel = selector()
[102]708 if type(scanid) is str:
[946]709 sel.set_name(scanid)
[1594]710 return self._select_copy(sel)
[102]711 elif type(scanid) is int:
[946]712 sel.set_scans([scanid])
[1594]713 return self._select_copy(sel)
[381]714 elif type(scanid) is list:
[946]715 sel.set_scans(scanid)
[1594]716 return self._select_copy(sel)
[381]717 else:
[718]718 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]719 raise TypeError(msg)
[102]720 except RuntimeError:
[1859]721 raise
[102]722
723 def __str__(self):
[2315]724 tempFile = tempfile.NamedTemporaryFile()
725 Scantable._summary(self, tempFile.name)
726 tempFile.seek(0)
727 asaplog.clear()
728 return tempFile.file.read()
[102]729
[2315]730 @asaplog_post_dec
[976]731 def summary(self, filename=None):
[1846]732 """\
[102]733 Print a summary of the contents of this scantable.
[1846]734
[102]735 Parameters:
[1846]736
[1931]737 filename: the name of a file to write the putput to
[102]738 Default - no file output
[1846]739
[102]740 """
741 if filename is not None:
[256]742 if filename is "":
743 filename = 'scantable_summary.txt'
[415]744 from os.path import expandvars, isdir
[411]745 filename = expandvars(filename)
[2286]746 if isdir(filename):
[718]747 msg = "Illegal file name '%s'." % (filename)
[1859]748 raise IOError(msg)
[2286]749 else:
750 filename = ""
751 Scantable._summary(self, filename)
[710]752
[1512]753 def get_spectrum(self, rowno):
[1471]754 """Return the spectrum for the current row in the scantable as a list.
[1846]755
[1471]756 Parameters:
[1846]757
[1573]758 rowno: the row number to retrieve the spectrum from
[1846]759
[1471]760 """
761 return self._getspectrum(rowno)
[946]762
[1471]763 def get_mask(self, rowno):
764 """Return the mask for the current row in the scantable as a list.
[1846]765
[1471]766 Parameters:
[1846]767
[1573]768 rowno: the row number to retrieve the mask from
[1846]769
[1471]770 """
771 return self._getmask(rowno)
772
773 def set_spectrum(self, spec, rowno):
[1938]774 """Set the spectrum for the current row in the scantable.
[1846]775
[1471]776 Parameters:
[1846]777
[1855]778 spec: the new spectrum
[1846]779
[1855]780 rowno: the row number to set the spectrum for
781
[1471]782 """
[2348]783 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]784 return self._setspectrum(spec, rowno)
785
[1600]786 def get_coordinate(self, rowno):
787 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]788
789 *Note*:
790
[1600]791 * This coordinate is only valid until a scantable method modifies
792 the frequency axis.
793 * This coordinate does contain the original frequency set-up
794 NOT the new frame. The conversions however are done using the user
795 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
796 use scantable.freq_align first. Without it there is no closure,
[1846]797 i.e.::
[1600]798
[1846]799 c = myscan.get_coordinate(0)
800 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
801
[1600]802 Parameters:
[1846]803
[1600]804 rowno: the row number for the spectral coordinate
805
806 """
807 return coordinate(Scantable.get_coordinate(self, rowno))
808
[946]809 def get_selection(self):
[1846]810 """\
[1005]811 Get the selection object currently set on this scantable.
[1846]812
813 Example::
814
[1005]815 sel = scan.get_selection()
816 sel.set_ifs(0) # select IF 0
817 scan.set_selection(sel) # apply modified selection
[1846]818
[946]819 """
820 return selector(self._getselection())
821
    def set_selection(self, selection=None, **kw):
        """\
        Select a subset of the data. All following operations on this scantable
        are only applied to this selection.

        Parameters:

            selection:    a selector object (default unset the selection), or
                          any combination of 'pols', 'ifs', 'beams', 'scans',
                          'cycles', 'name', 'query'

        Examples::

            sel = selector()         # create a selection object
            self.set_scans([0, 3])   # select SCANNO 0 and 3
            scan.set_selection(sel)  # set the selection
            scan.summary()           # will only print summary of scanno 0 and 3
            scan.set_selection()     # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary()           # will only print summary of scanno 0 and 3
            scan.set_selection()     # unset the selection

        """
        if selection is None:
            # reset
            if len(kw) == 0:
                # no selector and no keywords: clear any selection
                selection = selector()
            else:
                # try keywords: validate each against the selector's
                # known fields before building the selector from them
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', "
                                       "valid keys are %s" % (k,
                                                              selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
859
[1819]860 def get_row(self, row=0, insitu=None):
[1846]861 """\
[1819]862 Select a row in the scantable.
863 Return a scantable with single row.
[1846]864
[1819]865 Parameters:
[1846]866
867 row: row no of integration, default is 0.
868 insitu: if False a new scantable is returned. Otherwise, the
869 scaling is done in-situ. The default is taken from .asaprc
870 (False)
871
[1819]872 """
[2349]873 if insitu is None:
874 insitu = rcParams['insitu']
[1819]875 if not insitu:
876 workscan = self.copy()
877 else:
878 workscan = self
879 # Select a row
[2349]880 sel = selector()
[1992]881 sel.set_rows([row])
[1819]882 workscan.set_selection(sel)
883 if not workscan.nrow() == 1:
[2349]884 msg = "Could not identify single row. %d rows selected." \
885 % (workscan.nrow())
[1819]886 raise RuntimeError(msg)
887 if insitu:
888 self._assign(workscan)
889 else:
890 return workscan
891
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None, skip_flaggedrow=False):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

            skip_flaggedrow: if True, skip outputting text for flagged
                             spectra. default is False.

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # min/max statistics also report the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []

        # '<stat>_abc' variants return the abscissa of the extremum
        # instead of the statistic itself; values are collected in the
        # loop below rather than computed up front
        rtnabc = False
        if stat.lower().endswith('_abc'):
            rtnabc = True
        else:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # '_abc' mode: collect the abscissa as the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            # NOTE(review): in '_abc' mode statvals is still being
            # appended to, so indexing it by row here assumes every
            # earlier row was visited — confirm interaction between
            # skip_flaggedrow and rtnabc.
            if skip_flaggedrow and self._getflagrow(i):
                statvals[i] = None
                continue

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the formatted text through a scratch file so it reaches
        # the log verbatim.
        # NOTE(review): the path below is predictable and shared per
        # user in /tmp — consider tempfile.NamedTemporaryFile.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1008
1009 def chan2data(self, rowno=0, chan=0):
[1846]1010 """\
[1819]1011 Returns channel/frequency/velocity and spectral value
1012 at an arbitrary row and channel in the scantable.
[1846]1013
[1819]1014 Parameters:
[1846]1015
[1819]1016 rowno: a row number in the scantable. Default is the
1017 first row, i.e. rowno=0
[1855]1018
[1819]1019 chan: a channel in the scantable. Default is the first
1020 channel, i.e. pos=0
[1846]1021
[1819]1022 """
1023 if isinstance(rowno, int) and isinstance(chan, int):
1024 qx = {'unit': self.get_unit(),
1025 'value': self._getabcissa(rowno)[chan]}
1026 qy = {'unit': self.get_fluxunit(),
1027 'value': self._getspectrum(rowno)[chan]}
1028 return qx, qy
1029
[1118]1030 def stddev(self, mask=None):
[1846]1031 """\
[135]1032 Determine the standard deviation of the current beam/if/pol
1033 Takes a 'mask' as an optional parameter to specify which
1034 channels should be excluded.
[1846]1035
[135]1036 Parameters:
[1846]1037
[135]1038 mask: an optional mask specifying where the standard
1039 deviation should be determined.
1040
[1846]1041 Example::
1042
[135]1043 scan.set_unit('channel')
[1118]1044 msk = scan.create_mask([100, 200], [500, 600])
[135]1045 scan.stddev(mask=m)
[1846]1046
[135]1047 """
[1118]1048 return self.stats(stat='stddev', mask=mask);
[135]1049
[1003]1050
[1259]1051 def get_column_names(self):
[1846]1052 """\
[1003]1053 Return a list of column names, which can be used for selection.
1054 """
[1259]1055 return list(Scantable.get_column_names(self))
[1003]1056
[1730]1057 def get_tsys(self, row=-1):
[1846]1058 """\
[113]1059 Return the System temperatures.
[1846]1060
1061 Parameters:
1062
1063 row: the rowno to get the information for. (default all rows)
1064
[113]1065 Returns:
[1846]1066
[876]1067 a list of Tsys values for the current selection
[1846]1068
[113]1069 """
[1730]1070 if row > -1:
1071 return self._get_column(self._gettsys, row)
[876]1072 return self._row_callback(self._gettsys, "Tsys")
[256]1073
[2406]1074 def get_tsysspectrum(self, row=-1):
1075 """\
1076 Return the channel dependent system temperatures.
[1730]1077
[2406]1078 Parameters:
1079
1080 row: the rowno to get the information for. (default all rows)
1081
1082 Returns:
1083
1084 a list of Tsys values for the current selection
1085
1086 """
1087 return self._get_column( self._gettsysspectrum, row )
1088
[2791]1089 def set_tsys(self, values, row=-1):
1090 """\
1091 Set the Tsys value(s) of the given 'row' or the whole scantable
1092 (selection).
1093
1094 Parameters:
1095
1096 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1097 row: the row number to apply Tsys values to.
1098 (default all rows)
1099
1100 """
1101
1102 if not hasattr(values, "__len__"):
1103 values = [values]
1104 self._settsys(values, row)
1105
[1730]1106 def get_weather(self, row=-1):
[1846]1107 """\
[2930]1108 Return the weather information.
[1846]1109
1110 Parameters:
1111
1112 row: the rowno to get the information for. (default all rows)
1113
1114 Returns:
1115
1116 a dict or list of of dicts of values for the current selection
1117
1118 """
[2930]1119 if row >= len(self):
1120 raise IndexError("row out of range")
[1730]1121 values = self._get_column(self._get_weather, row)
1122 if row > -1:
1123 return {'temperature': values[0],
1124 'pressure': values[1], 'humidity' : values[2],
1125 'windspeed' : values[3], 'windaz' : values[4]
1126 }
1127 else:
1128 out = []
1129 for r in values:
1130 out.append({'temperature': r[0],
1131 'pressure': r[1], 'humidity' : r[2],
1132 'windspeed' : r[3], 'windaz' : r[4]
1133 })
1134 return out
1135
[876]1136 def _row_callback(self, callback, label):
1137 out = ""
[1118]1138 outvec = []
[1590]1139 sep = '-'*50
[876]1140 for i in range(self.nrow()):
1141 tm = self._gettime(i)
1142 src = self._getsourcename(i)
[1590]1143 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]1144 out += 'Time[%s]:\n' % (tm)
[1590]1145 if self.nbeam(-1) > 1:
1146 out += ' Beam[%d] ' % (self.getbeam(i))
1147 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1148 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]1149 outvec.append(callback(i))
1150 out += '= %3.3f\n' % (outvec[i])
[1590]1151 out += sep+'\n'
[1859]1152
1153 asaplog.push(sep)
1154 asaplog.push(" %s" % (label))
1155 asaplog.push(sep)
1156 asaplog.push(out)
[1861]1157 asaplog.post()
[1175]1158 return outvec
[256]1159
[1947]1160 def _get_column(self, callback, row=-1, *args):
[1070]1161 """
1162 """
1163 if row == -1:
[1947]1164 return [callback(i, *args) for i in range(self.nrow())]
[1070]1165 else:
[1819]1166 if 0 <= row < self.nrow():
[1947]1167 return callback(row, *args)
[256]1168
[1070]1169
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

            asdatetime:  return values as datetime objects rather than strings

            prec:   number of digits shown. Default -1 to automatic
                    calculation.
                    Note this number is equals to the digits of MVTime,
                    i.e., 0<prec<3: dates with hh:: only,
                    <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                    and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # Derive the precision from the shortest integration time so
            # that consecutive time stamps remain distinguishable.
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        # The C++ layer formats the time stamp strings at the requested
        # precision.
        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # Strip trailing fields (.%f, then %S, then %M) that the
            # requested precision does not cover.  NOTE: relies on
            # Python 2 integer division in (6-prec)/2.
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]1215
[1348]1216
1217 def get_inttime(self, row=-1):
[1846]1218 """\
[1348]1219 Get a list of integration times for the observations.
1220 Return a time in seconds for each integration in the scantable.
[1846]1221
[1348]1222 Parameters:
[1846]1223
[1348]1224 row: row no of integration. Default -1 return all rows.
[1846]1225
[1348]1226 """
[1573]1227 return self._get_column(self._getinttime, row)
[1348]1228
[1573]1229
[714]1230 def get_sourcename(self, row=-1):
[1846]1231 """\
[794]1232 Get a list source names for the observations.
[714]1233 Return a string for each integration in the scantable.
1234 Parameters:
[1846]1235
[1348]1236 row: row no of integration. Default -1 return all rows.
[1846]1237
[714]1238 """
[1070]1239 return self._get_column(self._getsourcename, row)
[714]1240
[794]1241 def get_elevation(self, row=-1):
[1846]1242 """\
[794]1243 Get a list of elevations for the observations.
1244 Return a float for each integration in the scantable.
[1846]1245
[794]1246 Parameters:
[1846]1247
[1348]1248 row: row no of integration. Default -1 return all rows.
[1846]1249
[794]1250 """
[1070]1251 return self._get_column(self._getelevation, row)
[794]1252
1253 def get_azimuth(self, row=-1):
[1846]1254 """\
[794]1255 Get a list of azimuths for the observations.
1256 Return a float for each integration in the scantable.
[1846]1257
[794]1258 Parameters:
[1348]1259 row: row no of integration. Default -1 return all rows.
[1846]1260
[794]1261 """
[1070]1262 return self._get_column(self._getazimuth, row)
[794]1263
1264 def get_parangle(self, row=-1):
[1846]1265 """\
[794]1266 Get a list of parallactic angles for the observations.
1267 Return a float for each integration in the scantable.
[1846]1268
[794]1269 Parameters:
[1846]1270
[1348]1271 row: row no of integration. Default -1 return all rows.
[1846]1272
[794]1273 """
[1070]1274 return self._get_column(self._getparangle, row)
[794]1275
[1070]1276 def get_direction(self, row=-1):
1277 """
1278 Get a list of Positions on the sky (direction) for the observations.
[1594]1279 Return a string for each integration in the scantable.
[1855]1280
[1070]1281 Parameters:
[1855]1282
[1070]1283 row: row no of integration. Default -1 return all rows
[1855]1284
[1070]1285 """
1286 return self._get_column(self._getdirection, row)
1287
[1391]1288 def get_directionval(self, row=-1):
[1846]1289 """\
[1391]1290 Get a list of Positions on the sky (direction) for the observations.
1291 Return a float for each integration in the scantable.
[1846]1292
[1391]1293 Parameters:
[1846]1294
[1391]1295 row: row no of integration. Default -1 return all rows
[1846]1296
[1391]1297 """
1298 return self._get_column(self._getdirectionvec, row)
1299
[1862]1300 @asaplog_post_dec
[102]1301 def set_unit(self, unit='channel'):
[1846]1302 """\
[102]1303 Set the unit for all following operations on this scantable
[1846]1304
[102]1305 Parameters:
[1846]1306
1307 unit: optional unit, default is 'channel'. Use one of '*Hz',
1308 'km/s', 'channel' or equivalent ''
1309
[102]1310 """
[484]1311 varlist = vars()
[1118]1312 if unit in ['', 'pixel', 'channel']:
[113]1313 unit = ''
1314 inf = list(self._getcoordinfo())
1315 inf[0] = unit
1316 self._setcoordinfo(inf)
[1118]1317 self._add_history("set_unit", varlist)
[113]1318
[1862]1319 @asaplog_post_dec
[484]1320 def set_instrument(self, instr):
[1846]1321 """\
[1348]1322 Set the instrument for subsequent processing.
[1846]1323
[358]1324 Parameters:
[1846]1325
[710]1326 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1327 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1328
[358]1329 """
1330 self._setInstrument(instr)
[1118]1331 self._add_history("set_instument", vars())
[358]1332
[1862]1333 @asaplog_post_dec
[1190]1334 def set_feedtype(self, feedtype):
[1846]1335 """\
[1190]1336 Overwrite the feed type, which might not be set correctly.
[1846]1337
[1190]1338 Parameters:
[1846]1339
[1190]1340 feedtype: 'linear' or 'circular'
[1846]1341
[1190]1342 """
1343 self._setfeedtype(feedtype)
1344 self._add_history("set_feedtype", vars())
1345
[1862]1346 @asaplog_post_dec
[2897]1347 def get_doppler(self):
1348 """\
1349 Get the doppler.
1350 """
1351 return self._getcoordinfo()[2]
1352
1353 @asaplog_post_dec
[276]1354 def set_doppler(self, doppler='RADIO'):
[1846]1355 """\
[276]1356 Set the doppler for all following operations on this scantable.
[1846]1357
[276]1358 Parameters:
[1846]1359
[276]1360 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1361
[276]1362 """
[484]1363 varlist = vars()
[276]1364 inf = list(self._getcoordinfo())
1365 inf[2] = doppler
1366 self._setcoordinfo(inf)
[1118]1367 self._add_history("set_doppler", vars())
[710]1368
[1862]1369 @asaplog_post_dec
[226]1370 def set_freqframe(self, frame=None):
[1846]1371 """\
[113]1372 Set the frame type of the Spectral Axis.
[1846]1373
[113]1374 Parameters:
[1846]1375
[591]1376 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1377 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1378 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1379
1380 Example::
1381
[113]1382 scan.set_freqframe('BARY')
[1846]1383
[113]1384 """
[1593]1385 frame = frame or rcParams['scantable.freqframe']
[484]1386 varlist = vars()
[1819]1387 # "REST" is not implemented in casacore
1388 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1389 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1390 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1391 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1392
[989]1393 if frame in valid:
[113]1394 inf = list(self._getcoordinfo())
1395 inf[1] = frame
1396 self._setcoordinfo(inf)
[1118]1397 self._add_history("set_freqframe", varlist)
[102]1398 else:
[1118]1399 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1400 raise TypeError(msg)
[710]1401
[1862]1402 @asaplog_post_dec
[989]1403 def set_dirframe(self, frame=""):
[1846]1404 """\
[989]1405 Set the frame type of the Direction on the sky.
[1846]1406
[989]1407 Parameters:
[1846]1408
[989]1409 frame: an optional frame type, default ''. Valid frames are:
1410 'J2000', 'B1950', 'GALACTIC'
[1846]1411
1412 Example:
1413
[989]1414 scan.set_dirframe('GALACTIC')
[1846]1415
[989]1416 """
1417 varlist = vars()
[1859]1418 Scantable.set_dirframe(self, frame)
[1118]1419 self._add_history("set_dirframe", varlist)
[989]1420
[113]1421 def get_unit(self):
[1846]1422 """\
[113]1423 Get the default unit set in this scantable
[1846]1424
[113]1425 Returns:
[1846]1426
[113]1427 A unit string
[1846]1428
[113]1429 """
1430 inf = self._getcoordinfo()
1431 unit = inf[0]
1432 if unit == '': unit = 'channel'
1433 return unit
[102]1434
[1862]1435 @asaplog_post_dec
[158]1436 def get_abcissa(self, rowno=0):
[1846]1437 """\
[158]1438 Get the abcissa in the current coordinate setup for the currently
[113]1439 selected Beam/IF/Pol
[1846]1440
[113]1441 Parameters:
[1846]1442
[226]1443 rowno: an optional row number in the scantable. Default is the
1444 first row, i.e. rowno=0
[1846]1445
[113]1446 Returns:
[1846]1447
[1348]1448 The abcissa values and the format string (as a dictionary)
[1846]1449
[113]1450 """
[256]1451 abc = self._getabcissa(rowno)
[710]1452 lbl = self._getabcissalabel(rowno)
[158]1453 return abc, lbl
[113]1454
[1862]1455 @asaplog_post_dec
[2322]1456 def flag(self, mask=None, unflag=False, row=-1):
[1846]1457 """\
[1001]1458 Flag the selected data using an optional channel mask.
[1846]1459
[1001]1460 Parameters:
[1846]1461
[1001]1462 mask: an optional channel mask, created with create_mask. Default
1463 (no mask) is all channels.
[1855]1464
[1819]1465 unflag: if True, unflag the data
[1846]1466
[2322]1467 row: an optional row number in the scantable.
1468 Default -1 flags all rows
1469
[1001]1470 """
1471 varlist = vars()
[1593]1472 mask = mask or []
[1994]1473 self._flag(row, mask, unflag)
[1001]1474 self._add_history("flag", varlist)
1475
[1862]1476 @asaplog_post_dec
[2322]1477 def flag_row(self, rows=None, unflag=False):
[1846]1478 """\
[1819]1479 Flag the selected data in row-based manner.
[1846]1480
[1819]1481 Parameters:
[1846]1482
[1843]1483 rows: list of row numbers to be flagged. Default is no row
[2322]1484 (must be explicitly specified to execute row-based
1485 flagging).
[1855]1486
[1819]1487 unflag: if True, unflag the data.
[1846]1488
[1819]1489 """
1490 varlist = vars()
[2322]1491 if rows is None:
1492 rows = []
[1859]1493 self._flag_row(rows, unflag)
[1819]1494 self._add_history("flag_row", varlist)
1495
[1862]1496 @asaplog_post_dec
[1819]1497 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1498 """\
[1819]1499 Flag the selected data outside a specified range (in channel-base)
[1846]1500
[1819]1501 Parameters:
[1846]1502
[1819]1503 uthres: upper threshold.
[1855]1504
[1819]1505 dthres: lower threshold
[1846]1506
[2322]1507 clipoutside: True for flagging data outside the range
1508 [dthres:uthres].
[1928]1509 False for flagging data inside the range.
[1855]1510
[1846]1511 unflag: if True, unflag the data.
1512
[1819]1513 """
1514 varlist = vars()
[1859]1515 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1516 self._add_history("clip", varlist)
1517
[1862]1518 @asaplog_post_dec
[1584]1519 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1520 """\
[1192]1521 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1522 Flagged data in the scantable get interpolated over the region.
[1192]1523 No taper is applied.
[1846]1524
[1192]1525 Parameters:
[1846]1526
[1579]1527 start: the start frequency (really a period within the
1528 bandwidth) or period to remove
[1855]1529
[1579]1530 end: the end frequency or period to remove
[1855]1531
[2431]1532 unit: the frequency unit (default 'MHz') or '' for
[1579]1533 explicit lag channels
[1846]1534
1535 *Notes*:
1536
[1579]1537 It is recommended to flag edges of the band or strong
[1348]1538 signals beforehand.
[1846]1539
[1192]1540 """
1541 if insitu is None: insitu = rcParams['insitu']
1542 self._math._setinsitu(insitu)
1543 varlist = vars()
[1579]1544 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1545 if not (unit == "" or base.has_key(unit)):
[1192]1546 raise ValueError("%s is not a valid unit." % unit)
[1859]1547 if unit == "":
1548 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1549 else:
1550 s = scantable(self._math._lag_flag(self, start*base[unit],
1551 end*base[unit], "frequency"))
[1192]1552 s._add_history("lag_flag", varlist)
1553 if insitu:
1554 self._assign(s)
1555 else:
1556 return s
[1001]1557
[1862]1558 @asaplog_post_dec
[2349]1559 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1560 """\
1561 Apply FFT to the spectra.
1562 Flagged data in the scantable get interpolated over the region.
1563
1564 Parameters:
[2186]1565
1566 rowno: The row number(s) to be processed. int, list
[2349]1567 and tuple are accepted. By default (None), FFT
[2186]1568 is applied to the whole data.
1569
1570 mask: Auxiliary channel mask(s). Given as a boolean
1571 list, it is applied to all specified rows.
1572 A list of boolean lists can also be used to
1573 apply different masks. In the latter case, the
1574 length of 'mask' must be the same as that of
[2349]1575 'rowno'. The default is None.
[2177]1576
1577 getrealimag: If True, returns the real and imaginary part
1578 values of the complex results.
1579 If False (the default), returns the amplitude
1580 (absolute value) normalised with Ndata/2 and
1581 phase (argument, in unit of radian).
1582
1583 Returns:
1584
[2186]1585 A list of dictionaries containing the results for each spectrum.
1586 Each dictionary contains two values, the real and the imaginary
1587 parts when getrealimag = True, or the amplitude(absolute value)
1588 and the phase(argument) when getrealimag = False. The key for
1589 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1590 respectively.
1591 """
[2349]1592 if rowno is None:
1593 rowno = []
[2177]1594 if isinstance(rowno, int):
1595 rowno = [rowno]
1596 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1597 raise TypeError("The row number(s) must be int, list or tuple.")
1598 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1599
[2411]1600 usecommonmask = True
1601
1602 if mask is None:
1603 mask = []
1604 if isinstance(mask, list) or isinstance(mask, tuple):
1605 if len(mask) == 0:
1606 mask = [[]]
1607 else:
1608 if isinstance(mask[0], bool):
1609 if len(mask) != self.nchan(self.getif(rowno[0])):
1610 raise ValueError("The spectra and the mask have "
1611 "different length.")
1612 mask = [mask]
1613 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1614 usecommonmask = False
1615 if len(mask) != len(rowno):
1616 raise ValueError("When specifying masks for each "
1617 "spectrum, the numbers of them "
1618 "must be identical.")
1619 for i in xrange(mask):
1620 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1621 raise ValueError("The spectra and the mask have "
1622 "different length.")
1623 else:
1624 raise TypeError("The mask must be a boolean list or "
1625 "a list of boolean list.")
1626 else:
[2349]1627 raise TypeError("The mask must be a boolean list or a list of "
1628 "boolean list.")
[2186]1629
1630 res = []
1631
1632 imask = 0
1633 for whichrow in rowno:
1634 fspec = self._fft(whichrow, mask[imask], getrealimag)
1635 nspec = len(fspec)
[2177]1636
[2349]1637 i = 0
1638 v1 = []
1639 v2 = []
1640 reselem = {"real":[],"imag":[]} if getrealimag \
1641 else {"ampl":[],"phase":[]}
[2177]1642
[2186]1643 while (i < nspec):
1644 v1.append(fspec[i])
1645 v2.append(fspec[i+1])
[2349]1646 i += 2
[2186]1647
[2177]1648 if getrealimag:
[2186]1649 reselem["real"] += v1
1650 reselem["imag"] += v2
[2177]1651 else:
[2186]1652 reselem["ampl"] += v1
1653 reselem["phase"] += v2
[2177]1654
[2186]1655 res.append(reselem)
1656
[2349]1657 if not usecommonmask:
1658 imask += 1
[2186]1659
[2177]1660 return res
1661
1662 @asaplog_post_dec
[113]1663 def create_mask(self, *args, **kwargs):
[1846]1664 """\
[1118]1665 Compute and return a mask based on [min, max] windows.
[189]1666 The specified windows are to be INCLUDED, when the mask is
[113]1667 applied.
[1846]1668
[102]1669 Parameters:
[1846]1670
[1118]1671 [min, max], [min2, max2], ...
[1024]1672 Pairs of start/end points (inclusive)specifying the regions
[102]1673 to be masked
[1855]1674
[189]1675 invert: optional argument. If specified as True,
1676 return an inverted mask, i.e. the regions
1677 specified are EXCLUDED
[1855]1678
[513]1679 row: create the mask using the specified row for
1680 unit conversions, default is row=0
1681 only necessary if frequency varies over rows.
[1846]1682
1683 Examples::
1684
[113]1685 scan.set_unit('channel')
[1846]1686 # a)
[1118]1687 msk = scan.create_mask([400, 500], [800, 900])
[189]1688 # masks everything outside 400 and 500
[113]1689 # and 800 and 900 in the unit 'channel'
1690
[1846]1691 # b)
[1118]1692 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1693 # masks the regions between 400 and 500
[113]1694 # and 800 and 900 in the unit 'channel'
[1846]1695
1696 # c)
1697 #mask only channel 400
[1554]1698 msk = scan.create_mask([400])
[1846]1699
[102]1700 """
[1554]1701 row = kwargs.get("row", 0)
[513]1702 data = self._getabcissa(row)
[113]1703 u = self._getcoordinfo()[0]
[1859]1704 if u == "":
1705 u = "channel"
1706 msg = "The current mask window unit is %s" % u
1707 i = self._check_ifs()
1708 if not i:
1709 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1710 asaplog.push(msg)
[2348]1711 n = len(data)
[1295]1712 msk = _n_bools(n, False)
[710]1713 # test if args is a 'list' or a 'normal *args - UGLY!!!
1714
[2349]1715 ws = (isinstance(args[-1][-1], int)
1716 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1717 for window in ws:
[1554]1718 if len(window) == 1:
1719 window = [window[0], window[0]]
1720 if len(window) == 0 or len(window) > 2:
[2349]1721 raise ValueError("A window needs to be defined as "
1722 "[start(, end)]")
[1545]1723 if window[0] > window[1]:
1724 tmp = window[0]
1725 window[0] = window[1]
1726 window[1] = tmp
[102]1727 for i in range(n):
[1024]1728 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1729 msk[i] = True
[113]1730 if kwargs.has_key('invert'):
1731 if kwargs.get('invert'):
[1295]1732 msk = mask_not(msk)
[102]1733 return msk
[710]1734
[1931]1735 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1736 """\
[1819]1737 Compute and return a list of mask windows, [min, max].
[1846]1738
[1819]1739 Parameters:
[1846]1740
[1819]1741 mask: channel mask, created with create_mask.
[1855]1742
[1819]1743 row: calcutate the masklist using the specified row
1744 for unit conversions, default is row=0
1745 only necessary if frequency varies over rows.
[1846]1746
[1819]1747 Returns:
[1846]1748
[1819]1749 [min, max], [min2, max2], ...
1750 Pairs of start/end points (inclusive)specifying
1751 the masked regions
[1846]1752
[1819]1753 """
1754 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1755 raise TypeError("The mask should be list or tuple.")
[2427]1756 if len(mask) <= 0:
1757 raise TypeError("The mask elements should be > 0")
[2348]1758 data = self._getabcissa(row)
1759 if len(data) != len(mask):
[1819]1760 msg = "Number of channels in scantable != number of mask elements"
1761 raise TypeError(msg)
1762 u = self._getcoordinfo()[0]
[1859]1763 if u == "":
1764 u = "channel"
1765 msg = "The current mask window unit is %s" % u
1766 i = self._check_ifs()
1767 if not i:
1768 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1769 if not silent:
1770 asaplog.push(msg)
[2349]1771 masklist = []
[1819]1772 ist, ien = None, None
1773 ist, ien=self.get_mask_indices(mask)
1774 if ist is not None and ien is not None:
1775 for i in xrange(len(ist)):
1776 range=[data[ist[i]],data[ien[i]]]
1777 range.sort()
1778 masklist.append([range[0],range[1]])
1779 return masklist
1780
1781 def get_mask_indices(self, mask=None):
[1846]1782 """\
[1819]1783 Compute and Return lists of mask start indices and mask end indices.
[1855]1784
1785 Parameters:
1786
[1819]1787 mask: channel mask, created with create_mask.
[1846]1788
[1819]1789 Returns:
[1846]1790
[1819]1791 List of mask start indices and that of mask end indices,
1792 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1793
[1819]1794 """
1795 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1796 raise TypeError("The mask should be list or tuple.")
[2427]1797 if len(mask) <= 0:
1798 raise TypeError("The mask elements should be > 0")
[2349]1799 istart = []
1800 iend = []
1801 if mask[0]:
1802 istart.append(0)
[1819]1803 for i in range(len(mask)-1):
1804 if not mask[i] and mask[i+1]:
1805 istart.append(i+1)
1806 elif mask[i] and not mask[i+1]:
1807 iend.append(i)
[2349]1808 if mask[len(mask)-1]:
1809 iend.append(len(mask)-1)
[1819]1810 if len(istart) != len(iend):
1811 raise RuntimeError("Numbers of mask start != mask end.")
1812 for i in range(len(istart)):
1813 if istart[i] > iend[i]:
1814 raise RuntimeError("Mask start index > mask end index")
1815 break
1816 return istart,iend
1817
[2013]1818 @asaplog_post_dec
[2882]1819 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
1820 """
1821 Parse MS type spw/channel selection syntax.
1822
1823 Parameters:
1824 selectstring : A string expression of spw and channel selection.
1825 Comma-separated expressions mean different spw -
1826 channel combinations. Spws and channel selections
1827 are partitioned by a colon ':'. In a single
1828 selection expression, you can put multiple values
1829 separated by semicolons ';'. Both for spw and
1830 channel selection, allowed cases include single
1831 value, blank('') or asterisk('*') to specify all
1832 available values, two values connected with a
1833 tilde ('~') to specify an inclusive range. Unit
1834 strings for frequency or velocity can be added to
1835 the tilde-connected values. For channel selection
1836 expression, placing a '<' or a '>' is possible to
1837 specify a semi-infinite interval as well.
1838
1839 examples:
1840 '' or '*' = all spws (all channels)
1841 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
1842 '3:3~45;60' = channels 3 to 45 and 60 in spw 3
1843 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
1844 all channels in spw8
[2936]1845 '1.3~1.5GHz' = all spws whose central frequency
1846 falls in frequency range between
1847 1.3GHz and 1.5GHz.
1848 '1.3~1.5GHz:1.3~1.5GHz' = channels which fall
[2884]1849 between the specified
1850 frequency range in spws
[2936]1851 whose central frequency
1852 falls in the specified
1853 frequency range.
[2884]1854 '1:-200~250km/s' = channels that fall between the
1855 specified velocity range in
1856 spw 1.
[2897]1857 restfreq: the rest frequency.
1858 examples: '115.2712GHz', 115271201800.0
1859 frame: an optional frame type, default 'LSRK'. Valid frames are:
1860 'TOPO', 'LSRD', 'LSRK', 'BARY',
1861 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1862 doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[2882]1863 Returns:
1864 A dictionary of selected (valid) spw and masklist pairs,
1865 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1866 """
1867 if not isinstance(selectstring, str):
1868 asaplog.post()
1869 asaplog.push("Expression of spw/channel selection must be a string.")
1870 asaplog.post("ERROR")
1871
1872 orig_unit = self.get_unit()
1873 self.set_unit('channel')
1874
[2891]1875 if restfreq is not None:
[2892]1876 orig_molids = self._getmolidcol_list()
1877 set_restfreq(self, restfreq)
[2882]1878
[2897]1879 orig_coord = self._getcoordinfo()
[2892]1880
1881 if frame is not None:
1882 orig_frame = orig_coord[1]
1883 self.set_freqframe(frame)
1884
1885 if doppler is not None:
1886 orig_doppler = orig_coord[2]
1887 self.set_doppler(doppler)
[2882]1888
1889 valid_ifs = self.getifnos()
1890
1891 comma_sep = selectstring.split(",")
1892 res = {}
1893
1894 for cms_elem in comma_sep:
1895 colon_sep = cms_elem.split(":")
1896
1897 if (len(colon_sep) > 2):
1898 raise RuntimeError("Invalid selection expression: more than two colons!")
1899
1900 # parse spw expression and store result in spw_list.
1901 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
[2884]1902 # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
1903 # 'a~b*m/s' (where * can be '' or 'k') and also
[2882]1904 # several of the above expressions connected with ';'.
1905
1906 spw_list = []
1907
1908 semicolon_sep = colon_sep[0].split(";")
1909
1910 for scs_elem in semicolon_sep:
1911 scs_elem = scs_elem.strip()
1912
1913 lt_sep = scs_elem.split("<")
1914 gt_sep = scs_elem.split(">")
1915 ti_sep = scs_elem.split("~")
1916
1917 lt_sep_length = len(lt_sep)
1918 gt_sep_length = len(gt_sep)
1919 ti_sep_length = len(ti_sep)
1920
1921 len_product = lt_sep_length * gt_sep_length * ti_sep_length
1922
1923 if (len_product > 2):
1924 # '<', '>' and '~' must not coexist in a single spw expression
1925
1926 raise RuntimeError("Invalid spw selection.")
1927
1928 elif (len_product == 1):
1929 # '', '*', or single spw number.
1930
1931 if (scs_elem == "") or (scs_elem == "*"):
1932 spw_list = valid_ifs[:] # deep copy
1933
1934 else: # single number
[2887]1935 expr = int(scs_elem)
1936 spw_list.append(expr)
1937 if expr not in valid_ifs:
1938 asaplog.push("Invalid spw given. Ignored.")
1939
[2882]1940 else: # (len_product == 2)
[2887]1941 # namely, one of '<', '>' or '~' appears just once.
[2882]1942
1943 if (lt_sep_length == 2): # '<a'
1944 if is_number(lt_sep[1]):
[2886]1945 no_valid_spw = True
[2882]1946 for i in valid_ifs:
1947 if (i < float(lt_sep[1])):
1948 spw_list.append(i)
[2886]1949 no_valid_spw = False
1950
1951 if no_valid_spw:
1952 raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")
[2882]1953
1954 else:
[2886]1955 raise RuntimeError("Invalid spw selection.")
[2882]1956
1957 elif (gt_sep_length == 2): # '>a'
1958 if is_number(gt_sep[1]):
[2886]1959 no_valid_spw = True
[2882]1960 for i in valid_ifs:
1961 if (i > float(gt_sep[1])):
1962 spw_list.append(i)
[2886]1963 no_valid_spw = False
1964
1965 if no_valid_spw:
1966 raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")
[2882]1967
1968 else:
[2886]1969 raise RuntimeError("Invalid spw selection.")
[2882]1970
1971 else: # (ti_sep_length == 2) where both boundaries inclusive
1972 expr0 = ti_sep[0].strip()
1973 expr1 = ti_sep[1].strip()
1974
1975 if is_number(expr0) and is_number(expr1):
1976 # 'a~b'
1977 expr_pmin = min(float(expr0), float(expr1))
1978 expr_pmax = max(float(expr0), float(expr1))
[2887]1979 has_invalid_spw = False
[2886]1980 no_valid_spw = True
1981
[2882]1982 for i in valid_ifs:
1983 if (expr_pmin <= i) and (i <= expr_pmax):
1984 spw_list.append(i)
[2886]1985 no_valid_spw = False
[2887]1986 else:
1987 has_invalid_spw = True
[2886]1988
[2887]1989 if has_invalid_spw:
1990 msg = "Invalid spw is given. Ignored."
1991 asaplog.push(msg)
1992 asaplog.post()
1993
[2886]1994 if no_valid_spw:
1995 raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")
[2887]1996
[2884]1997 elif is_number(expr0) and is_frequency(expr1):
1998 # 'a~b*Hz'
1999 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
[2936]2000 expr_fmin = min(expr_f0, expr_f1)
2001 expr_fmax = max(expr_f0, expr_f1)
[2886]2002 no_valid_spw = True
2003
[2882]2004 for coord in self._get_coordinate_list():
[2936]2005 spw = coord['if']
2006
2007 """
[2882]2008 expr_p0 = coord['coord'].to_pixel(expr_f0)
2009 expr_p1 = coord['coord'].to_pixel(expr_f1)
2010 expr_pmin = min(expr_p0, expr_p1)
2011 expr_pmax = max(expr_p0, expr_p1)
2012
2013 pmin = 0.0
2014 pmax = float(self.nchan(spw) - 1)
[2936]2015
[2882]2016 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
2017 spw_list.append(spw)
[2886]2018 no_valid_spw = False
[2936]2019 """
[2886]2020
[2937]2021 crd = coord['coord']
2022 fhead = crd.to_frequency(0)
2023 ftail = crd.to_frequency(self.nchan(spw) - 1)
[2936]2024 fcen = (fhead + ftail) / 2.0
2025
2026 if ((expr_fmin <= fcen) and (fcen <= expr_fmax)):
2027 spw_list.append(spw)
2028 no_valid_spw = False
2029
[2886]2030 if no_valid_spw:
2031 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
[2882]2032
[2884]2033 elif is_number(expr0) and is_velocity(expr1):
2034 # 'a~b*m/s'
2035 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
[2882]2036 expr_vmin = min(expr_v0, expr_v1)
2037 expr_vmax = max(expr_v0, expr_v1)
[2886]2038 no_valid_spw = True
2039
[2882]2040 for coord in self._get_coordinate_list():
2041 spw = coord['if']
[2936]2042
2043 """
[2882]2044 pmin = 0.0
2045 pmax = float(self.nchan(spw) - 1)
2046
2047 vel0 = coord['coord'].to_velocity(pmin)
2048 vel1 = coord['coord'].to_velocity(pmax)
2049
2050 vmin = min(vel0, vel1)
2051 vmax = max(vel0, vel1)
2052
2053 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
2054 spw_list.append(spw)
[2886]2055 no_valid_spw = False
[2936]2056 """
[2886]2057
[2937]2058 crd = coord['coord']
2059 fhead = crd.to_frequency(0)
2060 ftail = crd.to_frequency(self.nchan(spw) - 1)
2061 fcen = (fhead + ftail) / 2.0
2062 vcen = crd.to_velocity(crd.to_pixel(fcen))
[2936]2063
2064 if ((expr_vmin <= vcen) and (vcen <= expr_vmax)):
2065 spw_list.append(spw)
2066 no_valid_spw = False
2067
[2886]2068 if no_valid_spw:
2069 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
[2882]2070
2071 else:
2072 # cases such as 'aGHz~bkm/s' are not allowed now
2073 raise RuntimeError("Invalid spw selection.")
2074
[2887]2075 # check spw list and remove invalid ones.
2076 # if no valid spw left, emit ValueError.
2077 if len(spw_list) == 0:
2078 raise ValueError("No valid spw in given range.")
2079
[2882]2080 # parse channel expression and store the result in crange_list.
2081 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
2082 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
2083 # and also several of the above expressions connected with ';'.
2084
2085 for spw in spw_list:
2086 pmin = 0.0
2087 pmax = float(self.nchan(spw) - 1)
[2909]2088
2089 molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]
[2882]2090
2091 if (len(colon_sep) == 1):
2092 # no expression for channel selection,
2093 # which means all channels are to be selected.
2094 crange_list = [[pmin, pmax]]
2095
2096 else: # (len(colon_sep) == 2)
2097 crange_list = []
2098
2099 found = False
2100 for i in self._get_coordinate_list():
2101 if (i['if'] == spw):
2102 coord = i['coord']
2103 found = True
2104 break
2105
[2887]2106 if found:
2107 semicolon_sep = colon_sep[1].split(";")
2108 for scs_elem in semicolon_sep:
2109 scs_elem = scs_elem.strip()
[2882]2110
[2887]2111 ti_sep = scs_elem.split("~")
2112 ti_sep_length = len(ti_sep)
[2882]2113
[2887]2114 if (ti_sep_length > 2):
2115 raise RuntimeError("Invalid channel selection.")
[2882]2116
[2887]2117 elif (ti_sep_length == 1):
2118 if (scs_elem == "") or (scs_elem == "*"):
2119 # '' and '*' for all channels
2120 crange_list = [[pmin, pmax]]
2121 break
2122 elif (is_number(scs_elem)):
2123 # single channel given
2124 crange_list.append([float(scs_elem), float(scs_elem)])
2125 else:
2126 raise RuntimeError("Invalid channel selection.")
[2882]2127
[2887]2128 else: #(ti_sep_length == 2)
2129 expr0 = ti_sep[0].strip()
2130 expr1 = ti_sep[1].strip()
[2882]2131
[2887]2132 if is_number(expr0) and is_number(expr1):
2133 # 'a~b'
2134 expr_pmin = min(float(expr0), float(expr1))
2135 expr_pmax = max(float(expr0), float(expr1))
[2882]2136
[2887]2137 elif is_number(expr0) and is_frequency(expr1):
2138 # 'a~b*Hz'
2139 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
2140 expr_p0 = coord.to_pixel(expr_f0)
2141 expr_p1 = coord.to_pixel(expr_f1)
2142 expr_pmin = min(expr_p0, expr_p1)
2143 expr_pmax = max(expr_p0, expr_p1)
[2882]2144
[2887]2145 elif is_number(expr0) and is_velocity(expr1):
2146 # 'a~b*m/s'
[2909]2147 restf = self.get_restfreqs()[molid][0]
[2887]2148 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
[2897]2149 dppl = self.get_doppler()
2150 expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
2151 expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
[2887]2152 expr_p0 = coord.to_pixel(expr_f0)
2153 expr_p1 = coord.to_pixel(expr_f1)
2154 expr_pmin = min(expr_p0, expr_p1)
2155 expr_pmax = max(expr_p0, expr_p1)
[2882]2156
[2887]2157 else:
2158 # cases such as 'aGHz~bkm/s' are not allowed now
2159 raise RuntimeError("Invalid channel selection.")
[2882]2160
[2887]2161 cmin = max(pmin, expr_pmin)
2162 cmax = min(pmax, expr_pmax)
2163 # if the given range of channel selection has overwrap with
2164 # that of current spw, output the overwrap area.
2165 if (cmin <= cmax):
2166 cmin = float(int(cmin + 0.5))
2167 cmax = float(int(cmax + 0.5))
2168 crange_list.append([cmin, cmax])
[2882]2169
2170 if (len(crange_list) == 0):
2171 crange_list.append([])
2172
[2910]2173 if (len(crange_list[0]) > 0):
2174 if res.has_key(spw):
2175 res[spw].extend(crange_list)
2176 else:
2177 res[spw] = crange_list
[2882]2178
[2887]2179 for spw in res.keys():
[2932]2180 if spw in valid_ifs:
[2933]2181 # remove duplicated channel ranges
[2932]2182 for i in reversed(xrange(len(res[spw]))):
2183 for j in xrange(i):
[2933]2184 if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \
2185 (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):
[2935]2186 asaplog.post()
2187 merge_warn_mesg = "Spw " + str(spw) + ": overwrapping channel ranges are merged."
2188 asaplog.push(merge_warn_mesg)
2189 asaplog.post('WARN')
[2932]2190 res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])
2191 res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])
2192 res[spw].pop(i)
2193 break
2194 else:
[2887]2195 del res[spw]
2196
2197 if len(res) == 0:
2198 raise RuntimeError("No valid spw.")
2199
[2882]2200 # restore original values
[2892]2201 self.set_unit(orig_unit)
[2891]2202 if restfreq is not None:
[2892]2203 self._setmolidcol_list(orig_molids)
2204 if frame is not None:
2205 self.set_freqframe(orig_frame)
2206 if doppler is not None:
2207 self.set_doppler(orig_doppler)
[2882]2208
2209 return res
[2890]2210
[2882]2211 @asaplog_post_dec
2212 def get_first_rowno_by_if(self, ifno):
2213 found = False
2214 for irow in xrange(self.nrow()):
2215 if (self.getif(irow) == ifno):
2216 res = irow
2217 found = True
2218 break
2219
[2926]2220 if not found: raise RuntimeError("No valid spw.")
[2882]2221
2222 return res
2223
2224 @asaplog_post_dec
2225 def _get_coordinate_list(self):
2226 res = []
2227 spws = self.getifnos()
2228 for spw in spws:
2229 elem = {}
2230 elem['if'] = spw
2231 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2232 res.append(elem)
2233
2234 return res
2235
2236 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        # IFNOs actually present in the data; used to validate the IF
        # part of each selection below.
        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        if maskstring == "":
            # empty selection means 'all valid IFs (all channels)'
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                # NOTE(review): selspws is re-initialised for every
                # [min,max] pair, so only the IFs of the last pair of a
                # ';'-separated IF selection survive -- confirm intended.
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw (unreachable: this loop body does not run
                    # at all when selspws is empty)
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row belonging to ispw; note that
                        # the loop variable 'ifrow' is reused after the loop
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2373
[2611]2374 @asaplog_post_dec
2375 def parse_idx_selection(self, mode, selexpr):
2376 """
2377 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2378 BEAMNO, and row number
2379
2380 Parameters:
2381 mode : which column to select.
2382 ['scan',|'if'|'pol'|'beam'|'row']
2383 selexpr : A comma separated selection expression.
2384 examples:
2385 '' = all (returns [])
2386 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2387 (returns [0,1,4,5,6,9])
2388 Returns:
2389 A List of selected indices
2390 """
2391 if selexpr == "":
2392 return []
2393 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2394 'b': 'beam', 'r': 'row'}
2395 smode = mode.lower()[0]
2396 if not (smode in valid_modes.keys()):
2397 msg = "Invalid mode '%s'. Valid modes are %s" %\
2398 (mode, str(valid_modes.values()))
2399 asaplog.post()
2400 asaplog.push(msg)
2401 asaplog.post("ERROR")
2402 mode = valid_modes[smode]
2403 minidx = None
2404 maxidx = None
2405 if smode == 'r':
2406 minidx = 0
2407 maxidx = self.nrow()
2408 else:
2409 idx = getattr(self,"get"+mode+"nos")()
2410 minidx = min(idx)
2411 maxidx = max(idx)
2412 del idx
[2867]2413 # split selexpr by "<spaces>,<spaces>"
2414 comma_sep = re.compile('\s*,\s*')
2415 sellist = comma_sep.split(selexpr)
[2611]2416 idxlist = []
2417 for currselstr in sellist:
2418 # single range (may include ~, < or >)
2419 currlist = self._parse_selection(currselstr, typestr='integer',
2420 minval=minidx,maxval=maxidx)
2421 for thelist in currlist:
2422 idxlist += range(thelist[0],thelist[1]+1)
[2932]2423 # remove duplicated elements after first ones
2424 for i in reversed(xrange(len(idxlist))):
2425 if idxlist.index(idxlist[i]) < i:
2426 idxlist.pop(i)
[2611]2427 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2428 asaplog.push(msg)
2429 return idxlist
2430
[2349]2431 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]2432 minval=None, maxval=None):
[2013]2433 """
2434 Parameters:
2435 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2436 typestr : The type of the values in returned list
2437 ('integer' or 'float')
2438 offset : The offset value to subtract from or add to
2439 the boundary value if the selection string
[2611]2440 includes '<' or '>' [Valid only for typestr='float']
[2013]2441 minval, maxval : The minimum/maximum values to set if the
2442 selection string includes '<' or '>'.
2443 The list element is filled with None by default.
2444 Returns:
2445 A list of min/max pair of selections.
2446 Example:
[2611]2447 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2448 --> returns [[0,2],[5,7],[9,9]]
2449 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2450 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
[2013]2451 """
[2867]2452 # split selstr by '<spaces>;<spaces>'
2453 semi_sep = re.compile('\s*;\s*')
2454 selgroups = semi_sep.split(selstr)
[2013]2455 sellists = []
2456 if typestr.lower().startswith('int'):
2457 formatfunc = int
[2611]2458 offset = 1
[2013]2459 else:
2460 formatfunc = float
2461
2462 for currsel in selgroups:
[2867]2463 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2464 minsel = minval
2465 maxsel = maxval
[2013]2466 if currsel.find('~') > 0:
[2611]2467 # val0 <= x <= val1
[2013]2468 minsel = formatfunc(currsel.split('~')[0].strip())
[2867]2469 maxsel = formatfunc(currsel.split('~')[1].strip())
[2611]2470 elif currsel.strip().find('<=') > -1:
2471 bound = currsel.split('<=')
2472 try: # try "x <= val"
2473 minsel = minval
2474 maxsel = formatfunc(bound[1].strip())
2475 except ValueError: # now "val <= x"
2476 minsel = formatfunc(bound[0].strip())
2477 maxsel = maxval
2478 elif currsel.strip().find('>=') > -1:
2479 bound = currsel.split('>=')
2480 try: # try "x >= val"
2481 minsel = formatfunc(bound[1].strip())
2482 maxsel = maxval
2483 except ValueError: # now "val >= x"
2484 minsel = minval
2485 maxsel = formatfunc(bound[0].strip())
2486 elif currsel.strip().find('<') > -1:
2487 bound = currsel.split('<')
2488 try: # try "x < val"
2489 minsel = minval
2490 maxsel = formatfunc(bound[1].strip()) \
2491 - formatfunc(offset)
2492 except ValueError: # now "val < x"
2493 minsel = formatfunc(bound[0].strip()) \
[2013]2494 + formatfunc(offset)
[2611]2495 maxsel = maxval
2496 elif currsel.strip().find('>') > -1:
2497 bound = currsel.split('>')
2498 try: # try "x > val"
2499 minsel = formatfunc(bound[1].strip()) \
2500 + formatfunc(offset)
2501 maxsel = maxval
2502 except ValueError: # now "val > x"
2503 minsel = minval
2504 maxsel = formatfunc(bound[0].strip()) \
2505 - formatfunc(offset)
[2013]2506 else:
2507 minsel = formatfunc(currsel)
2508 maxsel = formatfunc(currsel)
2509 sellists.append([minsel,maxsel])
2510 return sellists
2511
[1819]2512# def get_restfreqs(self):
2513# """
2514# Get the restfrequency(s) stored in this scantable.
2515# The return value(s) are always of unit 'Hz'
2516# Parameters:
2517# none
2518# Returns:
2519# a list of doubles
2520# """
2521# return list(self._getrestfreqs())
2522
2523 def get_restfreqs(self, ids=None):
[1846]2524 """\
[256]2525 Get the restfrequency(s) stored in this scantable.
2526 The return value(s) are always of unit 'Hz'
[1846]2527
[256]2528 Parameters:
[1846]2529
[1819]2530 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2531 be retrieved
[1846]2532
[256]2533 Returns:
[1846]2534
[1819]2535 dictionary containing ids and a list of doubles for each id
[1846]2536
[256]2537 """
[1819]2538 if ids is None:
[2349]2539 rfreqs = {}
[1819]2540 idlist = self.getmolnos()
2541 for i in idlist:
[2349]2542 rfreqs[i] = list(self._getrestfreqs(i))
[1819]2543 return rfreqs
2544 else:
[2349]2545 if type(ids) == list or type(ids) == tuple:
2546 rfreqs = {}
[1819]2547 for i in ids:
[2349]2548 rfreqs[i] = list(self._getrestfreqs(i))
[1819]2549 return rfreqs
2550 else:
2551 return list(self._getrestfreqs(ids))
[102]2552
[2349]2553 @asaplog_post_dec
[931]2554 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]2555 """\
[931]2556 Set or replace the restfrequency specified and
[1938]2557 if the 'freqs' argument holds a scalar,
[931]2558 then that rest frequency will be applied to all the selected
2559 data. If the 'freqs' argument holds
2560 a vector, then it MUST be of equal or smaller length than
2561 the number of IFs (and the available restfrequencies will be
2562 replaced by this vector). In this case, *all* data have
2563 the restfrequency set per IF according
2564 to the corresponding value you give in the 'freqs' vector.
[1118]2565 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]2566 IF 1 gets restfreq 2e9.
[1846]2567
[1395]2568 You can also specify the frequencies via a linecatalog.
[1153]2569
[931]2570 Parameters:
[1846]2571
[931]2572 freqs: list of rest frequency values or string idenitfiers
[1855]2573
[931]2574 unit: unit for rest frequency (default 'Hz')
[402]2575
[1846]2576
2577 Example::
2578
[1819]2579 # set the given restfrequency for the all currently selected IFs
[931]2580 scan.set_restfreqs(freqs=1.4e9)
[1845]2581 # set restfrequencies for the n IFs (n > 1) in the order of the
2582 # list, i.e
2583 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2584 # len(list_of_restfreqs) == nIF
2585 # for nIF == 1 the following will set multiple restfrequency for
2586 # that IF
[1819]2587 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]2588 # set multiple restfrequencies per IF. as a list of lists where
2589 # the outer list has nIF elements, the inner s arbitrary
2590 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]2591
[1846]2592 *Note*:
[1845]2593
[931]2594 To do more sophisticate Restfrequency setting, e.g. on a
2595 source and IF basis, use scantable.set_selection() before using
[1846]2596 this function::
[931]2597
[1846]2598 # provided your scantable is called scan
2599 selection = selector()
[2431]2600 selection.set_name('ORION*')
[1846]2601 selection.set_ifs([1])
2602 scan.set_selection(selection)
2603 scan.set_restfreqs(freqs=86.6e9)
2604
[931]2605 """
2606 varlist = vars()
[1157]2607 from asap import linecatalog
2608 # simple value
[1118]2609 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]2610 self._setrestfreqs([freqs], [""], unit)
[1157]2611 # list of values
[1118]2612 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]2613 # list values are scalars
[1118]2614 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]2615 if len(freqs) == 1:
2616 self._setrestfreqs(freqs, [""], unit)
2617 else:
2618 # allow the 'old' mode of setting mulitple IFs
2619 savesel = self._getselection()
[2599]2620 sel = self.get_selection()
[1845]2621 iflist = self.getifnos()
2622 if len(freqs)>len(iflist):
2623 raise ValueError("number of elements in list of list "
2624 "exeeds the current IF selections")
2625 iflist = self.getifnos()
2626 for i, fval in enumerate(freqs):
2627 sel.set_ifs(iflist[i])
2628 self._setselection(sel)
2629 self._setrestfreqs([fval], [""], unit)
2630 self._setselection(savesel)
2631
2632 # list values are dict, {'value'=, 'name'=)
[1157]2633 elif isinstance(freqs[-1], dict):
[1845]2634 values = []
2635 names = []
2636 for d in freqs:
2637 values.append(d["value"])
2638 names.append(d["name"])
2639 self._setrestfreqs(values, names, unit)
[1819]2640 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]2641 savesel = self._getselection()
[2599]2642 sel = self.get_selection()
[1322]2643 iflist = self.getifnos()
[1819]2644 if len(freqs)>len(iflist):
[1845]2645 raise ValueError("number of elements in list of list exeeds"
2646 " the current IF selections")
2647 for i, fval in enumerate(freqs):
[1322]2648 sel.set_ifs(iflist[i])
[1259]2649 self._setselection(sel)
[1845]2650 self._setrestfreqs(fval, [""], unit)
[1157]2651 self._setselection(savesel)
2652 # freqs are to be taken from a linecatalog
[1153]2653 elif isinstance(freqs, linecatalog):
2654 savesel = self._getselection()
[2599]2655 sel = self.get_selection()
[1153]2656 for i in xrange(freqs.nrow()):
[1322]2657 sel.set_ifs(iflist[i])
[1153]2658 self._setselection(sel)
[1845]2659 self._setrestfreqs([freqs.get_frequency(i)],
2660 [freqs.get_name(i)], "MHz")
[1153]2661 # ensure that we are not iterating past nIF
2662 if i == self.nif()-1: break
2663 self._setselection(savesel)
[931]2664 else:
2665 return
2666 self._add_history("set_restfreqs", varlist)
2667
[2349]2668 @asaplog_post_dec
[1360]2669 def shift_refpix(self, delta):
[1846]2670 """\
[1589]2671 Shift the reference pixel of the Spectra Coordinate by an
2672 integer amount.
[1846]2673
[1589]2674 Parameters:
[1846]2675
[1589]2676 delta: the amount to shift by
[1846]2677
2678 *Note*:
2679
[1589]2680 Be careful using this with broadband data.
[1846]2681
[1360]2682 """
[2349]2683 varlist = vars()
[1731]2684 Scantable.shift_refpix(self, delta)
[2349]2685 s._add_history("shift_refpix", varlist)
[931]2686
[1862]2687 @asaplog_post_dec
[2820]2688 def history(self, filename=None, nrows=-1, start=0):
[1846]2689 """\
[1259]2690 Print the history. Optionally to a file.
[1846]2691
[1348]2692 Parameters:
[1846]2693
[1928]2694 filename: The name of the file to save the history to.
[1846]2695
[1259]2696 """
[2820]2697 n = self._historylength()
2698 if nrows == -1:
2699 nrows = n
2700 if start+nrows > n:
2701 nrows = nrows-start
2702 if n > 1000 and nrows == n:
2703 nrows = 1000
2704 start = n-1000
2705 asaplog.push("Warning: History has {0} entries. Displaying last "
2706 "1000".format(n))
2707 hist = list(self._gethistory(nrows, start))
[794]2708 out = "-"*80
[484]2709 for h in hist:
[2820]2710 if not h.strip():
2711 continue
2712 if h.find("---") >-1:
2713 continue
[489]2714 else:
2715 items = h.split("##")
2716 date = items[0]
2717 func = items[1]
2718 items = items[2:]
[794]2719 out += "\n"+date+"\n"
2720 out += "Function: %s\n Parameters:" % (func)
[489]2721 for i in items:
[1938]2722 if i == '':
2723 continue
[489]2724 s = i.split("=")
[1118]2725 out += "\n %s = %s" % (s[0], s[1])
[2820]2726 out = "\n".join([out, "*"*80])
[1259]2727 if filename is not None:
2728 if filename is "":
2729 filename = 'scantable_history.txt'
2730 filename = os.path.expandvars(os.path.expanduser(filename))
2731 if not os.path.isdir(filename):
2732 data = open(filename, 'w')
2733 data.write(out)
2734 data.close()
2735 else:
2736 msg = "Illegal file name '%s'." % (filename)
[1859]2737 raise IOError(msg)
2738 return page(out)
[2349]2739
[513]2740 #
2741 # Maths business
2742 #
[1862]2743 @asaplog_post_dec
[2818]2744 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2745 avmode="NONE"):
[1846]2746 """\
[2349]2747 Return the (time) weighted average of a scan. Scans will be averaged
2748 only if the source direction (RA/DEC) is within 1' otherwise
[1846]2749
2750 *Note*:
2751
[1070]2752 in channels only - align if necessary
[1846]2753
[513]2754 Parameters:
[1846]2755
[513]2756 mask: an optional mask (only used for 'var' and 'tsys'
2757 weighting)
[1855]2758
[558]2759 scanav: True averages each scan separately
2760 False (default) averages all scans together,
[1855]2761
[1099]2762 weight: Weighting scheme.
2763 'none' (mean no weight)
2764 'var' (1/var(spec) weighted)
2765 'tsys' (1/Tsys**2 weighted)
2766 'tint' (integration time weighted)
2767 'tintsys' (Tint/Tsys**2)
2768 'median' ( median averaging)
[535]2769 The default is 'tint'
[1855]2770
[931]2771 align: align the spectra in velocity before averaging. It takes
2772 the time of the first spectrum as reference time.
[2818]2773 avmode: 'SOURCE' - also select by source name - or
2774 'NONE' (default). Not applicable for scanav=True or
2775 weight=median
[1846]2776
2777 Example::
2778
[513]2779 # time average the scantable without using a mask
[710]2780 newscan = scan.average_time()
[1846]2781
[513]2782 """
2783 varlist = vars()
[1593]2784 weight = weight or 'TINT'
2785 mask = mask or ()
[2818]2786 scanav = (scanav and 'SCAN') or avmode.upper()
[1118]2787 scan = (self, )
[1859]2788
2789 if align:
2790 scan = (self.freq_align(insitu=False), )
[2818]2791 asaplog.push("Note: Alignment is don on a source-by-source basis")
2792 asaplog.push("Note: Averaging (by default) is not")
2793 # we need to set it to SOURCE averaging here
[1859]2794 s = None
2795 if weight.upper() == 'MEDIAN':
2796 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2797 scanav))
2798 else:
2799 s = scantable(self._math._average(scan, mask, weight.upper(),
2800 scanav))
[1099]2801 s._add_history("average_time", varlist)
[513]2802 return s
[710]2803
[1862]2804 @asaplog_post_dec
[876]2805 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]2806 """\
[513]2807 Return a scan where all spectra are converted to either
2808 Jansky or Kelvin depending upon the flux units of the scan table.
2809 By default the function tries to look the values up internally.
2810 If it can't find them (or if you want to over-ride), you must
2811 specify EITHER jyperk OR eta (and D which it will try to look up
2812 also if you don't set it). jyperk takes precedence if you set both.
[1846]2813
[513]2814 Parameters:
[1846]2815
[513]2816 jyperk: the Jy / K conversion factor
[1855]2817
[513]2818 eta: the aperture efficiency
[1855]2819
[1928]2820 d: the geometric diameter (metres)
[1855]2821
[513]2822 insitu: if False a new scantable is returned.
2823 Otherwise, the scaling is done in-situ
2824 The default is taken from .asaprc (False)
[1846]2825
[513]2826 """
2827 if insitu is None: insitu = rcParams['insitu']
[876]2828 self._math._setinsitu(insitu)
[513]2829 varlist = vars()
[1593]2830 jyperk = jyperk or -1.0
2831 d = d or -1.0
2832 eta = eta or -1.0
[876]2833 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2834 s._add_history("convert_flux", varlist)
2835 if insitu: self._assign(s)
2836 else: return s
[513]2837
[1862]2838 @asaplog_post_dec
[876]2839 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]2840 """\
[513]2841 Return a scan after applying a gain-elevation correction.
2842 The correction can be made via either a polynomial or a
2843 table-based interpolation (and extrapolation if necessary).
2844 You specify polynomial coefficients, an ascii table or neither.
2845 If you specify neither, then a polynomial correction will be made
2846 with built in coefficients known for certain telescopes (an error
2847 will occur if the instrument is not known).
2848 The data and Tsys are *divided* by the scaling factors.
[1846]2849
[513]2850 Parameters:
[1846]2851
[513]2852 poly: Polynomial coefficients (default None) to compute a
2853 gain-elevation correction as a function of
2854 elevation (in degrees).
[1855]2855
[513]2856 filename: The name of an ascii file holding correction factors.
2857 The first row of the ascii file must give the column
2858 names and these MUST include columns
[2431]2859 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]2860 by this) somewhere.
2861 The second row must give the data type of the
2862 column. Use 'R' for Real and 'I' for Integer.
2863 An example file would be
2864 (actual factors are arbitrary) :
2865
2866 TIME ELEVATION FACTOR
2867 R R R
2868 0.1 0 0.8
2869 0.2 20 0.85
2870 0.3 40 0.9
2871 0.4 60 0.85
2872 0.5 80 0.8
2873 0.6 90 0.75
[1855]2874
[513]2875 method: Interpolation method when correcting from a table.
[2431]2876 Values are 'nearest', 'linear' (default), 'cubic'
2877 and 'spline'
[1855]2878
[513]2879 insitu: if False a new scantable is returned.
2880 Otherwise, the scaling is done in-situ
2881 The default is taken from .asaprc (False)
[1846]2882
[513]2883 """
2884
2885 if insitu is None: insitu = rcParams['insitu']
[876]2886 self._math._setinsitu(insitu)
[513]2887 varlist = vars()
[1593]2888 poly = poly or ()
[513]2889 from os.path import expandvars
2890 filename = expandvars(filename)
[876]2891 s = scantable(self._math._gainel(self, poly, filename, method))
2892 s._add_history("gain_el", varlist)
[1593]2893 if insitu:
2894 self._assign(s)
2895 else:
2896 return s
[710]2897
[1862]2898 @asaplog_post_dec
[931]2899 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2900 """\
[513]2901 Return a scan where all rows have been aligned in frequency/velocity.
2902 The alignment frequency frame (e.g. LSRK) is that set by function
2903 set_freqframe.
[1846]2904
[513]2905 Parameters:
[1855]2906
[513]2907 reftime: reference time to align at. By default, the time of
2908 the first row of data is used.
[1855]2909
[513]2910 method: Interpolation method for regridding the spectra.
[2431]2911 Choose from 'nearest', 'linear', 'cubic' (default)
2912 and 'spline'
[1855]2913
[513]2914 insitu: if False a new scantable is returned.
2915 Otherwise, the scaling is done in-situ
2916 The default is taken from .asaprc (False)
[1846]2917
[513]2918 """
[931]2919 if insitu is None: insitu = rcParams["insitu"]
[2429]2920 oldInsitu = self._math._insitu()
[876]2921 self._math._setinsitu(insitu)
[513]2922 varlist = vars()
[1593]2923 reftime = reftime or ""
[931]2924 s = scantable(self._math._freq_align(self, reftime, method))
[876]2925 s._add_history("freq_align", varlist)
[2429]2926 self._math._setinsitu(oldInsitu)
[2349]2927 if insitu:
2928 self._assign(s)
2929 else:
2930 return s
[513]2931
[1862]2932 @asaplog_post_dec
[1725]2933 def opacity(self, tau=None, insitu=None):
[1846]2934 """\
[513]2935 Apply an opacity correction. The data
2936 and Tsys are multiplied by the correction factor.
[1846]2937
[513]2938 Parameters:
[1855]2939
[1689]2940 tau: (list of) opacity from which the correction factor is
[513]2941 exp(tau*ZD)
[1689]2942 where ZD is the zenith-distance.
2943 If a list is provided, it has to be of length nIF,
2944 nIF*nPol or 1 and in order of IF/POL, e.g.
2945 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2946 if tau is `None` the opacities are determined from a
2947 model.
[1855]2948
[513]2949 insitu: if False a new scantable is returned.
2950 Otherwise, the scaling is done in-situ
2951 The default is taken from .asaprc (False)
[1846]2952
[513]2953 """
[2349]2954 if insitu is None:
2955 insitu = rcParams['insitu']
[876]2956 self._math._setinsitu(insitu)
[513]2957 varlist = vars()
[1689]2958 if not hasattr(tau, "__len__"):
2959 tau = [tau]
[876]2960 s = scantable(self._math._opacity(self, tau))
2961 s._add_history("opacity", varlist)
[2349]2962 if insitu:
2963 self._assign(s)
2964 else:
2965 return s
[513]2966
[1862]2967 @asaplog_post_dec
[513]2968 def bin(self, width=5, insitu=None):
[1846]2969 """\
[513]2970 Return a scan where all spectra have been binned up.
[1846]2971
[1348]2972 Parameters:
[1846]2973
[513]2974 width: The bin width (default=5) in pixels
[1855]2975
[513]2976 insitu: if False a new scantable is returned.
2977 Otherwise, the scaling is done in-situ
2978 The default is taken from .asaprc (False)
[1846]2979
[513]2980 """
[2349]2981 if insitu is None:
2982 insitu = rcParams['insitu']
[876]2983 self._math._setinsitu(insitu)
[513]2984 varlist = vars()
[876]2985 s = scantable(self._math._bin(self, width))
[1118]2986 s._add_history("bin", varlist)
[1589]2987 if insitu:
2988 self._assign(s)
2989 else:
2990 return s
[513]2991
[1862]2992 @asaplog_post_dec
[2672]2993 def reshape(self, first, last, insitu=None):
2994 """Resize the band by providing first and last channel.
2995 This will cut off all channels outside [first, last].
2996 """
2997 if insitu is None:
2998 insitu = rcParams['insitu']
2999 varlist = vars()
3000 if last < 0:
3001 last = self.nchan()-1 + last
3002 s = None
3003 if insitu:
3004 s = self
3005 else:
3006 s = self.copy()
3007 s._reshape(first,last)
3008 s._add_history("reshape", varlist)
3009 if not insitu:
3010 return s
3011
3012 @asaplog_post_dec
[513]3013 def resample(self, width=5, method='cubic', insitu=None):
[1846]3014 """\
[1348]3015 Return a scan where all spectra have been binned up.
[1573]3016
[1348]3017 Parameters:
[1846]3018
[513]3019 width: The bin width (default=5) in pixels
[1855]3020
[513]3021 method: Interpolation method when correcting from a table.
[2431]3022 Values are 'nearest', 'linear', 'cubic' (default)
3023 and 'spline'
[1855]3024
[513]3025 insitu: if False a new scantable is returned.
3026 Otherwise, the scaling is done in-situ
3027 The default is taken from .asaprc (False)
[1846]3028
[513]3029 """
[2349]3030 if insitu is None:
3031 insitu = rcParams['insitu']
[876]3032 self._math._setinsitu(insitu)
[513]3033 varlist = vars()
[876]3034 s = scantable(self._math._resample(self, method, width))
[1118]3035 s._add_history("resample", varlist)
[2349]3036 if insitu:
3037 self._assign(s)
3038 else:
3039 return s
[513]3040
[1862]3041 @asaplog_post_dec
[946]3042 def average_pol(self, mask=None, weight='none'):
[1846]3043 """\
[946]3044 Average the Polarisations together.
[1846]3045
[946]3046 Parameters:
[1846]3047
[946]3048 mask: An optional mask defining the region, where the
3049 averaging will be applied. The output will have all
3050 specified points masked.
[1855]3051
[946]3052 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3053 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]3054
[946]3055 """
3056 varlist = vars()
[1593]3057 mask = mask or ()
[1010]3058 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]3059 s._add_history("average_pol", varlist)
[992]3060 return s
[513]3061
[1862]3062 @asaplog_post_dec
[1145]3063 def average_beam(self, mask=None, weight='none'):
[1846]3064 """\
[1145]3065 Average the Beams together.
[1846]3066
[1145]3067 Parameters:
3068 mask: An optional mask defining the region, where the
3069 averaging will be applied. The output will have all
3070 specified points masked.
[1855]3071
[1145]3072 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3073 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]3074
[1145]3075 """
3076 varlist = vars()
[1593]3077 mask = mask or ()
[1145]3078 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3079 s._add_history("average_beam", varlist)
3080 return s
3081
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag:  Bool indicating whether to turn this on (True) or
                    off (False)

        """
        # capture the call arguments first so the history entry records
        # exactly what the user passed in
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
3096
[1862]3097 @asaplog_post_dec
[992]3098 def convert_pol(self, poltype=None):
[1846]3099 """\
[992]3100 Convert the data to a different polarisation type.
[1565]3101 Note that you will need cross-polarisation terms for most conversions.
[1846]3102
[992]3103 Parameters:
[1855]3104
[992]3105 poltype: The new polarisation type. Valid types are:
[2431]3106 'linear', 'circular', 'stokes' and 'linpol'
[1846]3107
[992]3108 """
3109 varlist = vars()
[1859]3110 s = scantable(self._math._convertpol(self, poltype))
[1118]3111 s._add_history("convert_pol", varlist)
[992]3112 return s
3113
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an untouched copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive approval state; set to "N" once the user rejects all
        # remaining rows ('R' answer below)
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # original vs smoothed data for row r
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row the prompt mode is reset; presumably
                # this suppresses the extended help text -- TODO confirm
                # against _get_verify_action
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]3206
[2186]3207 @asaplog_post_dec
[2435]3208 def regrid_channel(self, width=5, plot=False, insitu=None):
3209 """\
3210 Regrid the spectra by the specified channel width
3211
3212 Parameters:
3213
3214 width: The channel width (float) of regridded spectra
3215 in the current spectral unit.
3216
3217 plot: [NOT IMPLEMENTED YET]
3218 plot the original and the regridded spectra.
3219 In this each indivual fit has to be approved, by
3220 typing 'y' or 'n'
3221
3222 insitu: if False a new scantable is returned.
3223 Otherwise, the scaling is done in-situ
3224 The default is taken from .asaprc (False)
3225
3226 """
3227 if insitu is None: insitu = rcParams['insitu']
3228 varlist = vars()
3229
3230 if plot:
3231 asaplog.post()
3232 asaplog.push("Verification plot is not implemtnetd yet.")
3233 asaplog.post("WARN")
3234
3235 s = self.copy()
3236 s._regrid_specchan(width)
3237
3238 s._add_history("regrid_channel", varlist)
3239
3240# if plot:
3241# from asap.asapplotter import new_asaplot
3242# theplot = new_asaplot(rcParams['plotter.gui'])
[2535]3243# from matplotlib import rc as rcp
3244# rcp('lines', linewidth=1)
[2435]3245# theplot.set_panels()
3246# ylab=s._get_ordinate_label()
3247# #theplot.palette(0,["#777777","red"])
3248# for r in xrange(s.nrow()):
3249# xsm=s._getabcissa(r)
3250# ysm=s._getspectrum(r)
3251# xorg=orgscan._getabcissa(r)
3252# yorg=orgscan._getspectrum(r)
3253# theplot.clear()
3254# theplot.hold()
3255# theplot.set_axes('ylabel',ylab)
3256# theplot.set_axes('xlabel',s._getabcissalabel(r))
3257# theplot.set_axes('title',s._getsourcename(r))
3258# theplot.set_line(label='Original',color="#777777")
3259# theplot.plot(xorg,yorg)
3260# theplot.set_line(label='Smoothed',color="red")
3261# theplot.plot(xsm,ysm)
3262# ### Ugly part for legend
3263# for i in [0,1]:
3264# theplot.subplots[0]['lines'].append(
3265# [theplot.subplots[0]['axes'].lines[i]]
3266# )
3267# theplot.release()
3268# ### Ugly part for legend
3269# theplot.subplots[0]['lines']=[]
3270# res = raw_input("Accept smoothing ([y]/n): ")
3271# if res.upper() == 'N':
3272# s._setspectrum(yorg, r)
3273# theplot.quit()
3274# del theplot
3275# del orgscan
3276
3277 if insitu: self._assign(s)
3278 else: return s
3279
3280 @asaplog_post_dec
[2186]3281 def _parse_wn(self, wn):
3282 if isinstance(wn, list) or isinstance(wn, tuple):
3283 return wn
3284 elif isinstance(wn, int):
3285 return [ wn ]
3286 elif isinstance(wn, str):
[2277]3287 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]3288 val = wn.split('-')
3289 val = [int(val[0]), int(val[1])]
3290 val.sort()
3291 res = [i for i in xrange(val[0], val[1]+1)]
[2277]3292 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]3293 val = int(wn[2:])+1
3294 res = [i for i in xrange(val)]
[2277]3295 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]3296 val = int(wn[:-2])+1
3297 res = [i for i in xrange(val)]
[2277]3298 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]3299 val = int(wn[1:])
3300 res = [i for i in xrange(val)]
[2277]3301 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]3302 val = int(wn[:-1])
3303 res = [i for i in xrange(val)]
[2411]3304 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3305 # then interpreted in C++
3306 # side as [a,a+1,...,a_nyq]
3307 # (CAS-3759)
[2186]3308 val = int(wn[2:])
[2411]3309 res = [val, -999]
3310 #res = [i for i in xrange(val, self.nchan()/2+1)]
3311 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3312 # then interpreted in C++
3313 # side as [a,a+1,...,a_nyq]
3314 # (CAS-3759)
[2186]3315 val = int(wn[:-2])
[2411]3316 res = [val, -999]
3317 #res = [i for i in xrange(val, self.nchan()/2+1)]
3318 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3319 # then interpreted in C++
3320 # side as [a+1,a+2,...,a_nyq]
3321 # (CAS-3759)
[2186]3322 val = int(wn[1:])+1
[2411]3323 res = [val, -999]
3324 #res = [i for i in xrange(val, self.nchan()/2+1)]
3325 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3326 # then interpreted in C++
3327 # side as [a+1,a+2,...,a_nyq]
3328 # (CAS-3759)
[2186]3329 val = int(wn[:-1])+1
[2411]3330 res = [val, -999]
3331 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]3332
[2186]3333 return res
3334 else:
3335 msg = 'wrong value given for addwn/rejwn'
3336 raise RuntimeError(msg)
3337
[2713]3338 @asaplog_post_dec
[2810]3339 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
[2767]3340 """\
3341 Subtract baseline based on parameters written in Baseline Table.
3342
3343 Parameters:
[2809]3344 insitu: if True, baseline fitting/subtraction is done
[2810]3345 in-situ. If False, a new scantable with
3346 baseline subtracted is returned. Actually,
3347 format of the returned value depends on both
3348 insitu and retfitres (see below).
[2767]3349 The default is taken from .asaprc (False)
[2810]3350 retfitres: if True, the results of baseline fitting (i.e.,
3351 coefficients and rms) are returned.
3352 default is False.
3353 The format of the returned value of this
3354 function varies as follows:
3355 (1) in case insitu=True and retfitres=True:
3356 fitting result.
3357 (2) in case insitu=True and retfitres=False:
3358 None.
3359 (3) in case insitu=False and retfitres=True:
3360 a dictionary containing a new scantable
3361 (with baseline subtracted) and the fitting
3362 results.
3363 (4) in case insitu=False and retfitres=False:
3364 a new scantable (with baseline subtracted).
[2767]3365 inbltable: name of input baseline table. The row number of
3366 scantable and that of inbltable must be
3367 identical.
3368 outbltable: name of output baseline table where baseline
3369 parameters and fitting results recorded.
3370 default is ''(no output).
[2809]3371 overwrite: if True when an existing baseline table is
3372 specified for outbltable, overwrites it.
3373 Otherwise there is no harm.
[2767]3374 default is False.
3375 """
3376
3377 try:
3378 varlist = vars()
[2810]3379 if retfitres is None: retfitres = False
[2767]3380 if inbltable is None: raise ValueError("bltable missing.")
3381 if outbltable is None: outbltable = ''
3382 if overwrite is None: overwrite = False
3383
3384 if insitu is None: insitu = rcParams['insitu']
3385 if insitu:
3386 workscan = self
3387 else:
3388 workscan = self.copy()
3389
3390 sres = workscan._apply_bltable(inbltable,
[2810]3391 retfitres,
[2767]3392 outbltable,
3393 os.path.exists(outbltable),
3394 overwrite)
[2810]3395 if retfitres: res = parse_fitresult(sres)
[2767]3396
3397 workscan._add_history('apply_bltable', varlist)
3398
3399 if insitu:
3400 self._assign(workscan)
[2810]3401 if retfitres:
3402 return res
3403 else:
3404 return None
[2767]3405 else:
[2810]3406 if retfitres:
3407 return {'scantable': workscan, 'fitresults': res}
3408 else:
3409 return workscan
[2767]3410
3411 except RuntimeError, e:
3412 raise_fitting_failure_exception(e)
3413
3414 @asaplog_post_dec
[2810]3415 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
[2767]3416 """\
3417 Subtract baseline based on parameters written in the input list.
3418
3419 Parameters:
[2809]3420 insitu: if True, baseline fitting/subtraction is done
[2810]3421 in-situ. If False, a new scantable with
3422 baseline subtracted is returned. Actually,
3423 format of the returned value depends on both
3424 insitu and retfitres (see below).
[2767]3425 The default is taken from .asaprc (False)
[2810]3426 retfitres: if True, the results of baseline fitting (i.e.,
3427 coefficients and rms) are returned.
3428 default is False.
3429 The format of the returned value of this
3430 function varies as follows:
3431 (1) in case insitu=True and retfitres=True:
3432 fitting result.
3433 (2) in case insitu=True and retfitres=False:
3434 None.
3435 (3) in case insitu=False and retfitres=True:
3436 a dictionary containing a new scantable
3437 (with baseline subtracted) and the fitting
3438 results.
3439 (4) in case insitu=False and retfitres=False:
3440 a new scantable (with baseline subtracted).
[2767]3441 blinfo: baseline parameter set stored in a dictionary
3442 or a list of dictionary. Each dictionary
3443 corresponds to each spectrum and must contain
3444 the following keys and values:
3445 'row': row number,
3446 'blfunc': function name. available ones include
3447 'poly', 'chebyshev', 'cspline' and
3448 'sinusoid',
3449 'order': maximum order of polynomial. needed
3450 if blfunc='poly' or 'chebyshev',
3451 'npiece': number or piecewise polynomial.
3452 needed if blfunc='cspline',
3453 'nwave': a list of sinusoidal wave numbers.
3454 needed if blfunc='sinusoid', and
3455 'masklist': min-max windows for channel mask.
3456 the specified ranges will be used
3457 for fitting.
3458 bltable: name of output baseline table where baseline
3459 parameters and fitting results recorded.
3460 default is ''(no output).
[2809]3461 overwrite: if True when an existing baseline table is
3462 specified for bltable, overwrites it.
3463 Otherwise there is no harm.
[2767]3464 default is False.
3465
3466 Example:
3467 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3468 'masklist':[[10,350],[352,510]]},
3469 {'row':1, 'blfunc':'cspline', 'npiece':3,
3470 'masklist':[[3,16],[19,404],[407,511]]}
3471 ])
3472
3473 the first spectrum (row=0) will be fitted with polynomial
3474 of order=5 and the next one (row=1) will be fitted with cubic
3475 spline consisting of 3 pieces.
3476 """
3477
3478 try:
3479 varlist = vars()
[2810]3480 if retfitres is None: retfitres = False
[2767]3481 if blinfo is None: blinfo = []
3482 if bltable is None: bltable = ''
3483 if overwrite is None: overwrite = False
3484
3485 if insitu is None: insitu = rcParams['insitu']
3486 if insitu:
3487 workscan = self
3488 else:
3489 workscan = self.copy()
3490
3491 nrow = workscan.nrow()
3492
3493 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3494
3495 sres = workscan._sub_baseline(in_blinfo,
[2810]3496 retfitres,
[2767]3497 bltable,
3498 os.path.exists(bltable),
3499 overwrite)
[2810]3500 if retfitres: res = parse_fitresult(sres)
[2767]3501
3502 workscan._add_history('sub_baseline', varlist)
3503
3504 if insitu:
3505 self._assign(workscan)
[2810]3506 if retfitres:
3507 return res
3508 else:
3509 return None
[2767]3510 else:
[2810]3511 if retfitres:
3512 return {'scantable': workscan, 'fitresults': res}
3513 else:
3514 return workscan
[2767]3515
3516 except RuntimeError, e:
3517 raise_fitting_failure_exception(e)
3518
3519 @asaplog_post_dec
[2713]3520 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3521 whichrow=None, uselinefinder=None, edge=None,
3522 threshold=None, chan_avg_limit=None):
3523 """\
3524 Calculates and returns model selection criteria for a specified
3525 baseline model and a given spectrum data.
3526 Available values include Akaike Information Criterion (AIC), the
3527 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3528 Bayesian Information Criterion (BIC) and the Generalised Cross
3529 Validation (GCV).
[2186]3530
[2713]3531 Parameters:
3532 value: name of model selection criteria to calculate.
3533 available ones include 'aic', 'aicc', 'bic' and
3534 'gcv'. default is 'aicc'.
3535 blfunc: baseline function name. available ones include
3536 'chebyshev', 'cspline' and 'sinusoid'.
3537 default is 'chebyshev'.
3538 order: parameter for basline function. actually stands for
3539 order of polynomial (order) for 'chebyshev',
3540 number of spline pieces (npiece) for 'cspline' and
3541 maximum wave number for 'sinusoid', respectively.
3542 default is 5 (which is also the default order value
3543 for [auto_]chebyshev_baseline()).
3544 mask: an optional mask. default is [].
3545 whichrow: row number. default is 0 (the first row)
3546 uselinefinder: use sd.linefinder() to flag out line regions
3547 default is True.
3548 edge: an optional number of channel to drop at
3549 the edge of spectrum. If only one value is
3550 specified, the same number will be dropped
3551 from both sides of the spectrum. Default
3552 is to keep all channels. Nested tuples
3553 represent individual edge selection for
3554 different IFs (a number of spectral channels
3555 can be different)
3556 default is (0, 0).
3557 threshold: the threshold used by line finder. It is
3558 better to keep it large as only strong lines
3559 affect the baseline solution.
3560 default is 3.
3561 chan_avg_limit: a maximum number of consequtive spectral
3562 channels to average during the search of
3563 weak and broad lines. The default is no
3564 averaging (and no search for weak lines).
3565 If such lines can affect the fitted baseline
3566 (e.g. a high order polynomial is fitted),
3567 increase this parameter (usually values up
3568 to 8 are reasonable). Most users of this
3569 method should find the default value sufficient.
3570 default is 1.
3571
3572 Example:
3573 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3574 """
3575
3576 try:
3577 varlist = vars()
3578
3579 if value is None: value = 'aicc'
3580 if blfunc is None: blfunc = 'chebyshev'
3581 if order is None: order = 5
3582 if mask is None: mask = []
3583 if whichrow is None: whichrow = 0
3584 if uselinefinder is None: uselinefinder = True
3585 if edge is None: edge = (0, 0)
3586 if threshold is None: threshold = 3
3587 if chan_avg_limit is None: chan_avg_limit = 1
3588
3589 return self._calc_aic(value, blfunc, order, mask,
3590 whichrow, uselinefinder, edge,
3591 threshold, chan_avg_limit)
3592
3593 except RuntimeError, e:
3594 raise_fitting_failure_exception(e)
3595
    @asaplog_post_dec
    def sinusoid_baseline(self, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None,
                          insitu=None,
                          clipthresh=None, clipniter=None,
                          plot=None,
                          getresidual=None,
                          showprogress=None, minnrow=None,
                          outlog=None,
                          blfile=None, csvformat=None,
                          bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.
            bltable:       name of a baseline table where fitting results
                           (coefficients, rms, etc.) are to be written.
                           if given, fitting results will NOT be output to
                           scantable (insitu=True) or None will be
                           returned (insitu=False).
                           (default is "": no table output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for unset keywords
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT options are packed into a single comma-separated
            # string for the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # csv flag is encoded as a one-character prefix of the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask,
                                        fftinfo,
                                        #applyfft, fftmethod.lower(),
                                        #str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)
            workscan._add_history('sinusoid_baseline', varlist)

            # with a baseline table requested, results go to the table
            # instead of a returned/assigned scantable
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3741
3742
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None,
                               insitu=None,
                               clipthresh=None, clipniter=None,
                               edge=None, threshold=None, chan_avg_limit=None,
                               plot=None,
                               getresidual=None,
                               showprogress=None, minnrow=None,
                               outlog=None,
                               blfile=None, csvformat=None,
                               bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:          an optional mask retreived from scantable
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            edge:          an optional number of channel to drop at
                           the edge of spectrum. If only one value is
                           specified, the same number will be dropped
                           from both sides of the spectrum. Default
                           is to keep all channels. Nested tuples
                           represent individual edge selection for
                           different IFs (a number of spectral channels
                           can be different)
            threshold:     the threshold used by line finder. It is
                           better to keep it large as only strong lines
                           affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                           channels to average during the search of
                           weak and broad lines. The default is no
                           averaging (and no search for weak lines).
                           If such lines can affect the fitted baseline
                           (e.g. a high order polynomial is fitted),
                           increase this parameter (usually values up
                           to 8 are reasonable). Most users of this
                           method should find the default value sufficient.
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is "": no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.
            bltable:       name of a baseline table where fitting results
                           (coefficients, rms, etc.) are to be written.
                           if given, fitting results will NOT be output to
                           scantable (insitu=True) or None will be
                           returned (insitu=False).
                           (default is "": no table output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for unset keywords
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT options are packed into a single comma-separated
            # string for the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # csv flag is encoded as a one-character prefix of the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask,
                                             fftinfo,
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile, bltable)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            # with a baseline table requested, results go to the table
            # instead of a returned/assigned scantable
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3910
3911 @asaplog_post_dec
[2771]3912 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]3913 clipthresh=None, clipniter=None, plot=None,
3914 getresidual=None, showprogress=None, minnrow=None,
[2767]3915 outlog=None, blfile=None, csvformat=None,
3916 bltable=None):
[1846]3917 """\
[2349]3918 Return a scan which has been baselined (all rows) by cubic spline
3919 function (piecewise cubic polynomial).
3920
[513]3921 Parameters:
[2771]3922 mask: An optional mask
3923 npiece: Number of pieces. (default is 2)
[2189]3924 insitu: If False a new scantable is returned.
3925 Otherwise, the scaling is done in-situ
3926 The default is taken from .asaprc (False)
3927 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3928 clipniter: maximum number of iteration of 'clipthresh'-sigma
3929 clipping (default is 0)
[2189]3930 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3931 plot the fit and the residual. In this each
3932 indivual fit has to be approved, by typing 'y'
3933 or 'n'
3934 getresidual: if False, returns best-fit values instead of
3935 residual. (default is True)
3936 showprogress: show progress status for large data.
3937 default is True.
3938 minnrow: minimum number of input spectra to show.
3939 default is 1000.
3940 outlog: Output the coefficients of the best-fit
3941 function to logger (default is False)
3942 blfile: Name of a text file in which the best-fit
3943 parameter values to be written
3944 (default is "": no file/logger output)
[2641]3945 csvformat: if True blfile is csv-formatted, default is False.
[2767]3946 bltable: name of a baseline table where fitting results
3947 (coefficients, rms, etc.) are to be written.
3948 if given, fitting results will NOT be output to
3949 scantable (insitu=True) or None will be
3950 returned (insitu=False).
3951 (default is "": no table output)
[1846]3952
[2012]3953 Example:
[2349]3954 # return a scan baselined by a cubic spline consisting of 2 pieces
3955 # (i.e., 1 internal knot),
[2012]3956 # also with 3-sigma clipping, iteration up to 4 times
3957 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]3958
3959 Note:
3960 The best-fit parameter values output in logger and/or blfile are now
3961 based on specunit of 'channel'.
[2012]3962 """
3963
[2186]3964 try:
3965 varlist = vars()
3966
3967 if insitu is None: insitu = rcParams['insitu']
3968 if insitu:
3969 workscan = self
3970 else:
3971 workscan = self.copy()
[1855]3972
[2410]3973 if mask is None: mask = []
[2189]3974 if npiece is None: npiece = 2
3975 if clipthresh is None: clipthresh = 3.0
3976 if clipniter is None: clipniter = 0
3977 if plot is None: plot = False
3978 if getresidual is None: getresidual = True
3979 if showprogress is None: showprogress = True
3980 if minnrow is None: minnrow = 1000
3981 if outlog is None: outlog = False
3982 if blfile is None: blfile = ''
[2767]3983 if csvformat is None: csvformat = False
3984 if bltable is None: bltable = ''
[1855]3985
[2767]3986 scsvformat = 'T' if csvformat else 'F'
[2641]3987
[2012]3988 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]3989 workscan._cspline_baseline(mask, npiece,
3990 clipthresh, clipniter,
[2349]3991 getresidual,
3992 pack_progress_params(showprogress,
[2641]3993 minnrow),
[2767]3994 outlog, scsvformat+blfile,
3995 bltable)
[2012]3996 workscan._add_history("cspline_baseline", varlist)
[2767]3997
3998 if bltable == '':
3999 if insitu:
4000 self._assign(workscan)
4001 else:
4002 return workscan
[2012]4003 else:
[2767]4004 if not insitu:
4005 return None
[2012]4006
4007 except RuntimeError, e:
[2186]4008 raise_fitting_failure_exception(e)
[1855]4009
[2186]4010 @asaplog_post_dec
[2771]4011 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]4012 clipthresh=None, clipniter=None,
4013 edge=None, threshold=None, chan_avg_limit=None,
4014 getresidual=None, plot=None,
4015 showprogress=None, minnrow=None, outlog=None,
[2767]4016 blfile=None, csvformat=None, bltable=None):
[2012]4017 """\
4018 Return a scan which has been baselined (all rows) by cubic spline
4019 function (piecewise cubic polynomial).
4020 Spectral lines are detected first using linefinder and masked out
4021 to avoid them affecting the baseline solution.
4022
4023 Parameters:
[2771]4024 mask: an optional mask retreived from scantable
4025 npiece: Number of pieces. (default is 2)
[2189]4026 insitu: if False a new scantable is returned.
4027 Otherwise, the scaling is done in-situ
4028 The default is taken from .asaprc (False)
4029 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]4030 clipniter: maximum number of iteration of 'clipthresh'-sigma
4031 clipping (default is 0)
[2189]4032 edge: an optional number of channel to drop at
4033 the edge of spectrum. If only one value is
4034 specified, the same number will be dropped
4035 from both sides of the spectrum. Default
4036 is to keep all channels. Nested tuples
4037 represent individual edge selection for
4038 different IFs (a number of spectral channels
4039 can be different)
4040 threshold: the threshold used by line finder. It is
4041 better to keep it large as only strong lines
4042 affect the baseline solution.
4043 chan_avg_limit: a maximum number of consequtive spectral
4044 channels to average during the search of
4045 weak and broad lines. The default is no
4046 averaging (and no search for weak lines).
4047 If such lines can affect the fitted baseline
4048 (e.g. a high order polynomial is fitted),
4049 increase this parameter (usually values up
4050 to 8 are reasonable). Most users of this
4051 method should find the default value sufficient.
4052 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4053 plot the fit and the residual. In this each
4054 indivual fit has to be approved, by typing 'y'
4055 or 'n'
4056 getresidual: if False, returns best-fit values instead of
4057 residual. (default is True)
4058 showprogress: show progress status for large data.
4059 default is True.
4060 minnrow: minimum number of input spectra to show.
4061 default is 1000.
4062 outlog: Output the coefficients of the best-fit
4063 function to logger (default is False)
4064 blfile: Name of a text file in which the best-fit
4065 parameter values to be written
4066 (default is "": no file/logger output)
[2641]4067 csvformat: if True blfile is csv-formatted, default is False.
[2767]4068 bltable: name of a baseline table where fitting results
4069 (coefficients, rms, etc.) are to be written.
4070 if given, fitting results will NOT be output to
4071 scantable (insitu=True) or None will be
4072 returned (insitu=False).
4073 (default is "": no table output)
[1846]4074
[1907]4075 Example:
[2012]4076 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]4077
4078 Note:
4079 The best-fit parameter values output in logger and/or blfile are now
4080 based on specunit of 'channel'.
[2012]4081 """
[1846]4082
[2186]4083 try:
4084 varlist = vars()
[2012]4085
[2186]4086 if insitu is None: insitu = rcParams['insitu']
4087 if insitu:
4088 workscan = self
[1391]4089 else:
[2186]4090 workscan = self.copy()
4091
[2410]4092 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4093 if mask is None: mask = []
[2186]4094 if npiece is None: npiece = 2
4095 if clipthresh is None: clipthresh = 3.0
4096 if clipniter is None: clipniter = 0
4097 if edge is None: edge = (0, 0)
4098 if threshold is None: threshold = 3
4099 if chan_avg_limit is None: chan_avg_limit = 1
4100 if plot is None: plot = False
4101 if getresidual is None: getresidual = True
[2189]4102 if showprogress is None: showprogress = True
4103 if minnrow is None: minnrow = 1000
[2186]4104 if outlog is None: outlog = False
4105 if blfile is None: blfile = ''
[2641]4106 if csvformat is None: csvformat = False
[2767]4107 if bltable is None: bltable = ''
[1819]4108
[2767]4109 scsvformat = 'T' if csvformat else 'F'
[2641]4110
[2277]4111 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4112 workscan._auto_cspline_baseline(mask, npiece,
4113 clipthresh, clipniter,
[2269]4114 normalise_edge_param(edge),
4115 threshold,
4116 chan_avg_limit, getresidual,
4117 pack_progress_params(showprogress,
4118 minnrow),
[2767]4119 outlog,
4120 scsvformat+blfile,
4121 bltable)
[2012]4122 workscan._add_history("auto_cspline_baseline", varlist)
[2767]4123
4124 if bltable == '':
4125 if insitu:
4126 self._assign(workscan)
4127 else:
4128 return workscan
[1856]4129 else:
[2767]4130 if not insitu:
4131 return None
[2012]4132
4133 except RuntimeError, e:
[2186]4134 raise_fitting_failure_exception(e)
[513]4135
[1931]4136 @asaplog_post_dec
[2771]4137 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
[2645]4138 clipthresh=None, clipniter=None, plot=None,
4139 getresidual=None, showprogress=None, minnrow=None,
[2767]4140 outlog=None, blfile=None, csvformat=None,
4141 bltable=None):
[2645]4142 """\
4143 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4144
4145 Parameters:
[2771]4146 mask: An optional mask
4147 order: the maximum order of Chebyshev polynomial (default is 5)
[2645]4148 insitu: If False a new scantable is returned.
4149 Otherwise, the scaling is done in-situ
4150 The default is taken from .asaprc (False)
4151 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4152 clipniter: maximum number of iteration of 'clipthresh'-sigma
4153 clipping (default is 0)
4154 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4155 plot the fit and the residual. In this each
4156 indivual fit has to be approved, by typing 'y'
4157 or 'n'
4158 getresidual: if False, returns best-fit values instead of
4159 residual. (default is True)
4160 showprogress: show progress status for large data.
4161 default is True.
4162 minnrow: minimum number of input spectra to show.
4163 default is 1000.
4164 outlog: Output the coefficients of the best-fit
4165 function to logger (default is False)
4166 blfile: Name of a text file in which the best-fit
4167 parameter values to be written
4168 (default is "": no file/logger output)
4169 csvformat: if True blfile is csv-formatted, default is False.
[2767]4170 bltable: name of a baseline table where fitting results
4171 (coefficients, rms, etc.) are to be written.
4172 if given, fitting results will NOT be output to
4173 scantable (insitu=True) or None will be
4174 returned (insitu=False).
4175 (default is "": no table output)
[2645]4176
4177 Example:
4178 # return a scan baselined by a cubic spline consisting of 2 pieces
4179 # (i.e., 1 internal knot),
4180 # also with 3-sigma clipping, iteration up to 4 times
4181 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4182
4183 Note:
4184 The best-fit parameter values output in logger and/or blfile are now
4185 based on specunit of 'channel'.
4186 """
4187
4188 try:
4189 varlist = vars()
4190
4191 if insitu is None: insitu = rcParams['insitu']
4192 if insitu:
4193 workscan = self
4194 else:
4195 workscan = self.copy()
4196
4197 if mask is None: mask = []
4198 if order is None: order = 5
4199 if clipthresh is None: clipthresh = 3.0
4200 if clipniter is None: clipniter = 0
4201 if plot is None: plot = False
4202 if getresidual is None: getresidual = True
4203 if showprogress is None: showprogress = True
4204 if minnrow is None: minnrow = 1000
4205 if outlog is None: outlog = False
4206 if blfile is None: blfile = ''
[2767]4207 if csvformat is None: csvformat = False
4208 if bltable is None: bltable = ''
[2645]4209
[2767]4210 scsvformat = 'T' if csvformat else 'F'
[2645]4211
4212 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4213 workscan._chebyshev_baseline(mask, order,
4214 clipthresh, clipniter,
[2645]4215 getresidual,
4216 pack_progress_params(showprogress,
4217 minnrow),
[2767]4218 outlog, scsvformat+blfile,
4219 bltable)
[2645]4220 workscan._add_history("chebyshev_baseline", varlist)
[2767]4221
4222 if bltable == '':
4223 if insitu:
4224 self._assign(workscan)
4225 else:
4226 return workscan
[2645]4227 else:
[2767]4228 if not insitu:
4229 return None
[2645]4230
4231 except RuntimeError, e:
4232 raise_fitting_failure_exception(e)
4233
4234 @asaplog_post_dec
[2771]4235 def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
[2645]4236 clipthresh=None, clipniter=None,
4237 edge=None, threshold=None, chan_avg_limit=None,
4238 getresidual=None, plot=None,
4239 showprogress=None, minnrow=None, outlog=None,
[2767]4240 blfile=None, csvformat=None, bltable=None):
[2645]4241 """\
4242 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4243 Spectral lines are detected first using linefinder and masked out
4244 to avoid them affecting the baseline solution.
4245
4246 Parameters:
[2771]4247 mask: an optional mask retreived from scantable
4248 order: the maximum order of Chebyshev polynomial (default is 5)
[2645]4249 insitu: if False a new scantable is returned.
4250 Otherwise, the scaling is done in-situ
4251 The default is taken from .asaprc (False)
4252 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4253 clipniter: maximum number of iteration of 'clipthresh'-sigma
4254 clipping (default is 0)
4255 edge: an optional number of channel to drop at
4256 the edge of spectrum. If only one value is
4257 specified, the same number will be dropped
4258 from both sides of the spectrum. Default
4259 is to keep all channels. Nested tuples
4260 represent individual edge selection for
4261 different IFs (a number of spectral channels
4262 can be different)
4263 threshold: the threshold used by line finder. It is
4264 better to keep it large as only strong lines
4265 affect the baseline solution.
4266 chan_avg_limit: a maximum number of consequtive spectral
4267 channels to average during the search of
4268 weak and broad lines. The default is no
4269 averaging (and no search for weak lines).
4270 If such lines can affect the fitted baseline
4271 (e.g. a high order polynomial is fitted),
4272 increase this parameter (usually values up
4273 to 8 are reasonable). Most users of this
4274 method should find the default value sufficient.
4275 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4276 plot the fit and the residual. In this each
4277 indivual fit has to be approved, by typing 'y'
4278 or 'n'
4279 getresidual: if False, returns best-fit values instead of
4280 residual. (default is True)
4281 showprogress: show progress status for large data.
4282 default is True.
4283 minnrow: minimum number of input spectra to show.
4284 default is 1000.
4285 outlog: Output the coefficients of the best-fit
4286 function to logger (default is False)
4287 blfile: Name of a text file in which the best-fit
4288 parameter values to be written
4289 (default is "": no file/logger output)
4290 csvformat: if True blfile is csv-formatted, default is False.
[2767]4291 bltable: name of a baseline table where fitting results
4292 (coefficients, rms, etc.) are to be written.
4293 if given, fitting results will NOT be output to
4294 scantable (insitu=True) or None will be
4295 returned (insitu=False).
4296 (default is "": no table output)
[2645]4297
4298 Example:
4299 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4300
4301 Note:
4302 The best-fit parameter values output in logger and/or blfile are now
4303 based on specunit of 'channel'.
4304 """
4305
4306 try:
4307 varlist = vars()
4308
4309 if insitu is None: insitu = rcParams['insitu']
4310 if insitu:
4311 workscan = self
4312 else:
4313 workscan = self.copy()
4314
4315 if mask is None: mask = []
4316 if order is None: order = 5
4317 if clipthresh is None: clipthresh = 3.0
4318 if clipniter is None: clipniter = 0
4319 if edge is None: edge = (0, 0)
4320 if threshold is None: threshold = 3
4321 if chan_avg_limit is None: chan_avg_limit = 1
4322 if plot is None: plot = False
4323 if getresidual is None: getresidual = True
4324 if showprogress is None: showprogress = True
4325 if minnrow is None: minnrow = 1000
4326 if outlog is None: outlog = False
4327 if blfile is None: blfile = ''
4328 if csvformat is None: csvformat = False
[2767]4329 if bltable is None: bltable = ''
[2645]4330
[2767]4331 scsvformat = 'T' if csvformat else 'F'
[2645]4332
4333 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4334 workscan._auto_chebyshev_baseline(mask, order,
4335 clipthresh, clipniter,
[2645]4336 normalise_edge_param(edge),
4337 threshold,
4338 chan_avg_limit, getresidual,
4339 pack_progress_params(showprogress,
4340 minnrow),
[2767]4341 outlog, scsvformat+blfile,
4342 bltable)
[2645]4343 workscan._add_history("auto_chebyshev_baseline", varlist)
[2767]4344
4345 if bltable == '':
4346 if insitu:
4347 self._assign(workscan)
4348 else:
4349 return workscan
[2645]4350 else:
[2767]4351 if not insitu:
4352 return None
[2645]4353
4354 except RuntimeError, e:
4355 raise_fitting_failure_exception(e)
4356
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # snapshot of the call arguments, recorded in the history below
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve unspecified (None) arguments to their documented defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the backend expects the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive path: fit each row with the Python fitter and
                # let the user approve/reject every fit.
                # NOTE(review): this branch ignores 'bltable' and 'clipthresh'/
                # 'clipniter' — only the non-plot backend path uses them; confirm
                # whether that is intended.
                # append to blfile only if it already exists
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                        os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # interactive state: "H" = ask (with help) on first row,
                # "Y" = user chose to accept all remaining fits
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        # combine user mask with the row's own flag mask
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        continue
                    elif accept_fit.upper() == "R":
                        # reject all remaining rows
                        break
                    elif accept_fit.upper() == "A":
                        # accept this and all remaining fits without asking
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store residual (or the fit itself when getresidual=False)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter attached to the fitter
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole fit to the C++ backend
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)

            workscan._add_history("poly_baseline", varlist)

            # NOTE(review): unlike the sibling *_baseline methods, this return
            # is not gated on bltable == '' — with bltable set and insitu=False
            # a scantable is returned instead of None; confirm intended.
            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]4506
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # snapshot of the call arguments, recorded in the history below
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve unspecified (None) arguments to their documented defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the backend expects the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # flatten edge spec into a per-IF (start, stop) pair list
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: linefinder masks lines, then each row is
                # fitted with the Python fitter for user approval.
                # NOTE(review): this branch ignores 'bltable' and 'clipthresh'/
                # 'clipniter' — only the non-plot backend path uses them; confirm
                # whether that is intended.
                # append to blfile only if it already exists
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # interactive state: "H" = ask (with help) on first row,
                # "Y" = user chose to accept all remaining fits
                action = "H"
                for r in rows:
                    # edge holds (start, stop) pairs per IF, hence the 2*IF index
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine user mask with the row's own flag mask
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    # detect spectral lines so they can be excluded from the fit
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        # rejected: leave this row's spectrum untouched
                        continue
                    elif accept_fit.upper() == "R":
                        # reject all remaining rows
                        break
                    elif accept_fit.upper() == "A":
                        # accept this and all remaining fits without asking
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store residual (or the fit itself when getresidual=False)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter attached to the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate line finding and fitting
                # to the C++ backend
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            if bltable == '':
                # fit results live in the scantable itself
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results were written to the baseline table only
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]4691
4692 def _init_blinfo(self):
4693 """\
4694 Initialise the following three auxiliary members:
4695 blpars : parameters of the best-fit baseline,
4696 masklists : mask data (edge positions of masked channels) and
4697 actualmask : mask data (in boolean list),
4698 to keep for use later (including output to logger/text files).
4699 Used by poly_baseline() and auto_poly_baseline() in case of
4700 'plot=True'.
4701 """
4702 self.blpars = []
4703 self.masklists = []
4704 self.actualmask = []
4705 return
[880]4706
[2012]4707 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4708 """\
4709 Append baseline-fitting related info to blpars, masklist and
4710 actualmask.
4711 """
4712 self.blpars.append(data_blpars)
4713 self.masklists.append(data_masklists)
4714 self.actualmask.append(data_actualmask)
4715 return
4716
[1862]4717 @asaplog_post_dec
[914]4718 def rotate_linpolphase(self, angle):
[1846]4719 """\
[914]4720 Rotate the phase of the complex polarization O=Q+iU correlation.
4721 This is always done in situ in the raw data. So if you call this
4722 function more than once then each call rotates the phase further.
[1846]4723
[914]4724 Parameters:
[1846]4725
[914]4726 angle: The angle (degrees) to rotate (add) by.
[1846]4727
4728 Example::
4729
[914]4730 scan.rotate_linpolphase(2.3)
[1846]4731
[914]4732 """
4733 varlist = vars()
[936]4734 self._math._rotate_linpolphase(self, angle)
[914]4735 self._add_history("rotate_linpolphase", varlist)
4736 return
[710]4737
[1862]4738 @asaplog_post_dec
[914]4739 def rotate_xyphase(self, angle):
[1846]4740 """\
[914]4741 Rotate the phase of the XY correlation. This is always done in situ
4742 in the data. So if you call this function more than once
4743 then each call rotates the phase further.
[1846]4744
[914]4745 Parameters:
[1846]4746
[914]4747 angle: The angle (degrees) to rotate (add) by.
[1846]4748
4749 Example::
4750
[914]4751 scan.rotate_xyphase(2.3)
[1846]4752
[914]4753 """
4754 varlist = vars()
[936]4755 self._math._rotate_xyphase(self, angle)
[914]4756 self._add_history("rotate_xyphase", varlist)
4757 return
4758
[1862]4759 @asaplog_post_dec
[914]4760 def swap_linears(self):
[1846]4761 """\
[1573]4762 Swap the linear polarisations XX and YY, or better the first two
[1348]4763 polarisations as this also works for ciculars.
[914]4764 """
4765 varlist = vars()
[936]4766 self._math._swap_linears(self)
[914]4767 self._add_history("swap_linears", varlist)
4768 return
4769
[1862]4770 @asaplog_post_dec
[914]4771 def invert_phase(self):
[1846]4772 """\
[914]4773 Invert the phase of the complex polarisation
4774 """
4775 varlist = vars()
[936]4776 self._math._invert_phase(self)
[914]4777 self._add_history("invert_phase", varlist)
4778 return
4779
[1862]4780 @asaplog_post_dec
[876]4781 def add(self, offset, insitu=None):
[1846]4782 """\
[513]4783 Return a scan where all spectra have the offset added
[1846]4784
[513]4785 Parameters:
[1846]4786
[513]4787 offset: the offset
[1855]4788
[513]4789 insitu: if False a new scantable is returned.
4790 Otherwise, the scaling is done in-situ
4791 The default is taken from .asaprc (False)
[1846]4792
[513]4793 """
4794 if insitu is None: insitu = rcParams['insitu']
[876]4795 self._math._setinsitu(insitu)
[513]4796 varlist = vars()
[2954]4797 s = scantable(self._math._unaryop(self, offset, "ADD", False, False))
[1118]4798 s._add_history("add", varlist)
[876]4799 if insitu:
4800 self._assign(s)
4801 else:
[513]4802 return s
4803
[1862]4804 @asaplog_post_dec
[2966]4805 def scale(self, factor, tsys=True, insitu=None):
[1846]4806 """\
4807
[1938]4808 Return a scan where all spectra are scaled by the given 'factor'
[1846]4809
[513]4810 Parameters:
[1846]4811
[1819]4812 factor: the scaling factor (float or 1D float list)
[1855]4813
[513]4814 insitu: if False a new scantable is returned.
4815 Otherwise, the scaling is done in-situ
4816 The default is taken from .asaprc (False)
[1855]4817
[513]4818 tsys: if True (default) then apply the operation to Tsys
4819 as well as the data
4820 """
4821 if insitu is None: insitu = rcParams['insitu']
[876]4822 self._math._setinsitu(insitu)
[513]4823 varlist = vars()
[1819]4824 s = None
4825 import numpy
4826 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]4827 if isinstance(factor[0], list) or isinstance(factor[0],
4828 numpy.ndarray):
[1819]4829 from asapmath import _array2dOp
[2966]4830 s = _array2dOp( self, factor, "MUL", tsys, insitu, True )
[1819]4831 else:
[2320]4832 s = scantable( self._math._arrayop( self, factor,
[2966]4833 "MUL", tsys, True ) )
[1819]4834 else:
[2966]4835 s = scantable(self._math._unaryop(self, factor, "MUL", tsys, True ))
[1118]4836 s._add_history("scale", varlist)
[876]4837 if insitu:
4838 self._assign(s)
4839 else:
[513]4840 return s
4841
[2349]4842 @preserve_selection
4843 def set_sourcetype(self, match, matchtype="pattern",
[1504]4844 sourcetype="reference"):
[1846]4845 """\
[1502]4846 Set the type of the source to be an source or reference scan
[1846]4847 using the provided pattern.
4848
[1502]4849 Parameters:
[1846]4850
[1504]4851 match: a Unix style pattern, regular expression or selector
[1855]4852
[1504]4853 matchtype: 'pattern' (default) UNIX style pattern or
4854 'regex' regular expression
[1855]4855
[1502]4856 sourcetype: the type of the source to use (source/reference)
[1846]4857
[1502]4858 """
4859 varlist = vars()
4860 stype = -1
[2480]4861 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
[1502]4862 stype = 1
[2480]4863 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
[1502]4864 stype = 0
[1504]4865 else:
[2480]4866 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
[1504]4867 if matchtype.lower().startswith("p"):
4868 matchtype = "pattern"
4869 elif matchtype.lower().startswith("r"):
4870 matchtype = "regex"
4871 else:
4872 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]4873 sel = selector()
4874 if isinstance(match, selector):
4875 sel = match
4876 else:
[2480]4877 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4878 self.set_selection(sel)
[1502]4879 self._setsourcetype(stype)
[1573]4880 self._add_history("set_sourcetype", varlist)
[1502]4881
[2818]4882
4883 def set_sourcename(self, name):
4884 varlist = vars()
4885 self._setsourcename(name)
4886 self._add_history("set_sourcename", varlist)
4887
[1862]4888 @asaplog_post_dec
[1857]4889 @preserve_selection
[1819]4890 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]4891 """\
[670]4892 This function allows to build quotients automatically.
[1819]4893 It assumes the observation to have the same number of
[670]4894 "ons" and "offs"
[1846]4895
[670]4896 Parameters:
[1846]4897
[710]4898 preserve: you can preserve (default) the continuum or
4899 remove it. The equations used are
[1857]4900
[670]4901 preserve: Output = Toff * (on/off) - Toff
[1857]4902
[1070]4903 remove: Output = Toff * (on/off) - Ton
[1855]4904
[1573]4905 mode: the on/off detection mode
[1348]4906 'paired' (default)
4907 identifies 'off' scans by the
4908 trailing '_R' (Mopra/Parkes) or
4909 '_e'/'_w' (Tid) and matches
4910 on/off pairs from the observing pattern
[1502]4911 'time'
4912 finds the closest off in time
[1348]4913
[1857]4914 .. todo:: verify argument is not implemented
4915
[670]4916 """
[1857]4917 varlist = vars()
[1348]4918 modes = ["time", "paired"]
[670]4919 if not mode in modes:
[876]4920 msg = "please provide valid mode. Valid modes are %s" % (modes)
4921 raise ValueError(msg)
[1348]4922 s = None
4923 if mode.lower() == "paired":
[2840]4924 from asap._asap import srctype
[1857]4925 sel = self.get_selection()
[2840]4926 #sel.set_query("SRCTYPE==psoff")
4927 sel.set_types(srctype.psoff)
[1356]4928 self.set_selection(sel)
[1348]4929 offs = self.copy()
[2840]4930 #sel.set_query("SRCTYPE==pson")
4931 sel.set_types(srctype.pson)
[1356]4932 self.set_selection(sel)
[1348]4933 ons = self.copy()
4934 s = scantable(self._math._quotient(ons, offs, preserve))
4935 elif mode.lower() == "time":
4936 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]4937 s._add_history("auto_quotient", varlist)
[876]4938 return s
[710]4939
[1862]4940 @asaplog_post_dec
[1145]4941 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]4942 """\
[1143]4943 Form a quotient using "off" beams when observing in "MX" mode.
[1846]4944
[1143]4945 Parameters:
[1846]4946
[1145]4947 mask: an optional mask to be used when weight == 'stddev'
[1855]4948
[1143]4949 weight: How to average the off beams. Default is 'median'.
[1855]4950
[1145]4951 preserve: you can preserve (default) the continuum or
[1855]4952 remove it. The equations used are:
[1846]4953
[1855]4954 preserve: Output = Toff * (on/off) - Toff
4955
4956 remove: Output = Toff * (on/off) - Ton
4957
[1217]4958 """
[1593]4959 mask = mask or ()
[1141]4960 varlist = vars()
4961 on = scantable(self._math._mx_extract(self, 'on'))
[1143]4962 preoff = scantable(self._math._mx_extract(self, 'off'))
4963 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]4964 from asapmath import quotient
[1145]4965 q = quotient(on, off, preserve)
[1143]4966 q._add_history("mx_quotient", varlist)
[1217]4967 return q
[513]4968
[1862]4969 @asaplog_post_dec
[718]4970 def freq_switch(self, insitu=None):
[1846]4971 """\
[718]4972 Apply frequency switching to the data.
[1846]4973
[718]4974 Parameters:
[1846]4975
[718]4976 insitu: if False a new scantable is returned.
4977 Otherwise, the swictching is done in-situ
4978 The default is taken from .asaprc (False)
[1846]4979
[718]4980 """
4981 if insitu is None: insitu = rcParams['insitu']
[876]4982 self._math._setinsitu(insitu)
[718]4983 varlist = vars()
[876]4984 s = scantable(self._math._freqswitch(self))
[1118]4985 s._add_history("freq_switch", varlist)
[1856]4986 if insitu:
4987 self._assign(s)
4988 else:
4989 return s
[718]4990
[1862]4991 @asaplog_post_dec
[780]4992 def recalc_azel(self):
[1846]4993 """Recalculate the azimuth and elevation for each position."""
[780]4994 varlist = vars()
[876]4995 self._recalcazel()
[780]4996 self._add_history("recalc_azel", varlist)
4997 return
4998
[1862]4999 @asaplog_post_dec
[513]5000 def __add__(self, other):
[2574]5001 """
5002 implicit on all axes and on Tsys
5003 """
[513]5004 varlist = vars()
[2574]5005 s = self.__op( other, "ADD" )
[513]5006 s._add_history("operator +", varlist)
5007 return s
5008
[1862]5009 @asaplog_post_dec
[513]5010 def __sub__(self, other):
5011 """
5012 implicit on all axes and on Tsys
5013 """
5014 varlist = vars()
[2574]5015 s = self.__op( other, "SUB" )
[513]5016 s._add_history("operator -", varlist)
5017 return s
[710]5018
[1862]5019 @asaplog_post_dec
[513]5020 def __mul__(self, other):
5021 """
5022 implicit on all axes and on Tsys
5023 """
5024 varlist = vars()
[2574]5025 s = self.__op( other, "MUL" ) ;
[513]5026 s._add_history("operator *", varlist)
5027 return s
5028
[710]5029
[1862]5030 @asaplog_post_dec
[513]5031 def __div__(self, other):
5032 """
5033 implicit on all axes and on Tsys
5034 """
5035 varlist = vars()
[2574]5036 s = self.__op( other, "DIV" )
5037 s._add_history("operator /", varlist)
5038 return s
5039
5040 @asaplog_post_dec
5041 def __op( self, other, mode ):
[513]5042 s = None
5043 if isinstance(other, scantable):
[2574]5044 s = scantable(self._math._binaryop(self, other, mode))
[513]5045 elif isinstance(other, float):
5046 if other == 0.0:
[718]5047 raise ZeroDivisionError("Dividing by zero is not recommended")
[2954]5048 s = scantable(self._math._unaryop(self, other, mode, False, False))
[2144]5049 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]5050 if isinstance(other[0], list) \
5051 or isinstance(other[0], numpy.ndarray):
[2144]5052 from asapmath import _array2dOp
[2574]5053 s = _array2dOp( self, other, mode, False )
[2144]5054 else:
[2574]5055 s = scantable( self._math._arrayop( self, other,
[2954]5056 mode, False, False ) )
[513]5057 else:
[718]5058 raise TypeError("Other input is not a scantable or float value")
[513]5059 return s
5060
[1862]5061 @asaplog_post_dec
[530]5062 def get_fit(self, row=0):
[1846]5063 """\
[530]5064 Print or return the stored fits for a row in the scantable
[1846]5065
[530]5066 Parameters:
[1846]5067
[530]5068 row: the row which the fit has been applied to.
[1846]5069
[530]5070 """
5071 if row > self.nrow():
5072 return
[976]5073 from asap.asapfit import asapfit
[530]5074 fit = asapfit(self._getfit(row))
[1859]5075 asaplog.push( '%s' %(fit) )
5076 return fit.as_dict()
[530]5077
[2349]5078 @preserve_selection
[1483]5079 def flag_nans(self):
[1846]5080 """\
[1483]5081 Utility function to flag NaN values in the scantable.
5082 """
5083 import numpy
5084 basesel = self.get_selection()
5085 for i in range(self.nrow()):
[1589]5086 sel = self.get_row_selector(i)
5087 self.set_selection(basesel+sel)
[1483]5088 nans = numpy.isnan(self._getspectrum(0))
[2877]5089 if numpy.any(nans):
5090 bnans = [ bool(v) for v in nans]
5091 self.flag(bnans)
5092
5093 self.set_selection(basesel)
[1483]5094
[1588]5095 def get_row_selector(self, rowno):
[1992]5096 return selector(rows=[rowno])
[1573]5097
[484]5098 def _add_history(self, funcname, parameters):
[1435]5099 if not rcParams['scantable.history']:
5100 return
[484]5101 # create date
5102 sep = "##"
5103 from datetime import datetime
5104 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5105 hist = dstr+sep
5106 hist += funcname+sep#cdate+sep
[2349]5107 if parameters.has_key('self'):
5108 del parameters['self']
[1118]5109 for k, v in parameters.iteritems():
[484]5110 if type(v) is dict:
[1118]5111 for k2, v2 in v.iteritems():
[484]5112 hist += k2
5113 hist += "="
[1118]5114 if isinstance(v2, scantable):
[484]5115 hist += 'scantable'
5116 elif k2 == 'mask':
[1118]5117 if isinstance(v2, list) or isinstance(v2, tuple):
[513]5118 hist += str(self._zip_mask(v2))
5119 else:
5120 hist += str(v2)
[484]5121 else:
[513]5122 hist += str(v2)
[484]5123 else:
5124 hist += k
5125 hist += "="
[1118]5126 if isinstance(v, scantable):
[484]5127 hist += 'scantable'
5128 elif k == 'mask':
[1118]5129 if isinstance(v, list) or isinstance(v, tuple):
[513]5130 hist += str(self._zip_mask(v))
5131 else:
5132 hist += str(v)
[484]5133 else:
5134 hist += str(v)
5135 hist += sep
5136 hist = hist[:-2] # remove trailing '##'
5137 self._addhistory(hist)
5138
[710]5139
[484]5140 def _zip_mask(self, mask):
5141 mask = list(mask)
5142 i = 0
5143 segments = []
5144 while mask[i:].count(1):
5145 i += mask[i:].index(1)
5146 if mask[i:].count(0):
5147 j = i + mask[i:].index(0)
5148 else:
[710]5149 j = len(mask)
[1118]5150 segments.append([i, j])
[710]5151 i = j
[484]5152 return segments
[714]5153
[626]5154 def _get_ordinate_label(self):
5155 fu = "("+self.get_fluxunit()+")"
5156 import re
5157 lbl = "Intensity"
[1118]5158 if re.match(".K.", fu):
[626]5159 lbl = "Brightness Temperature "+ fu
[1118]5160 elif re.match(".Jy.", fu):
[626]5161 lbl = "Flux density "+ fu
5162 return lbl
[710]5163
[876]5164 def _check_ifs(self):
[2349]5165# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]5166 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]5167 nchans = filter(lambda t: t > 0, nchans)
[876]5168 return (sum(nchans)/len(nchans) == nchans[0])
[976]5169
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more datasets into this scantable (internal worker
        behind the public readers).

        Parameters:

            names:      list of file names; environment variables and
                        '~' are expanded, and a missing file raises
                        IOError before any import starts

            unit:       if not None, flux unit to set on the result

            average:    if True, time-average the integrations of each
                        dataset as it is read

            opts:       extra options passed to the filler's open()
                        (NOTE: mutable default argument; callers should
                        not rely on it being fresh per call)
        """
        first = True
        fullnames = []
        for name in names:
            # expand $VARS and ~user, and fail early on missing files
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type flag: 1 when 'disk' storage is configured, else 0
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the reader matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge each subsequent dataset into what was read so far
                tbl = self._math._merge([self, tbl])
            # rebind this scantable to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]5210
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve a verification choice, prompting the user when needed.

        Parameters:

            msg:        the prompt shown when interactive input is
                        required

            action:     optional pre-chosen action; when None or not a
                        string, the user is asked via raw_input

        Returns one of 'Y' (yes), 'N' (no), 'A' (accept all remaining),
        'R' (reject all remaining). An empty reply means the default
        'Y'; 'H' or '?' prints help and prompts again; any other
        non-empty input falls back to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare Enter takes the default: yes
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # after showing help, prompt again (deliberately drops the
            # invalid 'action' so raw_input runs)
            return self._get_verify_action(msg)
        else:
            return 'Y'
[2012]5229
[1402]5230 def __getitem__(self, key):
5231 if key < 0:
5232 key += self.nrow()
5233 if key >= self.nrow():
5234 raise IndexError("Row index out of range.")
5235 return self._getspectrum(key)
5236
5237 def __setitem__(self, key, value):
5238 if key < 0:
5239 key += self.nrow()
5240 if key >= self.nrow():
5241 raise IndexError("Row index out of range.")
5242 if not hasattr(value, "__len__") or \
5243 len(value) > self.nchan(self.getif(key)):
5244 raise ValueError("Spectrum length doesn't match.")
5245 return self._setspectrum(value, key)
5246
    def __len__(self):
        """Return the number of rows in the scantable."""
        return self.nrow()
5249
5250 def __iter__(self):
5251 for i in range(len(self)):
5252 yield self[i]
Note: See TracBrowser for help on using the repository browser.