source: trunk/python/scantable.py@ 2904

Last change on this file since 2904 was 2902, checked in by Takeshi Nakazato, 11 years ago

New Development: No

JIRA Issue: Yes CAS-5875

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: test_sdcoadd

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Another possible fix for the broken build of standalone ASAP, which may
be more reasonable and more robust than the previous one.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 205.6 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2751]4import re
[2315]5import tempfile
[1948]6import numpy
[1691]7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
[1824]12from asap.env import is_casapy
[876]13from asap._asap import Scantable
[2004]14from asap._asap import filler, msfiller
[1824]15from asap.parameters import rcParams
[1862]16from asap.logging import asaplog, asaplog_post_dec
[1824]17from asap.selector import selector
18from asap.linecatalog import linecatalog
[1600]19from asap.coordinate import coordinate
[1859]20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]21from asap.asapfitter import fitter
[102]22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        basesel = obj.get_selection()
        try:
            val = func(obj, *args, **kw)
        finally:
            # always restore the original selection, also on error
            obj.set_selection(basesel)
        return val
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a directory containing a casacore
    table ('table.info' + 'table.dat') whose first 'table.info' line
    declares the type as Scantable.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        # only the first line carries the table type
        l = f.readline()
    finally:
        # close the handle even if readline() fails
        f.close()
    match_pattern = '^Type = (Scantable)? *$'
    return bool(re.match(match_pattern, l))
[1697]58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a directory containing a casacore
    table whose 'table.info' type line mentions 'Measurement Set'.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        # only the first line carries the table type
        l = f.readline()
    finally:
        # close the handle even if readline() fails
        f.close()
    return l.find('Measurement Set') != -1
[2186]79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # single-line messages instead of the former backslash-continued string
    # literals, which embedded long runs of whitespace in the message text
    elem_msg = ("Each element of the 'edge' tuple has to be a pair of "
                "integers or an integer.")
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError(elem_msg)
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError(elem_msg)
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # canonicalise scalar / empty / single-element forms
    if isinstance(edge, int):
        edge = [edge, edge]                    # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                      # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [edge[0], edge[0]]          # e.g. [1] => [1,1]

    # 'common' edge means one [left,right] pair applied to all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # flatten per-IF specification, expanding bare ints to pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: push it to the log in verbose mode,
    otherwise re-raise as a RuntimeError carrying the original error text.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress-reporting options as '<true|false>,<minnrow>'
    for handing down to the C++ layer.
    """
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        # drop entries whose row index is out of range (packed as '')
        return [s for s in (do_pack_blinfo(item, maxirow) for item in blinfo)
                if s != '']
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' when the entry's row index is >= maxirow.
    Raises ValueError when mandatory keys are missing or the clipping /
    linefinder parameter sets are incomplete.
    """
    def _tostr(val):
        # flatten a (possibly nested) sequence into 'a,b,c,...';
        # scalars are simply str()'ed
        if isinstance(val, (list, tuple)):
            parts = []
            for item in val:
                if isinstance(item, (list, tuple)):
                    parts.extend(str(sub) for sub in item)
                else:
                    parts.append(str(item))
            return ",".join(parts)
        return str(val)

    dinfo = {}
    # mandatory keys ('has_key' was removed in Python 3; 'in' works in both)
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        dinfo[key] = _tostr(val)

    # rows outside the scantable are silently skipped by the caller
    if irow >= maxirow: return ''

    # optional function-specific parameters
    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _tostr(blinfo[key])

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # clipping parameters must be given both or not at all
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter']  = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder parameters must be given all three or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _tostr(blinfo[key])

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True when *s* is a string that parses as a float,
    False otherwise (including non-string input).

    Replaces a bare 'except:' plus return-in-finally, which silently
    swallowed every exception (even KeyboardInterrupt).
    """
    try:
        float(s.strip())
        return True
    except (AttributeError, TypeError, ValueError):
        # AttributeError/TypeError: non-string input; ValueError: not a number
        return False
313
def is_frequency(s):
    """Return True when the (stripped) string *s* ends in 'hz',
    case-insensitively.
    """
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a (number, frequency-with-unit) string pair into two
    floats scaled by the SI prefix found in *s2* (plain 'Hz' if none).
    Raises RuntimeError on malformed input.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before the trailing 'Hz' is the SI prefix,
    # unless it is part of the number (plain 'Hz')
    prefix = s2[-3:-2]
    if is_number(prefix):
        return (float(s1), float(s2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
[2882]338
def is_velocity(s):
    """Return True when the (stripped) string *s* ends in 'm/s',
    case-insensitively.
    """
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a (number, velocity-with-unit) string pair into two
    floats in km/s (the default velocity unit).
    Raises RuntimeError on malformed input.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # unit prefixes, expressed relative to km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    prefix = s2[-4:-3]
    if is_number(prefix):
        # plain 'm/s' -> scale both values to km/s
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
[2882]364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity into a frequency for the given rest frequency,
    using the requested doppler convention ('radio', 'optical', or
    anything else for the relativistic formula).

    Note: vel is in unit of km/s.
    """
    import math

    # speed of light in km/s
    vel_c = 299792.458
    beta = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - beta)
    elif convention == 'optical':
        return restfreq / (1.0 + beta)
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
[2882]380
def get_restfreq_in_Hz(s_restfreq):
    """Parse a rest-frequency string like '115.27GHz' or '1420405751.786'
    and return its value in Hz. A missing unit means the value is
    already in Hz. Raises Exception for an unrecognised unit.

    Removes a 26-line commented-out prefix table (superseded by the
    prefix/factor lists) and uses Python-3-compatible raise syntax.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # peel the trailing alphabetic unit off the leading numeric value
    for i in reversed(range(len(s))):
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        # single-character SI prefix in front of 'Hz'
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value * factor
    else:
        raise Exception("wrong unit of restfreq.")
432
def normalise_restfreq(in_restfreq):
    """Normalise a rest-frequency specification to float Hz values.

    Accepts a float/int, a unit string (e.g. '115.27GHz'), or a 1-D
    list/numpy.ndarray whose elements are numbers, strings, or
    {'name': ..., 'value': ...} dictionaries. Raises Exception for
    anything else.
    """
    # 'long' only exists on Python 2; fall back to plain int elsewhere
    try:
        integer_types = (int, long)
    except NameError:
        integer_types = (int,)

    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, integer_types):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, (list, numpy.ndarray)):
        if isinstance(in_restfreq, numpy.ndarray):
            if len(in_restfreq.shape) > 1:
                raise Exception("given in numpy.ndarray, in_restfreq must be 1-D.")

        res = []
        for elem in in_restfreq:
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, integer_types):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    res.append({"name": elem["name"],
                                "value": float(elem["value"])})
                elif isinstance(elem["value"], str):
                    res.append({"name": elem["name"],
                                "value": get_restfreq_in_Hz(elem["value"])})
                else:
                    raise Exception("restfreq elements must be float, int, or string.")
            # NOTE(review): elements of any other type are silently
            # skipped, preserving the original (possibly intentional)
            # best-effort behaviour.
        return res
    else:
        raise Exception("wrong type of restfreq given.")
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*, ignoring an empty
    specification ('' or []).
    """
    rfgiven = (restfreq != '') and (restfreq != [])
    if rfgiven:
        s.set_restfreqs(normalise_restfreq(restfreq))
480
[876]481class scantable(Scantable):
[1846]482 """\
483 The ASAP container for scans (single-dish data).
[102]484 """
[1819]485
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy construction from an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: pass through MS-specific filler options
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file goes through the NRO filler path
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]572
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: fall back to a default named after the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all other formats are handled by the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
631
[102]632 def copy(self):
[1846]633 """Return a copy of this scantable.
634
635 *Note*:
636
[1348]637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]638
639 Example::
640
[102]641 copiedscan = scan.copy()
[1846]642
[102]643 """
[876]644 sd = scantable(Scantable._copy(self))
[113]645 return sd
646
    def drop_scan(self, scanid=None):
        """\
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.

        Parameters:

            scanid:    a (list of) scan number(s)

        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            raise RuntimeError( 'Please specify a scanno to drop from the'
                                ' scantable' )
        scanid = _to_list(scanid)
        # select every scan that is NOT being dropped
        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
        for sid in scanid: allscans.remove(sid)
        if len(allscans) == 0:
            raise ValueError("Can't remove all scans")
        sel = selector(scans=allscans)
        return self._select_copy(sel)
[1093]670
[1594]671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
[102]678 def get_scan(self, scanid=None):
[1855]679 """\
[102]680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
[1846]682
683 *Note*:
684
[1348]685 See scantable.drop_scan() for the inverse operation.
[1846]686
[102]687 Parameters:
[1846]688
[513]689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
[1846]692
693 Example::
694
[513]695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]700 newscan = scan.get_scan([0, 2, 7, 10])
[1846]701
[102]702 """
703 if scanid is None:
[1859]704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
[102]706 try:
[946]707 bsel = self.get_selection()
708 sel = selector()
[102]709 if type(scanid) is str:
[946]710 sel.set_name(scanid)
[1594]711 return self._select_copy(sel)
[102]712 elif type(scanid) is int:
[946]713 sel.set_scans([scanid])
[1594]714 return self._select_copy(sel)
[381]715 elif type(scanid) is list:
[946]716 sel.set_scans(scanid)
[1594]717 return self._select_copy(sel)
[381]718 else:
[718]719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]720 raise TypeError(msg)
[102]721 except RuntimeError:
[1859]722 raise
[102]723
    def __str__(self):
        """Return the table summary as a string (via a temporary file,
        because Scantable._summary writes to a file path)."""
        tempFile = tempfile.NamedTemporaryFile()
        Scantable._summary(self, tempFile.name)
        tempFile.seek(0)
        # discard log output produced while summarising
        asaplog.clear()
        return tempFile.file.read()
[102]730
[2315]731 @asaplog_post_dec
[976]732 def summary(self, filename=None):
[1846]733 """\
[102]734 Print a summary of the contents of this scantable.
[1846]735
[102]736 Parameters:
[1846]737
[1931]738 filename: the name of a file to write the putput to
[102]739 Default - no file output
[1846]740
[102]741 """
742 if filename is not None:
[256]743 if filename is "":
744 filename = 'scantable_summary.txt'
[415]745 from os.path import expandvars, isdir
[411]746 filename = expandvars(filename)
[2286]747 if isdir(filename):
[718]748 msg = "Illegal file name '%s'." % (filename)
[1859]749 raise IOError(msg)
[2286]750 else:
751 filename = ""
752 Scantable._summary(self, filename)
[710]753
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the spectrum from

        """
        return self._getspectrum(rowno)
[946]763
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the mask from

        """
        return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
[1938]775 """Set the spectrum for the current row in the scantable.
[1846]776
[1471]777 Parameters:
[1846]778
[1855]779 spec: the new spectrum
[1846]780
[1855]781 rowno: the row number to set the spectrum for
782
[1471]783 """
[2348]784 assert(len(spec) == self.nchan(self.getif(rowno)))
[1471]785 return self._setspectrum(spec, rowno)
786
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno: the row number for the spectral coordinate

        """
        # wrap the low-level coordinate object in the asap coordinate class
        return coordinate(Scantable.get_coordinate(self, rowno))
809
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)          # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        return selector(self._getselection())
822
    def set_selection(self, selection=None, **kw):
        """\
        Select a subset of the data. All following operations on this scantable
        are only applied to this selection.

        Parameters:

            selection:    a selector object (default unset the selection), or
                          any combination of 'pols', 'ifs', 'beams', 'scans',
                          'cycles', 'name', 'query'

        Examples::

            sel = selector()         # create a selection object
            self.set_scans([0, 3])   # select SCANNO 0 and 3
            scan.set_selection(sel)  # set the selection
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection

        """
        if selection is None:
            # reset
            if len(kw) == 0:
                selection = selector()
            else:
                # try keywords
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', "
                                       "valid keys are %s" % (k,
                                                              selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
860
    def get_row(self, row=0, insitu=None):
        """\
        Select a row in the scantable.
        Return a scantable with single row.

        Parameters:

            row:    row no of integration, default is 0.
            insitu: if False a new scantable is returned. Otherwise, the
                    scaling is done in-situ. The default is taken from .asaprc
                    (False)

        """
        if insitu is None:
            insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        # Select a row
        sel = selector()
        sel.set_rows([row])
        workscan.set_selection(sel)
        if not workscan.nrow() == 1:
            msg = "Could not identify single row. %d rows selected." \
                % (workscan.nrow())
            raise RuntimeError(msg)
        if insitu:
            self._assign(workscan)
        else:
            return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/vel) of the
        # extremum rather than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # also fetch the channel of the extremum for the report string
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # NOTE(review): 'statunit' is only assigned inside this loop when
        # getchan is True; with zero rows the print below would raise a
        # NameError — presumably never hit in practice, but worth confirming.
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # report the abscissa value as the statistic itself
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol indices when there is more than one
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
            out +=  sep+"\n"

        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # route the report through a per-user temp file into the asap log
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
[1846]1002 """\
[1819]1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
[1846]1005
[1819]1006 Parameters:
[1846]1007
[1819]1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
[1855]1010
[1819]1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
[1846]1013
[1819]1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
[1118]1022 def stddev(self, mask=None):
[1846]1023 """\
[135]1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
[1846]1027
[135]1028 Parameters:
[1846]1029
[135]1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
[1846]1033 Example::
1034
[135]1035 scan.set_unit('channel')
[1118]1036 msk = scan.create_mask([100, 200], [500, 600])
[135]1037 scan.stddev(mask=m)
[1846]1038
[135]1039 """
[1118]1040 return self.stats(stat='stddev', mask=mask);
[135]1041
[1003]1042
[1259]1043 def get_column_names(self):
[1846]1044 """\
[1003]1045 Return a list of column names, which can be used for selection.
1046 """
[1259]1047 return list(Scantable.get_column_names(self))
[1003]1048
[1730]1049 def get_tsys(self, row=-1):
[1846]1050 """\
[113]1051 Return the System temperatures.
[1846]1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
[113]1057 Returns:
[1846]1058
[876]1059 a list of Tsys values for the current selection
[1846]1060
[113]1061 """
[1730]1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
[876]1064 return self._row_callback(self._gettsys, "Tsys")
[256]1065
[2406]1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
[1730]1069
[2406]1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
[2791]1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
[1730]1098 def get_weather(self, row=-1):
[1846]1099 """\
1100 Return the weather informations.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111
[1730]1112 values = self._get_column(self._get_weather, row)
1113 if row > -1:
1114 return {'temperature': values[0],
1115 'pressure': values[1], 'humidity' : values[2],
1116 'windspeed' : values[3], 'windaz' : values[4]
1117 }
1118 else:
1119 out = []
1120 for r in values:
1121
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
[876]1128 def _row_callback(self, callback, label):
1129 out = ""
[1118]1130 outvec = []
[1590]1131 sep = '-'*50
[876]1132 for i in range(self.nrow()):
1133 tm = self._gettime(i)
1134 src = self._getsourcename(i)
[1590]1135 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]1136 out += 'Time[%s]:\n' % (tm)
[1590]1137 if self.nbeam(-1) > 1:
1138 out += ' Beam[%d] ' % (self.getbeam(i))
1139 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1140 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]1141 outvec.append(callback(i))
1142 out += '= %3.3f\n' % (outvec[i])
[1590]1143 out += sep+'\n'
[1859]1144
1145 asaplog.push(sep)
1146 asaplog.push(" %s" % (label))
1147 asaplog.push(sep)
1148 asaplog.push(out)
[1861]1149 asaplog.post()
[1175]1150 return outvec
[256]1151
[1947]1152 def _get_column(self, callback, row=-1, *args):
[1070]1153 """
1154 """
1155 if row == -1:
[1947]1156 return [callback(i, *args) for i in range(self.nrow())]
[1070]1157 else:
[1819]1158 if 0 <= row < self.nrow():
[1947]1159 return callback(row, *args)
[256]1160
[1070]1161
[1948]1162 def get_time(self, row=-1, asdatetime=False, prec=-1):
[1846]1163 """\
[113]1164 Get a list of time stamps for the observations.
[1938]1165 Return a datetime object or a string (default) for each
1166 integration time stamp in the scantable.
[1846]1167
[113]1168 Parameters:
[1846]1169
[1348]1170 row: row no of integration. Default -1 return all rows
[1855]1171
[1348]1172 asdatetime: return values as datetime objects rather than strings
[1846]1173
[2349]1174 prec: number of digits shown. Default -1 to automatic
1175 calculation.
[1948]1176 Note this number is equals to the digits of MVTime,
1177 i.e., 0<prec<3: dates with hh:: only,
1178 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
[1947]1179 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
1180
[113]1181 """
[1175]1182 from datetime import datetime
[1948]1183 if prec < 0:
1184 # automagically set necessary precision +1
[2349]1185 prec = 7 - \
1186 numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
[1948]1187 prec = max(6, int(prec))
1188 else:
1189 prec = max(0, prec)
1190 if asdatetime:
1191 #precision can be 1 millisecond at max
1192 prec = min(12, prec)
1193
[1947]1194 times = self._get_column(self._gettime, row, prec)
[1348]1195 if not asdatetime:
[1392]1196 return times
[1947]1197 format = "%Y/%m/%d/%H:%M:%S.%f"
1198 if prec < 7:
1199 nsub = 1 + (((6-prec)/2) % 3)
1200 substr = [".%f","%S","%M"]
1201 for i in range(nsub):
1202 format = format.replace(substr[i],"")
[1175]1203 if isinstance(times, list):
[1947]1204 return [datetime.strptime(i, format) for i in times]
[1175]1205 else:
[1947]1206 return datetime.strptime(times, format)
[102]1207
[1348]1208
1209 def get_inttime(self, row=-1):
[1846]1210 """\
[1348]1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
[1846]1213
[1348]1214 Parameters:
[1846]1215
[1348]1216 row: row no of integration. Default -1 return all rows.
[1846]1217
[1348]1218 """
[1573]1219 return self._get_column(self._getinttime, row)
[1348]1220
[1573]1221
[714]1222 def get_sourcename(self, row=-1):
[1846]1223 """\
[794]1224 Get a list source names for the observations.
[714]1225 Return a string for each integration in the scantable.
1226 Parameters:
[1846]1227
[1348]1228 row: row no of integration. Default -1 return all rows.
[1846]1229
[714]1230 """
[1070]1231 return self._get_column(self._getsourcename, row)
[714]1232
[794]1233 def get_elevation(self, row=-1):
[1846]1234 """\
[794]1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
[1846]1237
[794]1238 Parameters:
[1846]1239
[1348]1240 row: row no of integration. Default -1 return all rows.
[1846]1241
[794]1242 """
[1070]1243 return self._get_column(self._getelevation, row)
[794]1244
1245 def get_azimuth(self, row=-1):
[1846]1246 """\
[794]1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
[1846]1249
[794]1250 Parameters:
[1348]1251 row: row no of integration. Default -1 return all rows.
[1846]1252
[794]1253 """
[1070]1254 return self._get_column(self._getazimuth, row)
[794]1255
1256 def get_parangle(self, row=-1):
[1846]1257 """\
[794]1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
[1846]1260
[794]1261 Parameters:
[1846]1262
[1348]1263 row: row no of integration. Default -1 return all rows.
[1846]1264
[794]1265 """
[1070]1266 return self._get_column(self._getparangle, row)
[794]1267
[1070]1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
[1594]1271 Return a string for each integration in the scantable.
[1855]1272
[1070]1273 Parameters:
[1855]1274
[1070]1275 row: row no of integration. Default -1 return all rows
[1855]1276
[1070]1277 """
1278 return self._get_column(self._getdirection, row)
1279
[1391]1280 def get_directionval(self, row=-1):
[1846]1281 """\
[1391]1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
[1846]1284
[1391]1285 Parameters:
[1846]1286
[1391]1287 row: row no of integration. Default -1 return all rows
[1846]1288
[1391]1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
[1862]1292 @asaplog_post_dec
[102]1293 def set_unit(self, unit='channel'):
[1846]1294 """\
[102]1295 Set the unit for all following operations on this scantable
[1846]1296
[102]1297 Parameters:
[1846]1298
1299 unit: optional unit, default is 'channel'. Use one of '*Hz',
1300 'km/s', 'channel' or equivalent ''
1301
[102]1302 """
[484]1303 varlist = vars()
[1118]1304 if unit in ['', 'pixel', 'channel']:
[113]1305 unit = ''
1306 inf = list(self._getcoordinfo())
1307 inf[0] = unit
1308 self._setcoordinfo(inf)
[1118]1309 self._add_history("set_unit", varlist)
[113]1310
[1862]1311 @asaplog_post_dec
[484]1312 def set_instrument(self, instr):
[1846]1313 """\
[1348]1314 Set the instrument for subsequent processing.
[1846]1315
[358]1316 Parameters:
[1846]1317
[710]1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1320
[358]1321 """
1322 self._setInstrument(instr)
[1118]1323 self._add_history("set_instument", vars())
[358]1324
[1862]1325 @asaplog_post_dec
[1190]1326 def set_feedtype(self, feedtype):
[1846]1327 """\
[1190]1328 Overwrite the feed type, which might not be set correctly.
[1846]1329
[1190]1330 Parameters:
[1846]1331
[1190]1332 feedtype: 'linear' or 'circular'
[1846]1333
[1190]1334 """
1335 self._setfeedtype(feedtype)
1336 self._add_history("set_feedtype", vars())
1337
[1862]1338 @asaplog_post_dec
[2897]1339 def get_doppler(self):
1340 """\
1341 Get the doppler.
1342 """
1343 return self._getcoordinfo()[2]
1344
1345 @asaplog_post_dec
[276]1346 def set_doppler(self, doppler='RADIO'):
[1846]1347 """\
[276]1348 Set the doppler for all following operations on this scantable.
[1846]1349
[276]1350 Parameters:
[1846]1351
[276]1352 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1353
[276]1354 """
[484]1355 varlist = vars()
[276]1356 inf = list(self._getcoordinfo())
1357 inf[2] = doppler
1358 self._setcoordinfo(inf)
[1118]1359 self._add_history("set_doppler", vars())
[710]1360
[1862]1361 @asaplog_post_dec
[226]1362 def set_freqframe(self, frame=None):
[1846]1363 """\
[113]1364 Set the frame type of the Spectral Axis.
[1846]1365
[113]1366 Parameters:
[1846]1367
[591]1368 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1369 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1370 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1371
1372 Example::
1373
[113]1374 scan.set_freqframe('BARY')
[1846]1375
[113]1376 """
[1593]1377 frame = frame or rcParams['scantable.freqframe']
[484]1378 varlist = vars()
[1819]1379 # "REST" is not implemented in casacore
1380 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1381 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1382 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1383 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1384
[989]1385 if frame in valid:
[113]1386 inf = list(self._getcoordinfo())
1387 inf[1] = frame
1388 self._setcoordinfo(inf)
[1118]1389 self._add_history("set_freqframe", varlist)
[102]1390 else:
[1118]1391 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1392 raise TypeError(msg)
[710]1393
[1862]1394 @asaplog_post_dec
[989]1395 def set_dirframe(self, frame=""):
[1846]1396 """\
[989]1397 Set the frame type of the Direction on the sky.
[1846]1398
[989]1399 Parameters:
[1846]1400
[989]1401 frame: an optional frame type, default ''. Valid frames are:
1402 'J2000', 'B1950', 'GALACTIC'
[1846]1403
1404 Example:
1405
[989]1406 scan.set_dirframe('GALACTIC')
[1846]1407
[989]1408 """
1409 varlist = vars()
[1859]1410 Scantable.set_dirframe(self, frame)
[1118]1411 self._add_history("set_dirframe", varlist)
[989]1412
[113]1413 def get_unit(self):
[1846]1414 """\
[113]1415 Get the default unit set in this scantable
[1846]1416
[113]1417 Returns:
[1846]1418
[113]1419 A unit string
[1846]1420
[113]1421 """
1422 inf = self._getcoordinfo()
1423 unit = inf[0]
1424 if unit == '': unit = 'channel'
1425 return unit
[102]1426
[1862]1427 @asaplog_post_dec
[158]1428 def get_abcissa(self, rowno=0):
[1846]1429 """\
[158]1430 Get the abcissa in the current coordinate setup for the currently
[113]1431 selected Beam/IF/Pol
[1846]1432
[113]1433 Parameters:
[1846]1434
[226]1435 rowno: an optional row number in the scantable. Default is the
1436 first row, i.e. rowno=0
[1846]1437
[113]1438 Returns:
[1846]1439
[1348]1440 The abcissa values and the format string (as a dictionary)
[1846]1441
[113]1442 """
[256]1443 abc = self._getabcissa(rowno)
[710]1444 lbl = self._getabcissalabel(rowno)
[158]1445 return abc, lbl
[113]1446
[1862]1447 @asaplog_post_dec
[2322]1448 def flag(self, mask=None, unflag=False, row=-1):
[1846]1449 """\
[1001]1450 Flag the selected data using an optional channel mask.
[1846]1451
[1001]1452 Parameters:
[1846]1453
[1001]1454 mask: an optional channel mask, created with create_mask. Default
1455 (no mask) is all channels.
[1855]1456
[1819]1457 unflag: if True, unflag the data
[1846]1458
[2322]1459 row: an optional row number in the scantable.
1460 Default -1 flags all rows
1461
[1001]1462 """
1463 varlist = vars()
[1593]1464 mask = mask or []
[1994]1465 self._flag(row, mask, unflag)
[1001]1466 self._add_history("flag", varlist)
1467
[1862]1468 @asaplog_post_dec
[2322]1469 def flag_row(self, rows=None, unflag=False):
[1846]1470 """\
[1819]1471 Flag the selected data in row-based manner.
[1846]1472
[1819]1473 Parameters:
[1846]1474
[1843]1475 rows: list of row numbers to be flagged. Default is no row
[2322]1476 (must be explicitly specified to execute row-based
1477 flagging).
[1855]1478
[1819]1479 unflag: if True, unflag the data.
[1846]1480
[1819]1481 """
1482 varlist = vars()
[2322]1483 if rows is None:
1484 rows = []
[1859]1485 self._flag_row(rows, unflag)
[1819]1486 self._add_history("flag_row", varlist)
1487
[1862]1488 @asaplog_post_dec
[1819]1489 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1490 """\
[1819]1491 Flag the selected data outside a specified range (in channel-base)
[1846]1492
[1819]1493 Parameters:
[1846]1494
[1819]1495 uthres: upper threshold.
[1855]1496
[1819]1497 dthres: lower threshold
[1846]1498
[2322]1499 clipoutside: True for flagging data outside the range
1500 [dthres:uthres].
[1928]1501 False for flagging data inside the range.
[1855]1502
[1846]1503 unflag: if True, unflag the data.
1504
[1819]1505 """
1506 varlist = vars()
[1859]1507 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1508 self._add_history("clip", varlist)
1509
[1862]1510 @asaplog_post_dec
[1584]1511 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1512 """\
[1192]1513 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1514 Flagged data in the scantable get interpolated over the region.
[1192]1515 No taper is applied.
[1846]1516
[1192]1517 Parameters:
[1846]1518
[1579]1519 start: the start frequency (really a period within the
1520 bandwidth) or period to remove
[1855]1521
[1579]1522 end: the end frequency or period to remove
[1855]1523
[2431]1524 unit: the frequency unit (default 'MHz') or '' for
[1579]1525 explicit lag channels
[1846]1526
1527 *Notes*:
1528
[1579]1529 It is recommended to flag edges of the band or strong
[1348]1530 signals beforehand.
[1846]1531
[1192]1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
[1579]1536 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1537 if not (unit == "" or base.has_key(unit)):
[1192]1538 raise ValueError("%s is not a valid unit." % unit)
[1859]1539 if unit == "":
1540 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1541 else:
1542 s = scantable(self._math._lag_flag(self, start*base[unit],
1543 end*base[unit], "frequency"))
[1192]1544 s._add_history("lag_flag", varlist)
1545 if insitu:
1546 self._assign(s)
1547 else:
1548 return s
[1001]1549
[1862]1550 @asaplog_post_dec
[2349]1551 def fft(self, rowno=None, mask=None, getrealimag=False):
[2177]1552 """\
1553 Apply FFT to the spectra.
1554 Flagged data in the scantable get interpolated over the region.
1555
1556 Parameters:
[2186]1557
1558 rowno: The row number(s) to be processed. int, list
[2349]1559 and tuple are accepted. By default (None), FFT
[2186]1560 is applied to the whole data.
1561
1562 mask: Auxiliary channel mask(s). Given as a boolean
1563 list, it is applied to all specified rows.
1564 A list of boolean lists can also be used to
1565 apply different masks. In the latter case, the
1566 length of 'mask' must be the same as that of
[2349]1567 'rowno'. The default is None.
[2177]1568
1569 getrealimag: If True, returns the real and imaginary part
1570 values of the complex results.
1571 If False (the default), returns the amplitude
1572 (absolute value) normalised with Ndata/2 and
1573 phase (argument, in unit of radian).
1574
1575 Returns:
1576
[2186]1577 A list of dictionaries containing the results for each spectrum.
1578 Each dictionary contains two values, the real and the imaginary
1579 parts when getrealimag = True, or the amplitude(absolute value)
1580 and the phase(argument) when getrealimag = False. The key for
1581 these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1582 respectively.
1583 """
[2349]1584 if rowno is None:
1585 rowno = []
[2177]1586 if isinstance(rowno, int):
1587 rowno = [rowno]
1588 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1589 raise TypeError("The row number(s) must be int, list or tuple.")
1590 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1591
[2411]1592 usecommonmask = True
1593
1594 if mask is None:
1595 mask = []
1596 if isinstance(mask, list) or isinstance(mask, tuple):
1597 if len(mask) == 0:
1598 mask = [[]]
1599 else:
1600 if isinstance(mask[0], bool):
1601 if len(mask) != self.nchan(self.getif(rowno[0])):
1602 raise ValueError("The spectra and the mask have "
1603 "different length.")
1604 mask = [mask]
1605 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1606 usecommonmask = False
1607 if len(mask) != len(rowno):
1608 raise ValueError("When specifying masks for each "
1609 "spectrum, the numbers of them "
1610 "must be identical.")
1611 for i in xrange(mask):
1612 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1613 raise ValueError("The spectra and the mask have "
1614 "different length.")
1615 else:
1616 raise TypeError("The mask must be a boolean list or "
1617 "a list of boolean list.")
1618 else:
[2349]1619 raise TypeError("The mask must be a boolean list or a list of "
1620 "boolean list.")
[2186]1621
1622 res = []
1623
1624 imask = 0
1625 for whichrow in rowno:
1626 fspec = self._fft(whichrow, mask[imask], getrealimag)
1627 nspec = len(fspec)
[2177]1628
[2349]1629 i = 0
1630 v1 = []
1631 v2 = []
1632 reselem = {"real":[],"imag":[]} if getrealimag \
1633 else {"ampl":[],"phase":[]}
[2177]1634
[2186]1635 while (i < nspec):
1636 v1.append(fspec[i])
1637 v2.append(fspec[i+1])
[2349]1638 i += 2
[2186]1639
[2177]1640 if getrealimag:
[2186]1641 reselem["real"] += v1
1642 reselem["imag"] += v2
[2177]1643 else:
[2186]1644 reselem["ampl"] += v1
1645 reselem["phase"] += v2
[2177]1646
[2186]1647 res.append(reselem)
1648
[2349]1649 if not usecommonmask:
1650 imask += 1
[2186]1651
[2177]1652 return res
1653
1654 @asaplog_post_dec
[113]1655 def create_mask(self, *args, **kwargs):
[1846]1656 """\
[1118]1657 Compute and return a mask based on [min, max] windows.
[189]1658 The specified windows are to be INCLUDED, when the mask is
[113]1659 applied.
[1846]1660
[102]1661 Parameters:
[1846]1662
[1118]1663 [min, max], [min2, max2], ...
[1024]1664 Pairs of start/end points (inclusive)specifying the regions
[102]1665 to be masked
[1855]1666
[189]1667 invert: optional argument. If specified as True,
1668 return an inverted mask, i.e. the regions
1669 specified are EXCLUDED
[1855]1670
[513]1671 row: create the mask using the specified row for
1672 unit conversions, default is row=0
1673 only necessary if frequency varies over rows.
[1846]1674
1675 Examples::
1676
[113]1677 scan.set_unit('channel')
[1846]1678 # a)
[1118]1679 msk = scan.create_mask([400, 500], [800, 900])
[189]1680 # masks everything outside 400 and 500
[113]1681 # and 800 and 900 in the unit 'channel'
1682
[1846]1683 # b)
[1118]1684 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1685 # masks the regions between 400 and 500
[113]1686 # and 800 and 900 in the unit 'channel'
[1846]1687
1688 # c)
1689 #mask only channel 400
[1554]1690 msk = scan.create_mask([400])
[1846]1691
[102]1692 """
[1554]1693 row = kwargs.get("row", 0)
[513]1694 data = self._getabcissa(row)
[113]1695 u = self._getcoordinfo()[0]
[1859]1696 if u == "":
1697 u = "channel"
1698 msg = "The current mask window unit is %s" % u
1699 i = self._check_ifs()
1700 if not i:
1701 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1702 asaplog.push(msg)
[2348]1703 n = len(data)
[1295]1704 msk = _n_bools(n, False)
[710]1705 # test if args is a 'list' or a 'normal *args - UGLY!!!
1706
[2349]1707 ws = (isinstance(args[-1][-1], int)
1708 or isinstance(args[-1][-1], float)) and args or args[0]
[710]1709 for window in ws:
[1554]1710 if len(window) == 1:
1711 window = [window[0], window[0]]
1712 if len(window) == 0 or len(window) > 2:
[2349]1713 raise ValueError("A window needs to be defined as "
1714 "[start(, end)]")
[1545]1715 if window[0] > window[1]:
1716 tmp = window[0]
1717 window[0] = window[1]
1718 window[1] = tmp
[102]1719 for i in range(n):
[1024]1720 if data[i] >= window[0] and data[i] <= window[1]:
[1295]1721 msk[i] = True
[113]1722 if kwargs.has_key('invert'):
1723 if kwargs.get('invert'):
[1295]1724 msk = mask_not(msk)
[102]1725 return msk
[710]1726
[1931]1727 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1728 """\
[1819]1729 Compute and return a list of mask windows, [min, max].
[1846]1730
[1819]1731 Parameters:
[1846]1732
[1819]1733 mask: channel mask, created with create_mask.
[1855]1734
[1819]1735 row: calcutate the masklist using the specified row
1736 for unit conversions, default is row=0
1737 only necessary if frequency varies over rows.
[1846]1738
[1819]1739 Returns:
[1846]1740
[1819]1741 [min, max], [min2, max2], ...
1742 Pairs of start/end points (inclusive)specifying
1743 the masked regions
[1846]1744
[1819]1745 """
1746 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1747 raise TypeError("The mask should be list or tuple.")
[2427]1748 if len(mask) <= 0:
1749 raise TypeError("The mask elements should be > 0")
[2348]1750 data = self._getabcissa(row)
1751 if len(data) != len(mask):
[1819]1752 msg = "Number of channels in scantable != number of mask elements"
1753 raise TypeError(msg)
1754 u = self._getcoordinfo()[0]
[1859]1755 if u == "":
1756 u = "channel"
1757 msg = "The current mask window unit is %s" % u
1758 i = self._check_ifs()
1759 if not i:
1760 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1761 if not silent:
1762 asaplog.push(msg)
[2349]1763 masklist = []
[1819]1764 ist, ien = None, None
1765 ist, ien=self.get_mask_indices(mask)
1766 if ist is not None and ien is not None:
1767 for i in xrange(len(ist)):
1768 range=[data[ist[i]],data[ien[i]]]
1769 range.sort()
1770 masklist.append([range[0],range[1]])
1771 return masklist
1772
1773 def get_mask_indices(self, mask=None):
[1846]1774 """\
[1819]1775 Compute and Return lists of mask start indices and mask end indices.
[1855]1776
1777 Parameters:
1778
[1819]1779 mask: channel mask, created with create_mask.
[1846]1780
[1819]1781 Returns:
[1846]1782
[1819]1783 List of mask start indices and that of mask end indices,
1784 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1785
[1819]1786 """
1787 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1788 raise TypeError("The mask should be list or tuple.")
[2427]1789 if len(mask) <= 0:
1790 raise TypeError("The mask elements should be > 0")
[2349]1791 istart = []
1792 iend = []
1793 if mask[0]:
1794 istart.append(0)
[1819]1795 for i in range(len(mask)-1):
1796 if not mask[i] and mask[i+1]:
1797 istart.append(i+1)
1798 elif mask[i] and not mask[i+1]:
1799 iend.append(i)
[2349]1800 if mask[len(mask)-1]:
1801 iend.append(len(mask)-1)
[1819]1802 if len(istart) != len(iend):
1803 raise RuntimeError("Numbers of mask start != mask end.")
1804 for i in range(len(istart)):
1805 if istart[i] > iend[i]:
1806 raise RuntimeError("Mask start index > mask end index")
1807 break
1808 return istart,iend
1809
[2013]1810 @asaplog_post_dec
[2882]1811 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
1812 """
1813 Parse MS type spw/channel selection syntax.
1814
1815 Parameters:
1816 selectstring : A string expression of spw and channel selection.
1817 Comma-separated expressions mean different spw -
1818 channel combinations. Spws and channel selections
1819 are partitioned by a colon ':'. In a single
1820 selection expression, you can put multiple values
1821 separated by semicolons ';'. Both for spw and
1822 channel selection, allowed cases include single
1823 value, blank('') or asterisk('*') to specify all
1824 available values, two values connected with a
1825 tilde ('~') to specify an inclusive range. Unit
1826 strings for frequency or velocity can be added to
1827 the tilde-connected values. For channel selection
1828 expression, placing a '<' or a '>' is possible to
1829 specify a semi-infinite interval as well.
1830
1831 examples:
1832 '' or '*' = all spws (all channels)
1833 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
1834 '3:3~45;60' = channels 3 to 45 and 60 in spw 3
1835 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
1836 all channels in spw8
[2884]1837 '1.3~1.5GHz' = all spws that fall in or have at
1838 least some overwrap with frequency
1839 range between 1.3GHz and 1.5GHz.
1840 '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
1841 between the specified
1842 frequency range in spws
1843 that fall in or have
1844 overwrap with the
1845 specified frequency
1846 range.
1847 '1:-200~250km/s' = channels that fall between the
1848 specified velocity range in
1849 spw 1.
[2897]1850 restfreq: the rest frequency.
1851 examples: '115.2712GHz', 115271201800.0
1852 frame: an optional frame type, default 'LSRK'. Valid frames are:
1853 'TOPO', 'LSRD', 'LSRK', 'BARY',
1854 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1855 doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[2882]1856 Returns:
1857 A dictionary of selected (valid) spw and masklist pairs,
1858 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1859 """
1860 if not isinstance(selectstring, str):
1861 asaplog.post()
1862 asaplog.push("Expression of spw/channel selection must be a string.")
1863 asaplog.post("ERROR")
1864
1865 orig_unit = self.get_unit()
1866 self.set_unit('channel')
1867
[2891]1868 if restfreq is not None:
[2892]1869 orig_molids = self._getmolidcol_list()
1870 set_restfreq(self, restfreq)
[2882]1871
[2897]1872 orig_coord = self._getcoordinfo()
[2892]1873
1874 if frame is not None:
1875 orig_frame = orig_coord[1]
1876 self.set_freqframe(frame)
1877
1878 if doppler is not None:
1879 orig_doppler = orig_coord[2]
1880 self.set_doppler(doppler)
[2882]1881
1882 valid_ifs = self.getifnos()
1883
1884 comma_sep = selectstring.split(",")
1885 res = {}
1886
1887 for cms_elem in comma_sep:
1888 colon_sep = cms_elem.split(":")
1889
1890 if (len(colon_sep) > 2):
1891 raise RuntimeError("Invalid selection expression: more than two colons!")
1892
1893 # parse spw expression and store result in spw_list.
1894 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
[2884]1895 # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
1896 # 'a~b*m/s' (where * can be '' or 'k') and also
[2882]1897 # several of the above expressions connected with ';'.
1898
1899 spw_list = []
1900
1901 semicolon_sep = colon_sep[0].split(";")
1902
1903 for scs_elem in semicolon_sep:
1904 scs_elem = scs_elem.strip()
1905
1906 lt_sep = scs_elem.split("<")
1907 gt_sep = scs_elem.split(">")
1908 ti_sep = scs_elem.split("~")
1909
1910 lt_sep_length = len(lt_sep)
1911 gt_sep_length = len(gt_sep)
1912 ti_sep_length = len(ti_sep)
1913
1914 len_product = lt_sep_length * gt_sep_length * ti_sep_length
1915
1916 if (len_product > 2):
1917 # '<', '>' and '~' must not coexist in a single spw expression
1918
1919 raise RuntimeError("Invalid spw selection.")
1920
1921 elif (len_product == 1):
1922 # '', '*', or single spw number.
1923
1924 if (scs_elem == "") or (scs_elem == "*"):
1925 spw_list = valid_ifs[:] # deep copy
1926
1927 else: # single number
[2887]1928 expr = int(scs_elem)
1929 spw_list.append(expr)
1930 if expr not in valid_ifs:
1931 asaplog.push("Invalid spw given. Ignored.")
1932
[2882]1933 else: # (len_product == 2)
[2887]1934 # namely, one of '<', '>' or '~' appears just once.
[2882]1935
1936 if (lt_sep_length == 2): # '<a'
1937 if is_number(lt_sep[1]):
[2886]1938 no_valid_spw = True
[2882]1939 for i in valid_ifs:
1940 if (i < float(lt_sep[1])):
1941 spw_list.append(i)
[2886]1942 no_valid_spw = False
1943
1944 if no_valid_spw:
1945 raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")
[2882]1946
1947 else:
[2886]1948 raise RuntimeError("Invalid spw selection.")
[2882]1949
1950 elif (gt_sep_length == 2): # '>a'
1951 if is_number(gt_sep[1]):
[2886]1952 no_valid_spw = True
[2882]1953 for i in valid_ifs:
1954 if (i > float(gt_sep[1])):
1955 spw_list.append(i)
[2886]1956 no_valid_spw = False
1957
1958 if no_valid_spw:
1959 raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")
[2882]1960
1961 else:
[2886]1962 raise RuntimeError("Invalid spw selection.")
[2882]1963
1964 else: # (ti_sep_length == 2) where both boundaries inclusive
1965 expr0 = ti_sep[0].strip()
1966 expr1 = ti_sep[1].strip()
1967
1968 if is_number(expr0) and is_number(expr1):
1969 # 'a~b'
1970 expr_pmin = min(float(expr0), float(expr1))
1971 expr_pmax = max(float(expr0), float(expr1))
[2887]1972 has_invalid_spw = False
[2886]1973 no_valid_spw = True
1974
[2882]1975 for i in valid_ifs:
1976 if (expr_pmin <= i) and (i <= expr_pmax):
1977 spw_list.append(i)
[2886]1978 no_valid_spw = False
[2887]1979 else:
1980 has_invalid_spw = True
[2886]1981
[2887]1982 if has_invalid_spw:
1983 msg = "Invalid spw is given. Ignored."
1984 asaplog.push(msg)
1985 asaplog.post()
1986
[2886]1987 if no_valid_spw:
1988 raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")
[2887]1989
[2884]1990 elif is_number(expr0) and is_frequency(expr1):
1991 # 'a~b*Hz'
1992 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
[2886]1993 no_valid_spw = True
1994
[2882]1995 for coord in self._get_coordinate_list():
1996 expr_p0 = coord['coord'].to_pixel(expr_f0)
1997 expr_p1 = coord['coord'].to_pixel(expr_f1)
1998 expr_pmin = min(expr_p0, expr_p1)
1999 expr_pmax = max(expr_p0, expr_p1)
2000
2001 spw = coord['if']
2002 pmin = 0.0
2003 pmax = float(self.nchan(spw) - 1)
2004
2005 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
2006 spw_list.append(spw)
[2886]2007 no_valid_spw = False
2008
2009 if no_valid_spw:
2010 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
[2882]2011
[2884]2012 elif is_number(expr0) and is_velocity(expr1):
2013 # 'a~b*m/s'
2014 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
[2882]2015 expr_vmin = min(expr_v0, expr_v1)
2016 expr_vmax = max(expr_v0, expr_v1)
[2886]2017 no_valid_spw = True
2018
[2882]2019 for coord in self._get_coordinate_list():
2020 spw = coord['if']
2021
2022 pmin = 0.0
2023 pmax = float(self.nchan(spw) - 1)
2024
2025 vel0 = coord['coord'].to_velocity(pmin)
2026 vel1 = coord['coord'].to_velocity(pmax)
2027
2028 vmin = min(vel0, vel1)
2029 vmax = max(vel0, vel1)
2030
2031 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
2032 spw_list.append(spw)
[2886]2033 no_valid_spw = False
2034
2035 if no_valid_spw:
2036 raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
[2882]2037
2038 else:
2039 # cases such as 'aGHz~bkm/s' are not allowed now
2040 raise RuntimeError("Invalid spw selection.")
2041
[2887]2042 # check spw list and remove invalid ones.
2043 # if no valid spw left, emit ValueError.
2044 if len(spw_list) == 0:
2045 raise ValueError("No valid spw in given range.")
2046
[2882]2047 # parse channel expression and store the result in crange_list.
2048 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
2049 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
2050 # and also several of the above expressions connected with ';'.
2051
2052 for spw in spw_list:
2053 pmin = 0.0
2054 pmax = float(self.nchan(spw) - 1)
2055
2056 if (len(colon_sep) == 1):
2057 # no expression for channel selection,
2058 # which means all channels are to be selected.
2059 crange_list = [[pmin, pmax]]
2060
2061 else: # (len(colon_sep) == 2)
2062 crange_list = []
2063
2064 found = False
2065 for i in self._get_coordinate_list():
2066 if (i['if'] == spw):
2067 coord = i['coord']
2068 found = True
2069 break
2070
[2887]2071 if found:
2072 semicolon_sep = colon_sep[1].split(";")
2073 for scs_elem in semicolon_sep:
2074 scs_elem = scs_elem.strip()
[2882]2075
[2887]2076 ti_sep = scs_elem.split("~")
2077 ti_sep_length = len(ti_sep)
[2882]2078
[2887]2079 if (ti_sep_length > 2):
2080 raise RuntimeError("Invalid channel selection.")
[2882]2081
[2887]2082 elif (ti_sep_length == 1):
2083 if (scs_elem == "") or (scs_elem == "*"):
2084 # '' and '*' for all channels
2085 crange_list = [[pmin, pmax]]
2086 break
2087 elif (is_number(scs_elem)):
2088 # single channel given
2089 crange_list.append([float(scs_elem), float(scs_elem)])
2090 else:
2091 raise RuntimeError("Invalid channel selection.")
[2882]2092
[2887]2093 else: #(ti_sep_length == 2)
2094 expr0 = ti_sep[0].strip()
2095 expr1 = ti_sep[1].strip()
[2882]2096
[2887]2097 if is_number(expr0) and is_number(expr1):
2098 # 'a~b'
2099 expr_pmin = min(float(expr0), float(expr1))
2100 expr_pmax = max(float(expr0), float(expr1))
[2882]2101
[2887]2102 elif is_number(expr0) and is_frequency(expr1):
2103 # 'a~b*Hz'
2104 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
2105 expr_p0 = coord.to_pixel(expr_f0)
2106 expr_p1 = coord.to_pixel(expr_f1)
2107 expr_pmin = min(expr_p0, expr_p1)
2108 expr_pmax = max(expr_p0, expr_p1)
[2882]2109
[2887]2110 elif is_number(expr0) and is_velocity(expr1):
2111 # 'a~b*m/s'
2112 restf = self.get_restfreqs().values()[0][0]
2113 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
[2897]2114 dppl = self.get_doppler()
2115 expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
2116 expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
[2887]2117 expr_p0 = coord.to_pixel(expr_f0)
2118 expr_p1 = coord.to_pixel(expr_f1)
2119 expr_pmin = min(expr_p0, expr_p1)
2120 expr_pmax = max(expr_p0, expr_p1)
[2882]2121
[2887]2122 else:
2123 # cases such as 'aGHz~bkm/s' are not allowed now
2124 raise RuntimeError("Invalid channel selection.")
[2882]2125
[2887]2126 cmin = max(pmin, expr_pmin)
2127 cmax = min(pmax, expr_pmax)
2128 # if the given range of channel selection has overwrap with
2129 # that of current spw, output the overwrap area.
2130 if (cmin <= cmax):
2131 cmin = float(int(cmin + 0.5))
2132 cmax = float(int(cmax + 0.5))
2133 crange_list.append([cmin, cmax])
[2882]2134
2135 if (len(crange_list) == 0):
2136 crange_list.append([])
2137
2138 if res.has_key(spw):
2139 res[spw].extend(crange_list)
2140 else:
2141 res[spw] = crange_list
2142
[2887]2143 for spw in res.keys():
2144 if spw not in valid_ifs:
2145 del res[spw]
2146
2147 if len(res) == 0:
2148 raise RuntimeError("No valid spw.")
2149
[2882]2150 # restore original values
[2892]2151 self.set_unit(orig_unit)
[2891]2152 if restfreq is not None:
[2892]2153 self._setmolidcol_list(orig_molids)
2154 if frame is not None:
2155 self.set_freqframe(orig_frame)
2156 if doppler is not None:
2157 self.set_doppler(orig_doppler)
[2882]2158
2159 return res
[2890]2160
[2882]2161 @asaplog_post_dec
2162 def get_first_rowno_by_if(self, ifno):
2163 found = False
2164 for irow in xrange(self.nrow()):
2165 if (self.getif(irow) == ifno):
2166 res = irow
2167 found = True
2168 break
2169
2170 if not found: raise RuntimeError("Invalid IF value.")
2171
2172 return res
2173
2174 @asaplog_post_dec
2175 def _get_coordinate_list(self):
2176 res = []
2177 spws = self.getifnos()
2178 for spw in spws:
2179 elem = {}
2180 elem['if'] = spw
2181 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2182 res.append(elem)
2183
2184 return res
2185
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): a non-string argument is only reported to the logger;
        # execution continues and will fail below -- confirm this is intended.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # expand each [min,max] pair into the list of IFs that exist
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row belonging to this IF to fetch
                        # its frequency coordinate and abcissa values
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half of one channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half of one channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is [min,max], possibly with a None boundary
                        # from a '<' or '>' selection; None in the list means
                        # "whole spectrum" for this spw
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2323
[2611]2324 @asaplog_post_dec
2325 def parse_idx_selection(self, mode, selexpr):
2326 """
2327 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2328 BEAMNO, and row number
2329
2330 Parameters:
2331 mode : which column to select.
2332 ['scan',|'if'|'pol'|'beam'|'row']
2333 selexpr : A comma separated selection expression.
2334 examples:
2335 '' = all (returns [])
2336 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2337 (returns [0,1,4,5,6,9])
2338 Returns:
2339 A List of selected indices
2340 """
2341 if selexpr == "":
2342 return []
2343 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2344 'b': 'beam', 'r': 'row'}
2345 smode = mode.lower()[0]
2346 if not (smode in valid_modes.keys()):
2347 msg = "Invalid mode '%s'. Valid modes are %s" %\
2348 (mode, str(valid_modes.values()))
2349 asaplog.post()
2350 asaplog.push(msg)
2351 asaplog.post("ERROR")
2352 mode = valid_modes[smode]
2353 minidx = None
2354 maxidx = None
2355 if smode == 'r':
2356 minidx = 0
2357 maxidx = self.nrow()
2358 else:
2359 idx = getattr(self,"get"+mode+"nos")()
2360 minidx = min(idx)
2361 maxidx = max(idx)
2362 del idx
[2867]2363 # split selexpr by "<spaces>,<spaces>"
2364 comma_sep = re.compile('\s*,\s*')
2365 sellist = comma_sep.split(selexpr)
[2611]2366 idxlist = []
2367 for currselstr in sellist:
2368 # single range (may include ~, < or >)
2369 currlist = self._parse_selection(currselstr, typestr='integer',
2370 minval=minidx,maxval=maxidx)
2371 for thelist in currlist:
2372 idxlist += range(thelist[0],thelist[1]+1)
2373 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2374 asaplog.push(msg)
2375 return idxlist
2376
[2349]2377 def _parse_selection(self, selstr, typestr='float', offset=0.,
[2351]2378 minval=None, maxval=None):
[2013]2379 """
2380 Parameters:
2381 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2382 typestr : The type of the values in returned list
2383 ('integer' or 'float')
2384 offset : The offset value to subtract from or add to
2385 the boundary value if the selection string
[2611]2386 includes '<' or '>' [Valid only for typestr='float']
[2013]2387 minval, maxval : The minimum/maximum values to set if the
2388 selection string includes '<' or '>'.
2389 The list element is filled with None by default.
2390 Returns:
2391 A list of min/max pair of selections.
2392 Example:
[2611]2393 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2394 --> returns [[0,2],[5,7],[9,9]]
2395 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2396 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
[2013]2397 """
[2867]2398 # split selstr by '<spaces>;<spaces>'
2399 semi_sep = re.compile('\s*;\s*')
2400 selgroups = semi_sep.split(selstr)
[2013]2401 sellists = []
2402 if typestr.lower().startswith('int'):
2403 formatfunc = int
[2611]2404 offset = 1
[2013]2405 else:
2406 formatfunc = float
2407
2408 for currsel in selgroups:
[2867]2409 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2410 minsel = minval
2411 maxsel = maxval
[2013]2412 if currsel.find('~') > 0:
[2611]2413 # val0 <= x <= val1
[2013]2414 minsel = formatfunc(currsel.split('~')[0].strip())
[2867]2415 maxsel = formatfunc(currsel.split('~')[1].strip())
[2611]2416 elif currsel.strip().find('<=') > -1:
2417 bound = currsel.split('<=')
2418 try: # try "x <= val"
2419 minsel = minval
2420 maxsel = formatfunc(bound[1].strip())
2421 except ValueError: # now "val <= x"
2422 minsel = formatfunc(bound[0].strip())
2423 maxsel = maxval
2424 elif currsel.strip().find('>=') > -1:
2425 bound = currsel.split('>=')
2426 try: # try "x >= val"
2427 minsel = formatfunc(bound[1].strip())
2428 maxsel = maxval
2429 except ValueError: # now "val >= x"
2430 minsel = minval
2431 maxsel = formatfunc(bound[0].strip())
2432 elif currsel.strip().find('<') > -1:
2433 bound = currsel.split('<')
2434 try: # try "x < val"
2435 minsel = minval
2436 maxsel = formatfunc(bound[1].strip()) \
2437 - formatfunc(offset)
2438 except ValueError: # now "val < x"
2439 minsel = formatfunc(bound[0].strip()) \
[2013]2440 + formatfunc(offset)
[2611]2441 maxsel = maxval
2442 elif currsel.strip().find('>') > -1:
2443 bound = currsel.split('>')
2444 try: # try "x > val"
2445 minsel = formatfunc(bound[1].strip()) \
2446 + formatfunc(offset)
2447 maxsel = maxval
2448 except ValueError: # now "val > x"
2449 minsel = minval
2450 maxsel = formatfunc(bound[0].strip()) \
2451 - formatfunc(offset)
[2013]2452 else:
2453 minsel = formatfunc(currsel)
2454 maxsel = formatfunc(currsel)
2455 sellists.append([minsel,maxsel])
2456 return sellists
2457
[1819]2458# def get_restfreqs(self):
2459# """
2460# Get the restfrequency(s) stored in this scantable.
2461# The return value(s) are always of unit 'Hz'
2462# Parameters:
2463# none
2464# Returns:
2465# a list of doubles
2466# """
2467# return list(self._getrestfreqs())
2468
2469 def get_restfreqs(self, ids=None):
[1846]2470 """\
[256]2471 Get the restfrequency(s) stored in this scantable.
2472 The return value(s) are always of unit 'Hz'
[1846]2473
[256]2474 Parameters:
[1846]2475
[1819]2476 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2477 be retrieved
[1846]2478
[256]2479 Returns:
[1846]2480
[1819]2481 dictionary containing ids and a list of doubles for each id
[1846]2482
[256]2483 """
[1819]2484 if ids is None:
[2349]2485 rfreqs = {}
[1819]2486 idlist = self.getmolnos()
2487 for i in idlist:
[2349]2488 rfreqs[i] = list(self._getrestfreqs(i))
[1819]2489 return rfreqs
2490 else:
[2349]2491 if type(ids) == list or type(ids) == tuple:
2492 rfreqs = {}
[1819]2493 for i in ids:
[2349]2494 rfreqs[i] = list(self._getrestfreqs(i))
[1819]2495 return rfreqs
2496 else:
2497 return list(self._getrestfreqs(ids))
[102]2498
[2349]2499 @asaplog_post_dec
[931]2500 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]2501 """\
[931]2502 Set or replace the restfrequency specified and
[1938]2503 if the 'freqs' argument holds a scalar,
[931]2504 then that rest frequency will be applied to all the selected
2505 data. If the 'freqs' argument holds
2506 a vector, then it MUST be of equal or smaller length than
2507 the number of IFs (and the available restfrequencies will be
2508 replaced by this vector). In this case, *all* data have
2509 the restfrequency set per IF according
2510 to the corresponding value you give in the 'freqs' vector.
[1118]2511 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]2512 IF 1 gets restfreq 2e9.
[1846]2513
[1395]2514 You can also specify the frequencies via a linecatalog.
[1153]2515
[931]2516 Parameters:
[1846]2517
[931]2518 freqs: list of rest frequency values or string idenitfiers
[1855]2519
[931]2520 unit: unit for rest frequency (default 'Hz')
[402]2521
[1846]2522
2523 Example::
2524
[1819]2525 # set the given restfrequency for the all currently selected IFs
[931]2526 scan.set_restfreqs(freqs=1.4e9)
[1845]2527 # set restfrequencies for the n IFs (n > 1) in the order of the
2528 # list, i.e
2529 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2530 # len(list_of_restfreqs) == nIF
2531 # for nIF == 1 the following will set multiple restfrequency for
2532 # that IF
[1819]2533 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]2534 # set multiple restfrequencies per IF. as a list of lists where
2535 # the outer list has nIF elements, the inner s arbitrary
2536 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]2537
[1846]2538 *Note*:
[1845]2539
[931]2540 To do more sophisticate Restfrequency setting, e.g. on a
2541 source and IF basis, use scantable.set_selection() before using
[1846]2542 this function::
[931]2543
[1846]2544 # provided your scantable is called scan
2545 selection = selector()
[2431]2546 selection.set_name('ORION*')
[1846]2547 selection.set_ifs([1])
2548 scan.set_selection(selection)
2549 scan.set_restfreqs(freqs=86.6e9)
2550
[931]2551 """
2552 varlist = vars()
[1157]2553 from asap import linecatalog
2554 # simple value
[1118]2555 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]2556 self._setrestfreqs([freqs], [""], unit)
[1157]2557 # list of values
[1118]2558 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]2559 # list values are scalars
[1118]2560 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]2561 if len(freqs) == 1:
2562 self._setrestfreqs(freqs, [""], unit)
2563 else:
2564 # allow the 'old' mode of setting mulitple IFs
2565 savesel = self._getselection()
[2599]2566 sel = self.get_selection()
[1845]2567 iflist = self.getifnos()
2568 if len(freqs)>len(iflist):
2569 raise ValueError("number of elements in list of list "
2570 "exeeds the current IF selections")
2571 iflist = self.getifnos()
2572 for i, fval in enumerate(freqs):
2573 sel.set_ifs(iflist[i])
2574 self._setselection(sel)
2575 self._setrestfreqs([fval], [""], unit)
2576 self._setselection(savesel)
2577
2578 # list values are dict, {'value'=, 'name'=)
[1157]2579 elif isinstance(freqs[-1], dict):
[1845]2580 values = []
2581 names = []
2582 for d in freqs:
2583 values.append(d["value"])
2584 names.append(d["name"])
2585 self._setrestfreqs(values, names, unit)
[1819]2586 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]2587 savesel = self._getselection()
[2599]2588 sel = self.get_selection()
[1322]2589 iflist = self.getifnos()
[1819]2590 if len(freqs)>len(iflist):
[1845]2591 raise ValueError("number of elements in list of list exeeds"
2592 " the current IF selections")
2593 for i, fval in enumerate(freqs):
[1322]2594 sel.set_ifs(iflist[i])
[1259]2595 self._setselection(sel)
[1845]2596 self._setrestfreqs(fval, [""], unit)
[1157]2597 self._setselection(savesel)
2598 # freqs are to be taken from a linecatalog
[1153]2599 elif isinstance(freqs, linecatalog):
2600 savesel = self._getselection()
[2599]2601 sel = self.get_selection()
[1153]2602 for i in xrange(freqs.nrow()):
[1322]2603 sel.set_ifs(iflist[i])
[1153]2604 self._setselection(sel)
[1845]2605 self._setrestfreqs([freqs.get_frequency(i)],
2606 [freqs.get_name(i)], "MHz")
[1153]2607 # ensure that we are not iterating past nIF
2608 if i == self.nif()-1: break
2609 self._setselection(savesel)
[931]2610 else:
2611 return
2612 self._add_history("set_restfreqs", varlist)
2613
[2349]2614 @asaplog_post_dec
[1360]2615 def shift_refpix(self, delta):
[1846]2616 """\
[1589]2617 Shift the reference pixel of the Spectra Coordinate by an
2618 integer amount.
[1846]2619
[1589]2620 Parameters:
[1846]2621
[1589]2622 delta: the amount to shift by
[1846]2623
2624 *Note*:
2625
[1589]2626 Be careful using this with broadband data.
[1846]2627
[1360]2628 """
[2349]2629 varlist = vars()
[1731]2630 Scantable.shift_refpix(self, delta)
[2349]2631 s._add_history("shift_refpix", varlist)
[931]2632
[1862]2633 @asaplog_post_dec
[2820]2634 def history(self, filename=None, nrows=-1, start=0):
[1846]2635 """\
[1259]2636 Print the history. Optionally to a file.
[1846]2637
[1348]2638 Parameters:
[1846]2639
[1928]2640 filename: The name of the file to save the history to.
[1846]2641
[1259]2642 """
[2820]2643 n = self._historylength()
2644 if nrows == -1:
2645 nrows = n
2646 if start+nrows > n:
2647 nrows = nrows-start
2648 if n > 1000 and nrows == n:
2649 nrows = 1000
2650 start = n-1000
2651 asaplog.push("Warning: History has {0} entries. Displaying last "
2652 "1000".format(n))
2653 hist = list(self._gethistory(nrows, start))
[794]2654 out = "-"*80
[484]2655 for h in hist:
[2820]2656 if not h.strip():
2657 continue
2658 if h.find("---") >-1:
2659 continue
[489]2660 else:
2661 items = h.split("##")
2662 date = items[0]
2663 func = items[1]
2664 items = items[2:]
[794]2665 out += "\n"+date+"\n"
2666 out += "Function: %s\n Parameters:" % (func)
[489]2667 for i in items:
[1938]2668 if i == '':
2669 continue
[489]2670 s = i.split("=")
[1118]2671 out += "\n %s = %s" % (s[0], s[1])
[2820]2672 out = "\n".join([out, "*"*80])
[1259]2673 if filename is not None:
2674 if filename is "":
2675 filename = 'scantable_history.txt'
2676 filename = os.path.expandvars(os.path.expanduser(filename))
2677 if not os.path.isdir(filename):
2678 data = open(filename, 'w')
2679 data.write(out)
2680 data.close()
2681 else:
2682 msg = "Illegal file name '%s'." % (filename)
[1859]2683 raise IOError(msg)
2684 return page(out)
[2349]2685
[513]2686 #
2687 # Maths business
2688 #
[1862]2689 @asaplog_post_dec
[2818]2690 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2691 avmode="NONE"):
[1846]2692 """\
[2349]2693 Return the (time) weighted average of a scan. Scans will be averaged
2694 only if the source direction (RA/DEC) is within 1' otherwise
[1846]2695
2696 *Note*:
2697
[1070]2698 in channels only - align if necessary
[1846]2699
[513]2700 Parameters:
[1846]2701
[513]2702 mask: an optional mask (only used for 'var' and 'tsys'
2703 weighting)
[1855]2704
[558]2705 scanav: True averages each scan separately
2706 False (default) averages all scans together,
[1855]2707
[1099]2708 weight: Weighting scheme.
2709 'none' (mean no weight)
2710 'var' (1/var(spec) weighted)
2711 'tsys' (1/Tsys**2 weighted)
2712 'tint' (integration time weighted)
2713 'tintsys' (Tint/Tsys**2)
2714 'median' ( median averaging)
[535]2715 The default is 'tint'
[1855]2716
[931]2717 align: align the spectra in velocity before averaging. It takes
2718 the time of the first spectrum as reference time.
[2818]2719 avmode: 'SOURCE' - also select by source name - or
2720 'NONE' (default). Not applicable for scanav=True or
2721 weight=median
[1846]2722
2723 Example::
2724
[513]2725 # time average the scantable without using a mask
[710]2726 newscan = scan.average_time()
[1846]2727
[513]2728 """
2729 varlist = vars()
[1593]2730 weight = weight or 'TINT'
2731 mask = mask or ()
[2818]2732 scanav = (scanav and 'SCAN') or avmode.upper()
[1118]2733 scan = (self, )
[1859]2734
2735 if align:
2736 scan = (self.freq_align(insitu=False), )
[2818]2737 asaplog.push("Note: Alignment is don on a source-by-source basis")
2738 asaplog.push("Note: Averaging (by default) is not")
2739 # we need to set it to SOURCE averaging here
[1859]2740 s = None
2741 if weight.upper() == 'MEDIAN':
2742 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2743 scanav))
2744 else:
2745 s = scantable(self._math._average(scan, mask, weight.upper(),
2746 scanav))
[1099]2747 s._add_history("average_time", varlist)
[513]2748 return s
[710]2749
[1862]2750 @asaplog_post_dec
[876]2751 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]2752 """\
[513]2753 Return a scan where all spectra are converted to either
2754 Jansky or Kelvin depending upon the flux units of the scan table.
2755 By default the function tries to look the values up internally.
2756 If it can't find them (or if you want to over-ride), you must
2757 specify EITHER jyperk OR eta (and D which it will try to look up
2758 also if you don't set it). jyperk takes precedence if you set both.
[1846]2759
[513]2760 Parameters:
[1846]2761
[513]2762 jyperk: the Jy / K conversion factor
[1855]2763
[513]2764 eta: the aperture efficiency
[1855]2765
[1928]2766 d: the geometric diameter (metres)
[1855]2767
[513]2768 insitu: if False a new scantable is returned.
2769 Otherwise, the scaling is done in-situ
2770 The default is taken from .asaprc (False)
[1846]2771
[513]2772 """
2773 if insitu is None: insitu = rcParams['insitu']
[876]2774 self._math._setinsitu(insitu)
[513]2775 varlist = vars()
[1593]2776 jyperk = jyperk or -1.0
2777 d = d or -1.0
2778 eta = eta or -1.0
[876]2779 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2780 s._add_history("convert_flux", varlist)
2781 if insitu: self._assign(s)
2782 else: return s
[513]2783
[1862]2784 @asaplog_post_dec
[876]2785 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]2786 """\
[513]2787 Return a scan after applying a gain-elevation correction.
2788 The correction can be made via either a polynomial or a
2789 table-based interpolation (and extrapolation if necessary).
2790 You specify polynomial coefficients, an ascii table or neither.
2791 If you specify neither, then a polynomial correction will be made
2792 with built in coefficients known for certain telescopes (an error
2793 will occur if the instrument is not known).
2794 The data and Tsys are *divided* by the scaling factors.
[1846]2795
[513]2796 Parameters:
[1846]2797
[513]2798 poly: Polynomial coefficients (default None) to compute a
2799 gain-elevation correction as a function of
2800 elevation (in degrees).
[1855]2801
[513]2802 filename: The name of an ascii file holding correction factors.
2803 The first row of the ascii file must give the column
2804 names and these MUST include columns
[2431]2805 'ELEVATION' (degrees) and 'FACTOR' (multiply data
[513]2806 by this) somewhere.
2807 The second row must give the data type of the
2808 column. Use 'R' for Real and 'I' for Integer.
2809 An example file would be
2810 (actual factors are arbitrary) :
2811
2812 TIME ELEVATION FACTOR
2813 R R R
2814 0.1 0 0.8
2815 0.2 20 0.85
2816 0.3 40 0.9
2817 0.4 60 0.85
2818 0.5 80 0.8
2819 0.6 90 0.75
[1855]2820
[513]2821 method: Interpolation method when correcting from a table.
[2431]2822 Values are 'nearest', 'linear' (default), 'cubic'
2823 and 'spline'
[1855]2824
[513]2825 insitu: if False a new scantable is returned.
2826 Otherwise, the scaling is done in-situ
2827 The default is taken from .asaprc (False)
[1846]2828
[513]2829 """
2830
2831 if insitu is None: insitu = rcParams['insitu']
[876]2832 self._math._setinsitu(insitu)
[513]2833 varlist = vars()
[1593]2834 poly = poly or ()
[513]2835 from os.path import expandvars
2836 filename = expandvars(filename)
[876]2837 s = scantable(self._math._gainel(self, poly, filename, method))
2838 s._add_history("gain_el", varlist)
[1593]2839 if insitu:
2840 self._assign(s)
2841 else:
2842 return s
[710]2843
[1862]2844 @asaplog_post_dec
[931]2845 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2846 """\
[513]2847 Return a scan where all rows have been aligned in frequency/velocity.
2848 The alignment frequency frame (e.g. LSRK) is that set by function
2849 set_freqframe.
[1846]2850
[513]2851 Parameters:
[1855]2852
[513]2853 reftime: reference time to align at. By default, the time of
2854 the first row of data is used.
[1855]2855
[513]2856 method: Interpolation method for regridding the spectra.
[2431]2857 Choose from 'nearest', 'linear', 'cubic' (default)
2858 and 'spline'
[1855]2859
[513]2860 insitu: if False a new scantable is returned.
2861 Otherwise, the scaling is done in-situ
2862 The default is taken from .asaprc (False)
[1846]2863
[513]2864 """
[931]2865 if insitu is None: insitu = rcParams["insitu"]
[2429]2866 oldInsitu = self._math._insitu()
[876]2867 self._math._setinsitu(insitu)
[513]2868 varlist = vars()
[1593]2869 reftime = reftime or ""
[931]2870 s = scantable(self._math._freq_align(self, reftime, method))
[876]2871 s._add_history("freq_align", varlist)
[2429]2872 self._math._setinsitu(oldInsitu)
[2349]2873 if insitu:
2874 self._assign(s)
2875 else:
2876 return s
[513]2877
[1862]2878 @asaplog_post_dec
[1725]2879 def opacity(self, tau=None, insitu=None):
[1846]2880 """\
[513]2881 Apply an opacity correction. The data
2882 and Tsys are multiplied by the correction factor.
[1846]2883
[513]2884 Parameters:
[1855]2885
[1689]2886 tau: (list of) opacity from which the correction factor is
[513]2887 exp(tau*ZD)
[1689]2888 where ZD is the zenith-distance.
2889 If a list is provided, it has to be of length nIF,
2890 nIF*nPol or 1 and in order of IF/POL, e.g.
2891 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2892 if tau is `None` the opacities are determined from a
2893 model.
[1855]2894
[513]2895 insitu: if False a new scantable is returned.
2896 Otherwise, the scaling is done in-situ
2897 The default is taken from .asaprc (False)
[1846]2898
[513]2899 """
[2349]2900 if insitu is None:
2901 insitu = rcParams['insitu']
[876]2902 self._math._setinsitu(insitu)
[513]2903 varlist = vars()
[1689]2904 if not hasattr(tau, "__len__"):
2905 tau = [tau]
[876]2906 s = scantable(self._math._opacity(self, tau))
2907 s._add_history("opacity", varlist)
[2349]2908 if insitu:
2909 self._assign(s)
2910 else:
2911 return s
[513]2912
[1862]2913 @asaplog_post_dec
[513]2914 def bin(self, width=5, insitu=None):
[1846]2915 """\
[513]2916 Return a scan where all spectra have been binned up.
[1846]2917
[1348]2918 Parameters:
[1846]2919
[513]2920 width: The bin width (default=5) in pixels
[1855]2921
[513]2922 insitu: if False a new scantable is returned.
2923 Otherwise, the scaling is done in-situ
2924 The default is taken from .asaprc (False)
[1846]2925
[513]2926 """
[2349]2927 if insitu is None:
2928 insitu = rcParams['insitu']
[876]2929 self._math._setinsitu(insitu)
[513]2930 varlist = vars()
[876]2931 s = scantable(self._math._bin(self, width))
[1118]2932 s._add_history("bin", varlist)
[1589]2933 if insitu:
2934 self._assign(s)
2935 else:
2936 return s
[513]2937
[1862]2938 @asaplog_post_dec
[2672]2939 def reshape(self, first, last, insitu=None):
2940 """Resize the band by providing first and last channel.
2941 This will cut off all channels outside [first, last].
2942 """
2943 if insitu is None:
2944 insitu = rcParams['insitu']
2945 varlist = vars()
2946 if last < 0:
2947 last = self.nchan()-1 + last
2948 s = None
2949 if insitu:
2950 s = self
2951 else:
2952 s = self.copy()
2953 s._reshape(first,last)
2954 s._add_history("reshape", varlist)
2955 if not insitu:
2956 return s
2957
    @asaplog_post_dec
    def resample(self, width=5, method='cubic', insitu=None):
        """\
        Return a scan where all spectra have been resampled onto a new
        channel grid of the given bin width, using the selected
        interpolation method.

        Parameters:

            width:       The bin width (default=5) in pixels

            method:      Interpolation method when correcting from a table.
                         Values are 'nearest', 'linear', 'cubic' (default)
                         and 'spline'

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None:
            insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._resample(self, method, width))
        s._add_history("resample", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[513]2986
[1862]2987 @asaplog_post_dec
[946]2988 def average_pol(self, mask=None, weight='none'):
[1846]2989 """\
[946]2990 Average the Polarisations together.
[1846]2991
[946]2992 Parameters:
[1846]2993
[946]2994 mask: An optional mask defining the region, where the
2995 averaging will be applied. The output will have all
2996 specified points masked.
[1855]2997
[946]2998 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2999 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]3000
[946]3001 """
3002 varlist = vars()
[1593]3003 mask = mask or ()
[1010]3004 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]3005 s._add_history("average_pol", varlist)
[992]3006 return s
[513]3007
[1862]3008 @asaplog_post_dec
[1145]3009 def average_beam(self, mask=None, weight='none'):
[1846]3010 """\
[1145]3011 Average the Beams together.
[1846]3012
[1145]3013 Parameters:
3014 mask: An optional mask defining the region, where the
3015 averaging will be applied. The output will have all
3016 specified points masked.
[1855]3017
[1145]3018 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3019 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]3020
[1145]3021 """
3022 varlist = vars()
[1593]3023 mask = mask or ()
[1145]3024 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3025 s._add_history("average_beam", varlist)
3026 return s
3027
[1586]3028 def parallactify(self, pflag):
[1846]3029 """\
[1843]3030 Set a flag to indicate whether this data should be treated as having
[1617]3031 been 'parallactified' (total phase == 0.0)
[1846]3032
[1617]3033 Parameters:
[1855]3034
[1843]3035 pflag: Bool indicating whether to turn this on (True) or
[1617]3036 off (False)
[1846]3037
[1617]3038 """
[1586]3039 varlist = vars()
3040 self._parallactify(pflag)
3041 self._add_history("parallactify", varlist)
3042
[1862]3043 @asaplog_post_dec
[992]3044 def convert_pol(self, poltype=None):
[1846]3045 """\
[992]3046 Convert the data to a different polarisation type.
[1565]3047 Note that you will need cross-polarisation terms for most conversions.
[1846]3048
[992]3049 Parameters:
[1855]3050
[992]3051 poltype: The new polarisation type. Valid types are:
[2431]3052 'linear', 'circular', 'stokes' and 'linpol'
[1846]3053
[992]3054 """
3055 varlist = vars()
[1859]3056 s = scantable(self._math._convertpol(self, poltype))
[1118]3057 s._add_history("convert_pol", varlist)
[992]3058 return s
3059
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive verification:
        # starts as 'H', reset to None after the first row, and becomes "N"
        # once the user rejects all remaining rows (see branches below).
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user to accept (y), reject (n), reject all (r) or
                # accept all (a) — handled by the branches below
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]3152
[2186]3153 @asaplog_post_dec
[2435]3154 def regrid_channel(self, width=5, plot=False, insitu=None):
3155 """\
3156 Regrid the spectra by the specified channel width
3157
3158 Parameters:
3159
3160 width: The channel width (float) of regridded spectra
3161 in the current spectral unit.
3162
3163 plot: [NOT IMPLEMENTED YET]
3164 plot the original and the regridded spectra.
3165 In this each indivual fit has to be approved, by
3166 typing 'y' or 'n'
3167
3168 insitu: if False a new scantable is returned.
3169 Otherwise, the scaling is done in-situ
3170 The default is taken from .asaprc (False)
3171
3172 """
3173 if insitu is None: insitu = rcParams['insitu']
3174 varlist = vars()
3175
3176 if plot:
3177 asaplog.post()
3178 asaplog.push("Verification plot is not implemtnetd yet.")
3179 asaplog.post("WARN")
3180
3181 s = self.copy()
3182 s._regrid_specchan(width)
3183
3184 s._add_history("regrid_channel", varlist)
3185
3186# if plot:
3187# from asap.asapplotter import new_asaplot
3188# theplot = new_asaplot(rcParams['plotter.gui'])
[2535]3189# from matplotlib import rc as rcp
3190# rcp('lines', linewidth=1)
[2435]3191# theplot.set_panels()
3192# ylab=s._get_ordinate_label()
3193# #theplot.palette(0,["#777777","red"])
3194# for r in xrange(s.nrow()):
3195# xsm=s._getabcissa(r)
3196# ysm=s._getspectrum(r)
3197# xorg=orgscan._getabcissa(r)
3198# yorg=orgscan._getspectrum(r)
3199# theplot.clear()
3200# theplot.hold()
3201# theplot.set_axes('ylabel',ylab)
3202# theplot.set_axes('xlabel',s._getabcissalabel(r))
3203# theplot.set_axes('title',s._getsourcename(r))
3204# theplot.set_line(label='Original',color="#777777")
3205# theplot.plot(xorg,yorg)
3206# theplot.set_line(label='Smoothed',color="red")
3207# theplot.plot(xsm,ysm)
3208# ### Ugly part for legend
3209# for i in [0,1]:
3210# theplot.subplots[0]['lines'].append(
3211# [theplot.subplots[0]['axes'].lines[i]]
3212# )
3213# theplot.release()
3214# ### Ugly part for legend
3215# theplot.subplots[0]['lines']=[]
3216# res = raw_input("Accept smoothing ([y]/n): ")
3217# if res.upper() == 'N':
3218# s._setspectrum(yorg, r)
3219# theplot.quit()
3220# del theplot
3221# del orgscan
3222
3223 if insitu: self._assign(s)
3224 else: return s
3225
3226 @asaplog_post_dec
[2186]3227 def _parse_wn(self, wn):
3228 if isinstance(wn, list) or isinstance(wn, tuple):
3229 return wn
3230 elif isinstance(wn, int):
3231 return [ wn ]
3232 elif isinstance(wn, str):
[2277]3233 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]3234 val = wn.split('-')
3235 val = [int(val[0]), int(val[1])]
3236 val.sort()
3237 res = [i for i in xrange(val[0], val[1]+1)]
[2277]3238 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]3239 val = int(wn[2:])+1
3240 res = [i for i in xrange(val)]
[2277]3241 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]3242 val = int(wn[:-2])+1
3243 res = [i for i in xrange(val)]
[2277]3244 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]3245 val = int(wn[1:])
3246 res = [i for i in xrange(val)]
[2277]3247 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]3248 val = int(wn[:-1])
3249 res = [i for i in xrange(val)]
[2411]3250 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3251 # then interpreted in C++
3252 # side as [a,a+1,...,a_nyq]
3253 # (CAS-3759)
[2186]3254 val = int(wn[2:])
[2411]3255 res = [val, -999]
3256 #res = [i for i in xrange(val, self.nchan()/2+1)]
3257 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3258 # then interpreted in C++
3259 # side as [a,a+1,...,a_nyq]
3260 # (CAS-3759)
[2186]3261 val = int(wn[:-2])
[2411]3262 res = [val, -999]
3263 #res = [i for i in xrange(val, self.nchan()/2+1)]
3264 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3265 # then interpreted in C++
3266 # side as [a+1,a+2,...,a_nyq]
3267 # (CAS-3759)
[2186]3268 val = int(wn[1:])+1
[2411]3269 res = [val, -999]
3270 #res = [i for i in xrange(val, self.nchan()/2+1)]
3271 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3272 # then interpreted in C++
3273 # side as [a+1,a+2,...,a_nyq]
3274 # (CAS-3759)
[2186]3275 val = int(wn[:-1])+1
[2411]3276 res = [val, -999]
3277 #res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]3278
[2186]3279 return res
3280 else:
3281 msg = 'wrong value given for addwn/rejwn'
3282 raise RuntimeError(msg)
3283
[2713]3284 @asaplog_post_dec
[2810]3285 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
[2767]3286 """\
3287 Subtract baseline based on parameters written in Baseline Table.
3288
3289 Parameters:
[2809]3290 insitu: if True, baseline fitting/subtraction is done
[2810]3291 in-situ. If False, a new scantable with
3292 baseline subtracted is returned. Actually,
3293 format of the returned value depends on both
3294 insitu and retfitres (see below).
[2767]3295 The default is taken from .asaprc (False)
[2810]3296 retfitres: if True, the results of baseline fitting (i.e.,
3297 coefficients and rms) are returned.
3298 default is False.
3299 The format of the returned value of this
3300 function varies as follows:
3301 (1) in case insitu=True and retfitres=True:
3302 fitting result.
3303 (2) in case insitu=True and retfitres=False:
3304 None.
3305 (3) in case insitu=False and retfitres=True:
3306 a dictionary containing a new scantable
3307 (with baseline subtracted) and the fitting
3308 results.
3309 (4) in case insitu=False and retfitres=False:
3310 a new scantable (with baseline subtracted).
[2767]3311 inbltable: name of input baseline table. The row number of
3312 scantable and that of inbltable must be
3313 identical.
3314 outbltable: name of output baseline table where baseline
3315 parameters and fitting results recorded.
3316 default is ''(no output).
[2809]3317 overwrite: if True when an existing baseline table is
3318 specified for outbltable, overwrites it.
3319 Otherwise there is no harm.
[2767]3320 default is False.
3321 """
3322
3323 try:
3324 varlist = vars()
[2810]3325 if retfitres is None: retfitres = False
[2767]3326 if inbltable is None: raise ValueError("bltable missing.")
3327 if outbltable is None: outbltable = ''
3328 if overwrite is None: overwrite = False
3329
3330 if insitu is None: insitu = rcParams['insitu']
3331 if insitu:
3332 workscan = self
3333 else:
3334 workscan = self.copy()
3335
3336 sres = workscan._apply_bltable(inbltable,
[2810]3337 retfitres,
[2767]3338 outbltable,
3339 os.path.exists(outbltable),
3340 overwrite)
[2810]3341 if retfitres: res = parse_fitresult(sres)
[2767]3342
3343 workscan._add_history('apply_bltable', varlist)
3344
3345 if insitu:
3346 self._assign(workscan)
[2810]3347 if retfitres:
3348 return res
3349 else:
3350 return None
[2767]3351 else:
[2810]3352 if retfitres:
3353 return {'scantable': workscan, 'fitresults': res}
3354 else:
3355 return workscan
[2767]3356
3357 except RuntimeError, e:
3358 raise_fitting_failure_exception(e)
3359
3360 @asaplog_post_dec
[2810]3361 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
[2767]3362 """\
3363 Subtract baseline based on parameters written in the input list.
3364
3365 Parameters:
[2809]3366 insitu: if True, baseline fitting/subtraction is done
[2810]3367 in-situ. If False, a new scantable with
3368 baseline subtracted is returned. Actually,
3369 format of the returned value depends on both
3370 insitu and retfitres (see below).
[2767]3371 The default is taken from .asaprc (False)
[2810]3372 retfitres: if True, the results of baseline fitting (i.e.,
3373 coefficients and rms) are returned.
3374 default is False.
3375 The format of the returned value of this
3376 function varies as follows:
3377 (1) in case insitu=True and retfitres=True:
3378 fitting result.
3379 (2) in case insitu=True and retfitres=False:
3380 None.
3381 (3) in case insitu=False and retfitres=True:
3382 a dictionary containing a new scantable
3383 (with baseline subtracted) and the fitting
3384 results.
3385 (4) in case insitu=False and retfitres=False:
3386 a new scantable (with baseline subtracted).
[2767]3387 blinfo: baseline parameter set stored in a dictionary
3388 or a list of dictionary. Each dictionary
3389 corresponds to each spectrum and must contain
3390 the following keys and values:
3391 'row': row number,
3392 'blfunc': function name. available ones include
3393 'poly', 'chebyshev', 'cspline' and
3394 'sinusoid',
3395 'order': maximum order of polynomial. needed
3396 if blfunc='poly' or 'chebyshev',
3397 'npiece': number or piecewise polynomial.
3398 needed if blfunc='cspline',
3399 'nwave': a list of sinusoidal wave numbers.
3400 needed if blfunc='sinusoid', and
3401 'masklist': min-max windows for channel mask.
3402 the specified ranges will be used
3403 for fitting.
3404 bltable: name of output baseline table where baseline
3405 parameters and fitting results recorded.
3406 default is ''(no output).
[2809]3407 overwrite: if True when an existing baseline table is
3408 specified for bltable, overwrites it.
3409 Otherwise there is no harm.
[2767]3410 default is False.
3411
3412 Example:
3413 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3414 'masklist':[[10,350],[352,510]]},
3415 {'row':1, 'blfunc':'cspline', 'npiece':3,
3416 'masklist':[[3,16],[19,404],[407,511]]}
3417 ])
3418
3419 the first spectrum (row=0) will be fitted with polynomial
3420 of order=5 and the next one (row=1) will be fitted with cubic
3421 spline consisting of 3 pieces.
3422 """
3423
3424 try:
3425 varlist = vars()
[2810]3426 if retfitres is None: retfitres = False
[2767]3427 if blinfo is None: blinfo = []
3428 if bltable is None: bltable = ''
3429 if overwrite is None: overwrite = False
3430
3431 if insitu is None: insitu = rcParams['insitu']
3432 if insitu:
3433 workscan = self
3434 else:
3435 workscan = self.copy()
3436
3437 nrow = workscan.nrow()
3438
3439 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3440
3441 print "in_blinfo=< "+ str(in_blinfo)+" >"
3442
3443 sres = workscan._sub_baseline(in_blinfo,
[2810]3444 retfitres,
[2767]3445 bltable,
3446 os.path.exists(bltable),
3447 overwrite)
[2810]3448 if retfitres: res = parse_fitresult(sres)
[2767]3449
3450 workscan._add_history('sub_baseline', varlist)
3451
3452 if insitu:
3453 self._assign(workscan)
[2810]3454 if retfitres:
3455 return res
3456 else:
3457 return None
[2767]3458 else:
[2810]3459 if retfitres:
3460 return {'scantable': workscan, 'fitresults': res}
3461 else:
3462 return workscan
[2767]3463
3464 except RuntimeError, e:
3465 raise_fitting_failure_exception(e)
3466
3467 @asaplog_post_dec
[2713]3468 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3469 whichrow=None, uselinefinder=None, edge=None,
3470 threshold=None, chan_avg_limit=None):
3471 """\
3472 Calculates and returns model selection criteria for a specified
3473 baseline model and a given spectrum data.
3474 Available values include Akaike Information Criterion (AIC), the
3475 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3476 Bayesian Information Criterion (BIC) and the Generalised Cross
3477 Validation (GCV).
[2186]3478
[2713]3479 Parameters:
3480 value: name of model selection criteria to calculate.
3481 available ones include 'aic', 'aicc', 'bic' and
3482 'gcv'. default is 'aicc'.
3483 blfunc: baseline function name. available ones include
3484 'chebyshev', 'cspline' and 'sinusoid'.
3485 default is 'chebyshev'.
3486 order: parameter for basline function. actually stands for
3487 order of polynomial (order) for 'chebyshev',
3488 number of spline pieces (npiece) for 'cspline' and
3489 maximum wave number for 'sinusoid', respectively.
3490 default is 5 (which is also the default order value
3491 for [auto_]chebyshev_baseline()).
3492 mask: an optional mask. default is [].
3493 whichrow: row number. default is 0 (the first row)
3494 uselinefinder: use sd.linefinder() to flag out line regions
3495 default is True.
3496 edge: an optional number of channel to drop at
3497 the edge of spectrum. If only one value is
3498 specified, the same number will be dropped
3499 from both sides of the spectrum. Default
3500 is to keep all channels. Nested tuples
3501 represent individual edge selection for
3502 different IFs (a number of spectral channels
3503 can be different)
3504 default is (0, 0).
3505 threshold: the threshold used by line finder. It is
3506 better to keep it large as only strong lines
3507 affect the baseline solution.
3508 default is 3.
3509 chan_avg_limit: a maximum number of consequtive spectral
3510 channels to average during the search of
3511 weak and broad lines. The default is no
3512 averaging (and no search for weak lines).
3513 If such lines can affect the fitted baseline
3514 (e.g. a high order polynomial is fitted),
3515 increase this parameter (usually values up
3516 to 8 are reasonable). Most users of this
3517 method should find the default value sufficient.
3518 default is 1.
3519
3520 Example:
3521 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3522 """
3523
3524 try:
3525 varlist = vars()
3526
3527 if value is None: value = 'aicc'
3528 if blfunc is None: blfunc = 'chebyshev'
3529 if order is None: order = 5
3530 if mask is None: mask = []
3531 if whichrow is None: whichrow = 0
3532 if uselinefinder is None: uselinefinder = True
3533 if edge is None: edge = (0, 0)
3534 if threshold is None: threshold = 3
3535 if chan_avg_limit is None: chan_avg_limit = 1
3536
3537 return self._calc_aic(value, blfunc, order, mask,
3538 whichrow, uselinefinder, edge,
3539 threshold, chan_avg_limit)
3540
3541 except RuntimeError, e:
3542 raise_fitting_failure_exception(e)
3543
    @asaplog_post_dec
    def sinusoid_baseline(self, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None,
                          insitu=None,
                          clipthresh=None, clipniter=None,
                          plot=None,
                          getresidual=None,
                          showprogress=None, minnrow=None,
                          outlog=None,
                          blfile=None, csvformat=None,
                          bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:          *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           indivual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.
            bltable:       name of a baseline table where fitting results
                           (coefficients, rms, etc.) are to be written.
                           if given, fitting results will NOT be output to
                           scantable (insitu=True) or None will be
                           returned (insitu=False).
                           (default is "": no table output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve unset keyword arguments to their documented defaults
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT options are passed to the C++ layer as a single
            # comma-separated string
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # the csv flag is encoded as a 'T'/'F' character prepended to
            # the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask,
                                        fftinfo,
                                        #applyfft, fftmethod.lower(),
                                        #str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)
            workscan._add_history('sinusoid_baseline', varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table: nothing to return
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3689
3690
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None,
                               insitu=None,
                               clipthresh=None, clipniter=None,
                               edge=None, threshold=None, chan_avg_limit=None,
                               plot=None,
                               getresidual=None,
                               showprogress=None, minnrow=None,
                               outlog=None,
                               blfile=None, csvformat=None,
                               bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [0].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve unset keyword arguments to their documented defaults
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the FFT options are passed to the C++ layer as a single
            # comma-separated string
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            # the csv flag is encoded as a 'T'/'F' character prepended to
            # the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask,
                                             fftinfo,
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile, bltable)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table: nothing to return
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2047]3858
3859 @asaplog_post_dec
[2771]3860 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]3861 clipthresh=None, clipniter=None, plot=None,
3862 getresidual=None, showprogress=None, minnrow=None,
[2767]3863 outlog=None, blfile=None, csvformat=None,
3864 bltable=None):
[1846]3865 """\
[2349]3866 Return a scan which has been baselined (all rows) by cubic spline
3867 function (piecewise cubic polynomial).
3868
[513]3869 Parameters:
[2771]3870 mask: An optional mask
3871 npiece: Number of pieces. (default is 2)
[2189]3872 insitu: If False a new scantable is returned.
3873 Otherwise, the scaling is done in-situ
3874 The default is taken from .asaprc (False)
3875 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3876 clipniter: maximum number of iteration of 'clipthresh'-sigma
3877 clipping (default is 0)
[2189]3878 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3879 plot the fit and the residual. In this each
3880 indivual fit has to be approved, by typing 'y'
3881 or 'n'
3882 getresidual: if False, returns best-fit values instead of
3883 residual. (default is True)
3884 showprogress: show progress status for large data.
3885 default is True.
3886 minnrow: minimum number of input spectra to show.
3887 default is 1000.
3888 outlog: Output the coefficients of the best-fit
3889 function to logger (default is False)
3890 blfile: Name of a text file in which the best-fit
3891 parameter values to be written
3892 (default is "": no file/logger output)
[2641]3893 csvformat: if True blfile is csv-formatted, default is False.
[2767]3894 bltable: name of a baseline table where fitting results
3895 (coefficients, rms, etc.) are to be written.
3896 if given, fitting results will NOT be output to
3897 scantable (insitu=True) or None will be
3898 returned (insitu=False).
3899 (default is "": no table output)
[1846]3900
[2012]3901 Example:
[2349]3902 # return a scan baselined by a cubic spline consisting of 2 pieces
3903 # (i.e., 1 internal knot),
[2012]3904 # also with 3-sigma clipping, iteration up to 4 times
3905 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]3906
3907 Note:
3908 The best-fit parameter values output in logger and/or blfile are now
3909 based on specunit of 'channel'.
[2012]3910 """
3911
[2186]3912 try:
3913 varlist = vars()
3914
3915 if insitu is None: insitu = rcParams['insitu']
3916 if insitu:
3917 workscan = self
3918 else:
3919 workscan = self.copy()
[1855]3920
[2410]3921 if mask is None: mask = []
[2189]3922 if npiece is None: npiece = 2
3923 if clipthresh is None: clipthresh = 3.0
3924 if clipniter is None: clipniter = 0
3925 if plot is None: plot = False
3926 if getresidual is None: getresidual = True
3927 if showprogress is None: showprogress = True
3928 if minnrow is None: minnrow = 1000
3929 if outlog is None: outlog = False
3930 if blfile is None: blfile = ''
[2767]3931 if csvformat is None: csvformat = False
3932 if bltable is None: bltable = ''
[1855]3933
[2767]3934 scsvformat = 'T' if csvformat else 'F'
[2641]3935
[2012]3936 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]3937 workscan._cspline_baseline(mask, npiece,
3938 clipthresh, clipniter,
[2349]3939 getresidual,
3940 pack_progress_params(showprogress,
[2641]3941 minnrow),
[2767]3942 outlog, scsvformat+blfile,
3943 bltable)
[2012]3944 workscan._add_history("cspline_baseline", varlist)
[2767]3945
3946 if bltable == '':
3947 if insitu:
3948 self._assign(workscan)
3949 else:
3950 return workscan
[2012]3951 else:
[2767]3952 if not insitu:
3953 return None
[2012]3954
3955 except RuntimeError, e:
[2186]3956 raise_fitting_failure_exception(e)
[1855]3957
[2186]3958 @asaplog_post_dec
[2771]3959 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
[2349]3960 clipthresh=None, clipniter=None,
3961 edge=None, threshold=None, chan_avg_limit=None,
3962 getresidual=None, plot=None,
3963 showprogress=None, minnrow=None, outlog=None,
[2767]3964 blfile=None, csvformat=None, bltable=None):
[2012]3965 """\
3966 Return a scan which has been baselined (all rows) by cubic spline
3967 function (piecewise cubic polynomial).
3968 Spectral lines are detected first using linefinder and masked out
3969 to avoid them affecting the baseline solution.
3970
3971 Parameters:
[2771]3972 mask: an optional mask retreived from scantable
3973 npiece: Number of pieces. (default is 2)
[2189]3974 insitu: if False a new scantable is returned.
3975 Otherwise, the scaling is done in-situ
3976 The default is taken from .asaprc (False)
3977 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2349]3978 clipniter: maximum number of iteration of 'clipthresh'-sigma
3979 clipping (default is 0)
[2189]3980 edge: an optional number of channel to drop at
3981 the edge of spectrum. If only one value is
3982 specified, the same number will be dropped
3983 from both sides of the spectrum. Default
3984 is to keep all channels. Nested tuples
3985 represent individual edge selection for
3986 different IFs (a number of spectral channels
3987 can be different)
3988 threshold: the threshold used by line finder. It is
3989 better to keep it large as only strong lines
3990 affect the baseline solution.
3991 chan_avg_limit: a maximum number of consequtive spectral
3992 channels to average during the search of
3993 weak and broad lines. The default is no
3994 averaging (and no search for weak lines).
3995 If such lines can affect the fitted baseline
3996 (e.g. a high order polynomial is fitted),
3997 increase this parameter (usually values up
3998 to 8 are reasonable). Most users of this
3999 method should find the default value sufficient.
4000 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4001 plot the fit and the residual. In this each
4002 indivual fit has to be approved, by typing 'y'
4003 or 'n'
4004 getresidual: if False, returns best-fit values instead of
4005 residual. (default is True)
4006 showprogress: show progress status for large data.
4007 default is True.
4008 minnrow: minimum number of input spectra to show.
4009 default is 1000.
4010 outlog: Output the coefficients of the best-fit
4011 function to logger (default is False)
4012 blfile: Name of a text file in which the best-fit
4013 parameter values to be written
4014 (default is "": no file/logger output)
[2641]4015 csvformat: if True blfile is csv-formatted, default is False.
[2767]4016 bltable: name of a baseline table where fitting results
4017 (coefficients, rms, etc.) are to be written.
4018 if given, fitting results will NOT be output to
4019 scantable (insitu=True) or None will be
4020 returned (insitu=False).
4021 (default is "": no table output)
[1846]4022
[1907]4023 Example:
[2012]4024 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]4025
4026 Note:
4027 The best-fit parameter values output in logger and/or blfile are now
4028 based on specunit of 'channel'.
[2012]4029 """
[1846]4030
[2186]4031 try:
4032 varlist = vars()
[2012]4033
[2186]4034 if insitu is None: insitu = rcParams['insitu']
4035 if insitu:
4036 workscan = self
[1391]4037 else:
[2186]4038 workscan = self.copy()
4039
[2410]4040 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4041 if mask is None: mask = []
[2186]4042 if npiece is None: npiece = 2
4043 if clipthresh is None: clipthresh = 3.0
4044 if clipniter is None: clipniter = 0
4045 if edge is None: edge = (0, 0)
4046 if threshold is None: threshold = 3
4047 if chan_avg_limit is None: chan_avg_limit = 1
4048 if plot is None: plot = False
4049 if getresidual is None: getresidual = True
[2189]4050 if showprogress is None: showprogress = True
4051 if minnrow is None: minnrow = 1000
[2186]4052 if outlog is None: outlog = False
4053 if blfile is None: blfile = ''
[2641]4054 if csvformat is None: csvformat = False
[2767]4055 if bltable is None: bltable = ''
[1819]4056
[2767]4057 scsvformat = 'T' if csvformat else 'F'
[2641]4058
[2277]4059 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2767]4060 workscan._auto_cspline_baseline(mask, npiece,
4061 clipthresh, clipniter,
[2269]4062 normalise_edge_param(edge),
4063 threshold,
4064 chan_avg_limit, getresidual,
4065 pack_progress_params(showprogress,
4066 minnrow),
[2767]4067 outlog,
4068 scsvformat+blfile,
4069 bltable)
[2012]4070 workscan._add_history("auto_cspline_baseline", varlist)
[2767]4071
4072 if bltable == '':
4073 if insitu:
4074 self._assign(workscan)
4075 else:
4076 return workscan
[1856]4077 else:
[2767]4078 if not insitu:
4079 return None
[2012]4080
4081 except RuntimeError, e:
[2186]4082 raise_fitting_failure_exception(e)
[513]4083
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order
            # up to 5, also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture caller-supplied arguments before defaulting, for history
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for unset parameters
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is passed to the C++ layer as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev polynomial fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results were written to the baseline table instead
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4181
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture caller-supplied arguments before defaulting, for history
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for unset parameters
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is passed to the C++ layer as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev polynomial fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results were written to the baseline table instead
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4304
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)
                          NOTE(review): bltable only takes effect in the
                          non-interactive (plot=False) branch below; with
                          plot=True it is silently ignored, and the return
                          convention here does not check bltable (unlike
                          the sibling *_baseline methods) — confirm intended.

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # capture caller-supplied arguments before defaulting, for history
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for unset parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is passed to the C++ layer as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # Interactive, row-by-row fitting in Python using 'fitter'.
                # NOTE(review): the parameter file is only written if it
                # already exists (os.path.exists) — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' drives the interactive loop; once the user answers
                # "A" (accept all) it becomes "Y" and plotting/prompting stops.
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                    #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    #if accept_fit.upper() == "N":
                    #    #workscan._append_blinfo(None, None, None)
                    #    continue
                    accept_fit = self._get_verify_action("Accept fit?",action)
                    # after the first row fall back to the default prompt mode
                    if r == 0: action = None
                    if accept_fit.upper() == "N":
                        # reject this fit: leave the row's spectrum untouched
                        continue
                    elif accept_fit.upper() == "R":
                        # reject and stop processing remaining rows
                        break
                    elif accept_fit.upper() == "A":
                        # accept this and all remaining fits without asking
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]4454
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)
                            NOTE(review): bltable only takes effect in the
                            non-interactive (plot=False) branch; with
                            plot=True it is silently ignored — confirm.

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # capture caller-supplied arguments before defaulting, for history
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for unset parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is passed to the C++ layer as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # expand edge spec to a flat per-IF list of (left, right) pairs
            edge = normalise_edge_param(edge)

            if plot:
                # Interactive, row-by-row fitting in Python using 'fitter',
                # with linefinder masking out detected lines first.
                # NOTE(review): the parameter file is only written if it
                # already exists (os.path.exists) — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' drives the interactive loop; once the user answers
                # "A" (accept all) it becomes "Y" and plotting/prompting stops.
                action = "H"
                for r in rows:
                    # edge holds per-IF (left, right) pairs flattened: pick this row's
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                    #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    accept_fit = self._get_verify_action("Accept fit?",action)
                    # after the first row fall back to the default prompt mode
                    if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue
                    elif accept_fit.upper() == "R":
                        # reject and stop processing remaining rows
                        break
                    elif accept_fit.upper() == "A":
                        # accept this and all remaining fits without asking
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results were written to the baseline table instead
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]4639
4640 def _init_blinfo(self):
4641 """\
4642 Initialise the following three auxiliary members:
4643 blpars : parameters of the best-fit baseline,
4644 masklists : mask data (edge positions of masked channels) and
4645 actualmask : mask data (in boolean list),
4646 to keep for use later (including output to logger/text files).
4647 Used by poly_baseline() and auto_poly_baseline() in case of
4648 'plot=True'.
4649 """
4650 self.blpars = []
4651 self.masklists = []
4652 self.actualmask = []
4653 return
[880]4654
[2012]4655 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4656 """\
4657 Append baseline-fitting related info to blpars, masklist and
4658 actualmask.
4659 """
4660 self.blpars.append(data_blpars)
4661 self.masklists.append(data_masklists)
4662 self.actualmask.append(data_actualmask)
4663 return
4664
[1862]4665 @asaplog_post_dec
[914]4666 def rotate_linpolphase(self, angle):
[1846]4667 """\
[914]4668 Rotate the phase of the complex polarization O=Q+iU correlation.
4669 This is always done in situ in the raw data. So if you call this
4670 function more than once then each call rotates the phase further.
[1846]4671
[914]4672 Parameters:
[1846]4673
[914]4674 angle: The angle (degrees) to rotate (add) by.
[1846]4675
4676 Example::
4677
[914]4678 scan.rotate_linpolphase(2.3)
[1846]4679
[914]4680 """
4681 varlist = vars()
[936]4682 self._math._rotate_linpolphase(self, angle)
[914]4683 self._add_history("rotate_linpolphase", varlist)
4684 return
[710]4685
[1862]4686 @asaplog_post_dec
[914]4687 def rotate_xyphase(self, angle):
[1846]4688 """\
[914]4689 Rotate the phase of the XY correlation. This is always done in situ
4690 in the data. So if you call this function more than once
4691 then each call rotates the phase further.
[1846]4692
[914]4693 Parameters:
[1846]4694
[914]4695 angle: The angle (degrees) to rotate (add) by.
[1846]4696
4697 Example::
4698
[914]4699 scan.rotate_xyphase(2.3)
[1846]4700
[914]4701 """
4702 varlist = vars()
[936]4703 self._math._rotate_xyphase(self, angle)
[914]4704 self._add_history("rotate_xyphase", varlist)
4705 return
4706
[1862]4707 @asaplog_post_dec
[914]4708 def swap_linears(self):
[1846]4709 """\
[1573]4710 Swap the linear polarisations XX and YY, or better the first two
[1348]4711 polarisations as this also works for ciculars.
[914]4712 """
4713 varlist = vars()
[936]4714 self._math._swap_linears(self)
[914]4715 self._add_history("swap_linears", varlist)
4716 return
4717
[1862]4718 @asaplog_post_dec
[914]4719 def invert_phase(self):
[1846]4720 """\
[914]4721 Invert the phase of the complex polarisation
4722 """
4723 varlist = vars()
[936]4724 self._math._invert_phase(self)
[914]4725 self._add_history("invert_phase", varlist)
4726 return
4727
[1862]4728 @asaplog_post_dec
[876]4729 def add(self, offset, insitu=None):
[1846]4730 """\
[513]4731 Return a scan where all spectra have the offset added
[1846]4732
[513]4733 Parameters:
[1846]4734
[513]4735 offset: the offset
[1855]4736
[513]4737 insitu: if False a new scantable is returned.
4738 Otherwise, the scaling is done in-situ
4739 The default is taken from .asaprc (False)
[1846]4740
[513]4741 """
4742 if insitu is None: insitu = rcParams['insitu']
[876]4743 self._math._setinsitu(insitu)
[513]4744 varlist = vars()
[876]4745 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]4746 s._add_history("add", varlist)
[876]4747 if insitu:
4748 self._assign(s)
4749 else:
[513]4750 return s
4751
[1862]4752 @asaplog_post_dec
[1308]4753 def scale(self, factor, tsys=True, insitu=None):
[1846]4754 """\
4755
[1938]4756 Return a scan where all spectra are scaled by the given 'factor'
[1846]4757
[513]4758 Parameters:
[1846]4759
[1819]4760 factor: the scaling factor (float or 1D float list)
[1855]4761
[513]4762 insitu: if False a new scantable is returned.
4763 Otherwise, the scaling is done in-situ
4764 The default is taken from .asaprc (False)
[1855]4765
[513]4766 tsys: if True (default) then apply the operation to Tsys
4767 as well as the data
[1846]4768
[513]4769 """
4770 if insitu is None: insitu = rcParams['insitu']
[876]4771 self._math._setinsitu(insitu)
[513]4772 varlist = vars()
[1819]4773 s = None
4774 import numpy
4775 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]4776 if isinstance(factor[0], list) or isinstance(factor[0],
4777 numpy.ndarray):
[1819]4778 from asapmath import _array2dOp
[2320]4779 s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]4780 else:
[2320]4781 s = scantable( self._math._arrayop( self, factor,
4782 "MUL", tsys ) )
[1819]4783 else:
[2320]4784 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]4785 s._add_history("scale", varlist)
[876]4786 if insitu:
4787 self._assign(s)
4788 else:
[513]4789 return s
4790
[2349]4791 @preserve_selection
4792 def set_sourcetype(self, match, matchtype="pattern",
[1504]4793 sourcetype="reference"):
[1846]4794 """\
[1502]4795 Set the type of the source to be an source or reference scan
[1846]4796 using the provided pattern.
4797
[1502]4798 Parameters:
[1846]4799
[1504]4800 match: a Unix style pattern, regular expression or selector
[1855]4801
[1504]4802 matchtype: 'pattern' (default) UNIX style pattern or
4803 'regex' regular expression
[1855]4804
[1502]4805 sourcetype: the type of the source to use (source/reference)
[1846]4806
[1502]4807 """
4808 varlist = vars()
4809 stype = -1
[2480]4810 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
[1502]4811 stype = 1
[2480]4812 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
[1502]4813 stype = 0
[1504]4814 else:
[2480]4815 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
[1504]4816 if matchtype.lower().startswith("p"):
4817 matchtype = "pattern"
4818 elif matchtype.lower().startswith("r"):
4819 matchtype = "regex"
4820 else:
4821 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]4822 sel = selector()
4823 if isinstance(match, selector):
4824 sel = match
4825 else:
[2480]4826 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4827 self.set_selection(sel)
[1502]4828 self._setsourcetype(stype)
[1573]4829 self._add_history("set_sourcetype", varlist)
[1502]4830
[2818]4831
    def set_sourcename(self, name):
        """\
        Set the source name of the data.

        Parameters:

            name:    the new source name

        NOTE(review): delegates to the C++ layer via _setsourcename;
        presumably applies to all rows in the current selection -- confirm.
        """
        varlist = vars()
        self._setsourcename(name)
        self._add_history("set_sourcename", varlist)
4836
[1862]4837 @asaplog_post_dec
[1857]4838 @preserve_selection
[1819]4839 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]4840 """\
[670]4841 This function allows to build quotients automatically.
[1819]4842 It assumes the observation to have the same number of
[670]4843 "ons" and "offs"
[1846]4844
[670]4845 Parameters:
[1846]4846
[710]4847 preserve: you can preserve (default) the continuum or
4848 remove it. The equations used are
[1857]4849
[670]4850 preserve: Output = Toff * (on/off) - Toff
[1857]4851
[1070]4852 remove: Output = Toff * (on/off) - Ton
[1855]4853
[1573]4854 mode: the on/off detection mode
[1348]4855 'paired' (default)
4856 identifies 'off' scans by the
4857 trailing '_R' (Mopra/Parkes) or
4858 '_e'/'_w' (Tid) and matches
4859 on/off pairs from the observing pattern
[1502]4860 'time'
4861 finds the closest off in time
[1348]4862
[1857]4863 .. todo:: verify argument is not implemented
4864
[670]4865 """
[1857]4866 varlist = vars()
[1348]4867 modes = ["time", "paired"]
[670]4868 if not mode in modes:
[876]4869 msg = "please provide valid mode. Valid modes are %s" % (modes)
4870 raise ValueError(msg)
[1348]4871 s = None
4872 if mode.lower() == "paired":
[2840]4873 from asap._asap import srctype
[1857]4874 sel = self.get_selection()
[2840]4875 #sel.set_query("SRCTYPE==psoff")
4876 sel.set_types(srctype.psoff)
[1356]4877 self.set_selection(sel)
[1348]4878 offs = self.copy()
[2840]4879 #sel.set_query("SRCTYPE==pson")
4880 sel.set_types(srctype.pson)
[1356]4881 self.set_selection(sel)
[1348]4882 ons = self.copy()
4883 s = scantable(self._math._quotient(ons, offs, preserve))
4884 elif mode.lower() == "time":
4885 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]4886 s._add_history("auto_quotient", varlist)
[876]4887 return s
[710]4888
[1862]4889 @asaplog_post_dec
[1145]4890 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]4891 """\
[1143]4892 Form a quotient using "off" beams when observing in "MX" mode.
[1846]4893
[1143]4894 Parameters:
[1846]4895
[1145]4896 mask: an optional mask to be used when weight == 'stddev'
[1855]4897
[1143]4898 weight: How to average the off beams. Default is 'median'.
[1855]4899
[1145]4900 preserve: you can preserve (default) the continuum or
[1855]4901 remove it. The equations used are:
[1846]4902
[1855]4903 preserve: Output = Toff * (on/off) - Toff
4904
4905 remove: Output = Toff * (on/off) - Ton
4906
[1217]4907 """
[1593]4908 mask = mask or ()
[1141]4909 varlist = vars()
4910 on = scantable(self._math._mx_extract(self, 'on'))
[1143]4911 preoff = scantable(self._math._mx_extract(self, 'off'))
4912 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]4913 from asapmath import quotient
[1145]4914 q = quotient(on, off, preserve)
[1143]4915 q._add_history("mx_quotient", varlist)
[1217]4916 return q
[513]4917
[1862]4918 @asaplog_post_dec
[718]4919 def freq_switch(self, insitu=None):
[1846]4920 """\
[718]4921 Apply frequency switching to the data.
[1846]4922
[718]4923 Parameters:
[1846]4924
[718]4925 insitu: if False a new scantable is returned.
4926 Otherwise, the swictching is done in-situ
4927 The default is taken from .asaprc (False)
[1846]4928
[718]4929 """
4930 if insitu is None: insitu = rcParams['insitu']
[876]4931 self._math._setinsitu(insitu)
[718]4932 varlist = vars()
[876]4933 s = scantable(self._math._freqswitch(self))
[1118]4934 s._add_history("freq_switch", varlist)
[1856]4935 if insitu:
4936 self._assign(s)
4937 else:
4938 return s
[718]4939
[1862]4940 @asaplog_post_dec
[780]4941 def recalc_azel(self):
[1846]4942 """Recalculate the azimuth and elevation for each position."""
[780]4943 varlist = vars()
[876]4944 self._recalcazel()
[780]4945 self._add_history("recalc_azel", varlist)
4946 return
4947
[1862]4948 @asaplog_post_dec
[513]4949 def __add__(self, other):
[2574]4950 """
4951 implicit on all axes and on Tsys
4952 """
[513]4953 varlist = vars()
[2574]4954 s = self.__op( other, "ADD" )
[513]4955 s._add_history("operator +", varlist)
4956 return s
4957
[1862]4958 @asaplog_post_dec
[513]4959 def __sub__(self, other):
4960 """
4961 implicit on all axes and on Tsys
4962 """
4963 varlist = vars()
[2574]4964 s = self.__op( other, "SUB" )
[513]4965 s._add_history("operator -", varlist)
4966 return s
[710]4967
[1862]4968 @asaplog_post_dec
[513]4969 def __mul__(self, other):
4970 """
4971 implicit on all axes and on Tsys
4972 """
4973 varlist = vars()
[2574]4974 s = self.__op( other, "MUL" ) ;
[513]4975 s._add_history("operator *", varlist)
4976 return s
4977
[710]4978
[1862]4979 @asaplog_post_dec
[513]4980 def __div__(self, other):
4981 """
4982 implicit on all axes and on Tsys
4983 """
4984 varlist = vars()
[2574]4985 s = self.__op( other, "DIV" )
4986 s._add_history("operator /", varlist)
4987 return s
4988
4989 @asaplog_post_dec
4990 def __op( self, other, mode ):
[513]4991 s = None
4992 if isinstance(other, scantable):
[2574]4993 s = scantable(self._math._binaryop(self, other, mode))
[513]4994 elif isinstance(other, float):
4995 if other == 0.0:
[718]4996 raise ZeroDivisionError("Dividing by zero is not recommended")
[2574]4997 s = scantable(self._math._unaryop(self, other, mode, False))
[2144]4998 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2349]4999 if isinstance(other[0], list) \
5000 or isinstance(other[0], numpy.ndarray):
[2144]5001 from asapmath import _array2dOp
[2574]5002 s = _array2dOp( self, other, mode, False )
[2144]5003 else:
[2574]5004 s = scantable( self._math._arrayop( self, other,
5005 mode, False ) )
[513]5006 else:
[718]5007 raise TypeError("Other input is not a scantable or float value")
[513]5008 return s
5009
[1862]5010 @asaplog_post_dec
[530]5011 def get_fit(self, row=0):
[1846]5012 """\
[530]5013 Print or return the stored fits for a row in the scantable
[1846]5014
[530]5015 Parameters:
[1846]5016
[530]5017 row: the row which the fit has been applied to.
[1846]5018
[530]5019 """
5020 if row > self.nrow():
5021 return
[976]5022 from asap.asapfit import asapfit
[530]5023 fit = asapfit(self._getfit(row))
[1859]5024 asaplog.push( '%s' %(fit) )
5025 return fit.as_dict()
[530]5026
[2349]5027 @preserve_selection
[1483]5028 def flag_nans(self):
[1846]5029 """\
[1483]5030 Utility function to flag NaN values in the scantable.
5031 """
5032 import numpy
5033 basesel = self.get_selection()
5034 for i in range(self.nrow()):
[1589]5035 sel = self.get_row_selector(i)
5036 self.set_selection(basesel+sel)
[1483]5037 nans = numpy.isnan(self._getspectrum(0))
[2877]5038 if numpy.any(nans):
5039 bnans = [ bool(v) for v in nans]
5040 self.flag(bnans)
5041
5042 self.set_selection(basesel)
[1483]5043
    def get_row_selector(self, rowno):
        """Return a selector object restricted to the single row 'rowno'."""
        return selector(rows=[rowno])
[1573]5046
[484]5047 def _add_history(self, funcname, parameters):
[1435]5048 if not rcParams['scantable.history']:
5049 return
[484]5050 # create date
5051 sep = "##"
5052 from datetime import datetime
5053 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5054 hist = dstr+sep
5055 hist += funcname+sep#cdate+sep
[2349]5056 if parameters.has_key('self'):
5057 del parameters['self']
[1118]5058 for k, v in parameters.iteritems():
[484]5059 if type(v) is dict:
[1118]5060 for k2, v2 in v.iteritems():
[484]5061 hist += k2
5062 hist += "="
[1118]5063 if isinstance(v2, scantable):
[484]5064 hist += 'scantable'
5065 elif k2 == 'mask':
[1118]5066 if isinstance(v2, list) or isinstance(v2, tuple):
[513]5067 hist += str(self._zip_mask(v2))
5068 else:
5069 hist += str(v2)
[484]5070 else:
[513]5071 hist += str(v2)
[484]5072 else:
5073 hist += k
5074 hist += "="
[1118]5075 if isinstance(v, scantable):
[484]5076 hist += 'scantable'
5077 elif k == 'mask':
[1118]5078 if isinstance(v, list) or isinstance(v, tuple):
[513]5079 hist += str(self._zip_mask(v))
5080 else:
5081 hist += str(v)
[484]5082 else:
5083 hist += str(v)
5084 hist += sep
5085 hist = hist[:-2] # remove trailing '##'
5086 self._addhistory(hist)
5087
[710]5088
[484]5089 def _zip_mask(self, mask):
5090 mask = list(mask)
5091 i = 0
5092 segments = []
5093 while mask[i:].count(1):
5094 i += mask[i:].index(1)
5095 if mask[i:].count(0):
5096 j = i + mask[i:].index(0)
5097 else:
[710]5098 j = len(mask)
[1118]5099 segments.append([i, j])
[710]5100 i = j
[484]5101 return segments
[714]5102
[626]5103 def _get_ordinate_label(self):
5104 fu = "("+self.get_fluxunit()+")"
5105 import re
5106 lbl = "Intensity"
[1118]5107 if re.match(".K.", fu):
[626]5108 lbl = "Brightness Temperature "+ fu
[1118]5109 elif re.match(".Jy.", fu):
[626]5110 lbl = "Flux density "+ fu
5111 return lbl
[710]5112
[876]5113 def _check_ifs(self):
[2349]5114# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
[1986]5115 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]5116 nchans = filter(lambda t: t > 0, nchans)
[876]5117 return (sum(nchans)/len(nchans) == nchans[0])
[976]5118
[1862]5119 @asaplog_post_dec
[1916]5120 def _fill(self, names, unit, average, opts={}):
[976]5121 first = True
5122 fullnames = []
5123 for name in names:
5124 name = os.path.expandvars(name)
5125 name = os.path.expanduser(name)
5126 if not os.path.exists(name):
5127 msg = "File '%s' does not exists" % (name)
5128 raise IOError(msg)
5129 fullnames.append(name)
5130 if average:
5131 asaplog.push('Auto averaging integrations')
[1079]5132 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]5133 for name in fullnames:
[1073]5134 tbl = Scantable(stype)
[2004]5135 if is_ms( name ):
5136 r = msfiller( tbl )
5137 else:
5138 r = filler( tbl )
[976]5139 msg = "Importing %s..." % (name)
[1118]5140 asaplog.push(msg, False)
[2349]5141 r.open(name, opts)
[2480]5142 rx = rcParams['scantable.reference']
5143 r.setreferenceexpr(rx)
[1843]5144 r.fill()
[976]5145 if average:
[1118]5146 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]5147 if not first:
[2902]5148 tbl = self._math._merge([self, tbl])
[976]5149 Scantable.__init__(self, tbl)
[1843]5150 r.close()
[1118]5151 del r, tbl
[976]5152 first = False
[1861]5153 #flush log
5154 asaplog.post()
[976]5155 if unit is not None:
5156 self.set_fluxunit(unit)
[1824]5157 if not is_casapy():
5158 self.set_freqframe(rcParams['scantable.freqframe'])
[976]5159
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve an interactive verification action.

        Parameters:

            msg:     the prompt text shown to the user

            action:  a preset action string; if empty/None or not a
                     string, the user is prompted interactively

        Returns one of 'Y', 'N', 'A' or 'R'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # a bare return defaults to Yes
            return "Y"
        elif (action.upper()[0] in valid_act):
            # only the first character matters, case-insensitively
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt after showing the help text
            return self._get_verify_action(msg)
        else:
            # any unrecognised input is treated as Yes
            return 'Y'
[2012]5178
[1402]5179 def __getitem__(self, key):
5180 if key < 0:
5181 key += self.nrow()
5182 if key >= self.nrow():
5183 raise IndexError("Row index out of range.")
5184 return self._getspectrum(key)
5185
5186 def __setitem__(self, key, value):
5187 if key < 0:
5188 key += self.nrow()
5189 if key >= self.nrow():
5190 raise IndexError("Row index out of range.")
5191 if not hasattr(value, "__len__") or \
5192 len(value) > self.nchan(self.getif(key)):
5193 raise ValueError("Spectrum length doesn't match.")
5194 return self._setspectrum(value, key)
5195
    def __len__(self):
        # a scantable's length is its number of rows (spectra)
        return self.nrow()
5198
5199 def __iter__(self):
5200 for i in range(len(self)):
5201 yield self[i]
Note: See TracBrowser for help on using the repository browser.