source: trunk/python/scantable.py@ 2909

Last change on this file since 2909 was 2909, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes:

Module(s): sd

Description: (1) a bug fix in sd.scantable.parse_spw_selection(). modified to get rest frequency value correctly based on molecule ID stored for each spw (the first spectrum having the specified spw in scantable). (2) renamed 'annotation' to 'text' in class or variable names for sd.plotter2.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 205.7 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator: run *func*, then restore the scantable's original selection.

    The selection active before the call is captured first and re-applied
    in a finally block, so it is restored even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a directory containing the casacore
    table files 'table.info' and 'table.dat' and the first line of
    'table.info' carries a Scantable type marker.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # the table type is recorded on the first line of table.info,
    # e.g. 'Type = Scantable'
    with open(filename + '/table.info') as f:
        first_line = f.readline()
    # NOTE: the '(Scantable)?' group is optional, so a bare 'Type = ' line
    # also matches; kept as-is for backward compatibility with old tables
    match_pattern = '^Type = (Scantable)? *$'
    return bool(re.match(match_pattern, first_line))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a casacore table directory whose
    'table.info' header mentions 'Measurement Set'.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # the table type is recorded on the first line of table.info
    with open(filename + '/table.info') as f:
        first_line = f.readline()
    return 'Measurement Set' in first_line
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

          If an element contains the same integer values, the input 'edge'
          parameter can be given in a simpler shape in the following cases:
              ** when len(edge)!=2
                  any elements containing the same values can be replaced
                  to single integers.
                  e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                  e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
              ** when len(edge)=2
                  care is needed for this case: ONLY ONE of the
                  elements can be a single integer,
                  e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                  or [[5,5],10], but can NOT be simplified to [5,10].
                  when [5,10] given, it is interpreted as
                  [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -------------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- canonicalise trivial shapes --------------------------------------
    if isinstance(edge, int):
        edge = [edge, edge]          # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]            # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]  # e.g. [1] => [1,1]

    # a "common" edge (applied to every IF) is a flat pair at most 2 long
    commonedge = len(edge) <= 2 and not any(
        isinstance(edgepar, (list, tuple)) for edgepar in edge)

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # per-IF specification: expand each element to a pair
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when verbose, otherwise re-raise.

    In verbose mode the original error and a hint are pushed to the ASAP
    log; otherwise a RuntimeError combining both is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress-display options as 'true|false,<minnrow>' for the C++ layer."""
    flag = str(showprogress).lower()
    return "%s,%s" % (flag, minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    Convert a dictionary or a list of dictionaries of baseline info
    into a list of packed parameter strings (see do_pack_blinfo for
    the per-spectrum format).

    Entries whose row number is outside the scantable (do_pack_blinfo
    returns '') are dropped.
    NOTE(review): any other input type falls through and returns None,
    matching the original behaviour.
    """
    if isinstance(blinfo, dict):
        res = do_pack_blinfo(blinfo, maxirow)
        return [res] if res != '' else []
    elif isinstance(blinfo, (list, tuple)):
        res = []
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    Convert a dictionary of baseline info for a spectrum into a packed
    string: colon-separated fields, list-valued fields joined by commas.

    Required keys: 'row', 'blfunc', 'masklist' and the function-specific
    parameter key ('order' for poly/chebyshev, 'npiece' for cspline,
    'nwave' for sinusoid). Optional: 'clipthresh'+'clipniter' (both or
    neither) and 'thresh'+'edge'+'chan_avg_limit' (all or none; enables
    the linefinder). Returns '' when 'row' is outside 0..maxirow-1.
    """
    dinfo = {}
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        if isinstance(val, (list, tuple)):
            # flatten one level of nesting (masklist is a list of pairs)
            slval = []
            for item in val:
                if isinstance(item, (list, tuple)):
                    slval.extend(str(sub) for sub in item)
                else:
                    slval.append(str(item))
            sval = ",".join(slval)
        else:
            sval = str(val)
        dinfo[key] = sval

    # rows beyond the scantable are silently skipped by the caller
    if irow >= maxirow:
        return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            val = blinfo[key]
            if isinstance(val, (list, tuple)):
                sval = ",".join(str(item) for item in val)
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, (list, tuple)):
                sval = ",".join(str(item) for item in val)
            else:
                sval = str(val)
            dinfo[key] = sval
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    for item in sres:
        srow, scoeff, srms = item.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True when string *s* (ignoring surrounding blanks) parses as a float.

    The original used a bare except plus a 'finally: return' which
    swallowed every exception (including SystemExit); only the float
    conversion errors are caught now.
    """
    try:
        float(s.strip())
        return True
    except (TypeError, ValueError):
        return False
313
def is_frequency(s):
    """Return True when the stripped string *s* ends in 'Hz' (any case)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert the number string *s1* and frequency string *s2* into Hz.

    *s2* must end in 'Hz', optionally with an SI prefix (a..E); the same
    scale factor is applied to both values. Returns (value1, value2).
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefixes = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factors = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    val1 = s1.strip()
    val2 = s2.strip()

    # character in front of 'Hz': a digit means no SI prefix
    prefix = val2[-3:-2]
    if is_number(prefix):
        return (float(val1), float(val2[:-2]))
    scale = factors[prefixes.index(prefix)]
    return (float(val1) * scale, float(val2[:-3]) * scale)
338
def is_velocity(s):
    """Return True when the stripped string *s* ends in 'm/s' (any case)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert the number string *s1* and velocity string *s2* into km/s.

    *s2* must end in 'm/s'; a plain 'm/s' is scaled to km/s, a 'km/s'
    suffix is taken as-is. Returns (value1, value2).
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefixes = [".", "k"]
    factors = [1e-3, 1.0]

    val1 = s1.strip()
    val2 = s2.strip()

    # character in front of 'm/s': a digit means the unit is plain m/s
    prefix = val2[-4:-3]
    if is_number(prefix):
        return (float(val1) * 1e-3, float(val2[:-3]) * 1e-3)
    scale = factors[prefixes.index(prefix)]
    return (float(val1) * scale, float(val2[:-4]) * scale)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Shift *restfreq* by velocity *vel* (km/s) using the given doppler convention.

    Supported conventions: 'radio', 'optical'; anything else uses the
    relativistic formula.
    """
    import math

    # speed of light in km/s
    vel_c = 299792.458
    beta = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - beta)
    elif convention == 'optical':
        return restfreq / (1.0 + beta)
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
380
def get_restfreq_in_Hz(s_restfreq):
    """\
    Convert a rest-frequency string like '115.27GHz' into a float in Hz.

    A plain number (no unit) or a 'Hz'-suffixed value is returned as-is;
    a recognised SI prefix (a..E) in front of 'Hz' scales the value.
    Raises Exception for an unrecognised unit.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # walk backwards collecting trailing alphabetic characters as the
    # unit; everything before them parses as the numeric value
    for i in reversed(range(len(s))):
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value * factor
    else:
        raise Exception("wrong unit of restfreq.")
432
def normalise_restfreq(in_restfreq):
    """\
    Normalise a rest-frequency specification into float Hz value(s).

    Accepts a float/int (returned as float), a unit-suffixed string
    (e.g. '115.27GHz'), or a 1-D list/numpy.ndarray whose elements are
    any of the above or dicts with 'name' and 'value' keys; list input
    yields a list with every value coerced to float Hz.
    Raises Exception for unsupported types or a multi-dimensional array.
    """
    # 'long' exists only in Python 2; fall back to int alone on Python 3
    try:
        integer_types = (int, long)
    except NameError:
        integer_types = (int,)

    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, integer_types):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, (list, numpy.ndarray)):
        if isinstance(in_restfreq, numpy.ndarray):
            if len(in_restfreq.shape) > 1:
                raise Exception("given in numpy.ndarray, in_restfreq must be 1-D.")

        res = []
        for elem in in_restfreq:
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, integer_types):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                value = elem["value"]
                if isinstance(value, float):
                    res.append(elem)
                elif isinstance(value, integer_types):
                    res.append({"name": elem["name"], "value": float(value)})
                elif isinstance(value, str):
                    res.append({"name": elem["name"],
                                "value": get_restfreq_in_Hz(value)})
                else:
                    # the original silently dropped dicts with unsupported
                    # value types; report them instead
                    raise Exception("restfreq elements must be float, int, or string.")
            else:
                raise Exception("restfreq elements must be float, int, or string.")
        return res
    else:
        raise Exception("wrong type of restfreq given.")
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*; an empty string or list is a no-op."""
    if restfreq != '' and restfreq != []:
        s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": share the underlying table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume NRO format
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names is filled as one scantable
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
        self._add_history("scantable", varlist)
572
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as a casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the rc-configured default format
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS output goes through the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats are handled by stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
631
632 def copy(self):
633 """Return a copy of this scantable.
634
635 *Note*:
636
637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
638
639 Example::
640
641 copiedscan = scan.copy()
642
643 """
644 sd = scantable(Scantable._copy(self))
645 return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
678 def get_scan(self, scanid=None):
679 """\
680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
682
683 *Note*:
684
685 See scantable.drop_scan() for the inverse operation.
686
687 Parameters:
688
689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
692
693 Example::
694
695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
700 newscan = scan.get_scan([0, 2, 7, 10])
701
702 """
703 if scanid is None:
704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
706 try:
707 bsel = self.get_selection()
708 sel = selector()
709 if type(scanid) is str:
710 sel.set_name(scanid)
711 return self._select_copy(sel)
712 elif type(scanid) is int:
713 sel.set_scans([scanid])
714 return self._select_copy(sel)
715 elif type(scanid) is list:
716 sel.set_scans(scanid)
717 return self._select_copy(sel)
718 else:
719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
720 raise TypeError(msg)
721 except RuntimeError:
722 raise
723
    def __str__(self):
        # Scantable._summary writes its report into a file, so route it
        # through a NamedTemporaryFile and read the text back.
        tempFile = tempfile.NamedTemporaryFile()
        Scantable._summary(self, tempFile.name)
        tempFile.seek(0)
        # drop any log messages produced while summarising
        asaplog.clear()
        return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
754 def get_spectrum(self, rowno):
755 """Return the spectrum for the current row in the scantable as a list.
756
757 Parameters:
758
759 rowno: the row number to retrieve the spectrum from
760
761 """
762 return self._getspectrum(rowno)
763
764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
766
767 Parameters:
768
769 rowno: the row number to retrieve the mask from
770
771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
789
790 *Note*:
791
792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
798 i.e.::
799
800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
803 Parameters:
804
805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
810 def get_selection(self):
811 """\
812 Get the selection object currently set on this scantable.
813
814 Example::
815
816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
819
820 """
821 return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' statistics return the abscissa (channel/freq/velocity) of
        # the extremum rather than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also report the channel at which the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): 'rows' stays unbound if row is neither None nor
        # an int, and 'statunit' is only set inside the loop — both
        # would raise NameError for unusual inputs; confirm callers.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa statistic: collect the x-value, annotate y
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # write the report to a per-user temp file, then read it back and
        # push it to the ASAP log in one piece
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
1002 """\
1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
1005
1006 Parameters:
1007
1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
1010
1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
1013
1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
1022 def stddev(self, mask=None):
1023 """\
1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
1027
1028 Parameters:
1029
1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
1033 Example::
1034
1035 scan.set_unit('channel')
1036 msk = scan.create_mask([100, 200], [500, 600])
1037 scan.stddev(mask=m)
1038
1039 """
1040 return self.stats(stat='stddev', mask=mask);
1041
1042
1043 def get_column_names(self):
1044 """\
1045 Return a list of column names, which can be used for selection.
1046 """
1047 return list(Scantable.get_column_names(self))
1048
1049 def get_tsys(self, row=-1):
1050 """\
1051 Return the System temperatures.
1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
1057 Returns:
1058
1059 a list of Tsys values for the current selection
1060
1061 """
1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
1064 return self._row_callback(self._gettsys, "Tsys")
1065
1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
1069
1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
1098 def get_weather(self, row=-1):
1099 """\
1100 Return the weather informations.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111
1112 values = self._get_column(self._get_weather, row)
1113 if row > -1:
1114 return {'temperature': values[0],
1115 'pressure': values[1], 'humidity' : values[2],
1116 'windspeed' : values[3], 'windaz' : values[4]
1117 }
1118 else:
1119 out = []
1120 for r in values:
1121
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
    def _row_callback(self, callback, label):
        """\
        Apply ``callback`` to every row of the scantable, collecting the
        per-row results, and push a formatted per-row report (scan, time,
        beam/IF/pol if multi-valued, and the callback value) to the log.

        Parameters:

            callback:    a callable taking a row number and returning a
                         numeric value (formatted with %3.3f below).
            label:       a heading printed in the log output.

        Returns:

            the list of callback results, one per row.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # Beam/IF/Pol ids are only shown when more than one exists.
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
1151
1152 def _get_column(self, callback, row=-1, *args):
1153 """
1154 """
1155 if row == -1:
1156 return [callback(i, *args) for i in range(self.nrow())]
1157 else:
1158 if 0 <= row < self.nrow():
1159 return callback(row, *args)
1160
1161
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (based on the order of magnitude of the shortest integration)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # Trim trailing format fields (.%f, %S, %M) to match the
            # precision of the strings returned by _gettime.
            # NOTE: relies on Python 2 integer division in (6-prec)/2.
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1207
1208
1209 def get_inttime(self, row=-1):
1210 """\
1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
1213
1214 Parameters:
1215
1216 row: row no of integration. Default -1 return all rows.
1217
1218 """
1219 return self._get_column(self._getinttime, row)
1220
1221
1222 def get_sourcename(self, row=-1):
1223 """\
1224 Get a list source names for the observations.
1225 Return a string for each integration in the scantable.
1226 Parameters:
1227
1228 row: row no of integration. Default -1 return all rows.
1229
1230 """
1231 return self._get_column(self._getsourcename, row)
1232
1233 def get_elevation(self, row=-1):
1234 """\
1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
1237
1238 Parameters:
1239
1240 row: row no of integration. Default -1 return all rows.
1241
1242 """
1243 return self._get_column(self._getelevation, row)
1244
1245 def get_azimuth(self, row=-1):
1246 """\
1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
1249
1250 Parameters:
1251 row: row no of integration. Default -1 return all rows.
1252
1253 """
1254 return self._get_column(self._getazimuth, row)
1255
1256 def get_parangle(self, row=-1):
1257 """\
1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
1260
1261 Parameters:
1262
1263 row: row no of integration. Default -1 return all rows.
1264
1265 """
1266 return self._get_column(self._getparangle, row)
1267
1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
1271 Return a string for each integration in the scantable.
1272
1273 Parameters:
1274
1275 row: row no of integration. Default -1 return all rows
1276
1277 """
1278 return self._get_column(self._getdirection, row)
1279
1280 def get_directionval(self, row=-1):
1281 """\
1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
1284
1285 Parameters:
1286
1287 row: row no of integration. Default -1 return all rows
1288
1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
1292 @asaplog_post_dec
1293 def set_unit(self, unit='channel'):
1294 """\
1295 Set the unit for all following operations on this scantable
1296
1297 Parameters:
1298
1299 unit: optional unit, default is 'channel'. Use one of '*Hz',
1300 'km/s', 'channel' or equivalent ''
1301
1302 """
1303 varlist = vars()
1304 if unit in ['', 'pixel', 'channel']:
1305 unit = ''
1306 inf = list(self._getcoordinfo())
1307 inf[0] = unit
1308 self._setcoordinfo(inf)
1309 self._add_history("set_unit", varlist)
1310
1311 @asaplog_post_dec
1312 def set_instrument(self, instr):
1313 """\
1314 Set the instrument for subsequent processing.
1315
1316 Parameters:
1317
1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1320
1321 """
1322 self._setInstrument(instr)
1323 self._add_history("set_instument", vars())
1324
1325 @asaplog_post_dec
1326 def set_feedtype(self, feedtype):
1327 """\
1328 Overwrite the feed type, which might not be set correctly.
1329
1330 Parameters:
1331
1332 feedtype: 'linear' or 'circular'
1333
1334 """
1335 self._setfeedtype(feedtype)
1336 self._add_history("set_feedtype", vars())
1337
1338 @asaplog_post_dec
1339 def get_doppler(self):
1340 """\
1341 Get the doppler.
1342 """
1343 return self._getcoordinfo()[2]
1344
1345 @asaplog_post_dec
1346 def set_doppler(self, doppler='RADIO'):
1347 """\
1348 Set the doppler for all following operations on this scantable.
1349
1350 Parameters:
1351
1352 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1353
1354 """
1355 varlist = vars()
1356 inf = list(self._getcoordinfo())
1357 inf[2] = doppler
1358 self._setcoordinfo(inf)
1359 self._add_history("set_doppler", vars())
1360
1361 @asaplog_post_dec
1362 def set_freqframe(self, frame=None):
1363 """\
1364 Set the frame type of the Spectral Axis.
1365
1366 Parameters:
1367
1368 frame: an optional frame type, default 'LSRK'. Valid frames are:
1369 'TOPO', 'LSRD', 'LSRK', 'BARY',
1370 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1371
1372 Example::
1373
1374 scan.set_freqframe('BARY')
1375
1376 """
1377 frame = frame or rcParams['scantable.freqframe']
1378 varlist = vars()
1379 # "REST" is not implemented in casacore
1380 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1381 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1382 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1383 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1384
1385 if frame in valid:
1386 inf = list(self._getcoordinfo())
1387 inf[1] = frame
1388 self._setcoordinfo(inf)
1389 self._add_history("set_freqframe", varlist)
1390 else:
1391 msg = "Please specify a valid freq type. Valid types are:\n", valid
1392 raise TypeError(msg)
1393
1394 @asaplog_post_dec
1395 def set_dirframe(self, frame=""):
1396 """\
1397 Set the frame type of the Direction on the sky.
1398
1399 Parameters:
1400
1401 frame: an optional frame type, default ''. Valid frames are:
1402 'J2000', 'B1950', 'GALACTIC'
1403
1404 Example:
1405
1406 scan.set_dirframe('GALACTIC')
1407
1408 """
1409 varlist = vars()
1410 Scantable.set_dirframe(self, frame)
1411 self._add_history("set_dirframe", varlist)
1412
1413 def get_unit(self):
1414 """\
1415 Get the default unit set in this scantable
1416
1417 Returns:
1418
1419 A unit string
1420
1421 """
1422 inf = self._getcoordinfo()
1423 unit = inf[0]
1424 if unit == '': unit = 'channel'
1425 return unit
1426
1427 @asaplog_post_dec
1428 def get_abcissa(self, rowno=0):
1429 """\
1430 Get the abcissa in the current coordinate setup for the currently
1431 selected Beam/IF/Pol
1432
1433 Parameters:
1434
1435 rowno: an optional row number in the scantable. Default is the
1436 first row, i.e. rowno=0
1437
1438 Returns:
1439
1440 The abcissa values and the format string (as a dictionary)
1441
1442 """
1443 abc = self._getabcissa(rowno)
1444 lbl = self._getabcissalabel(rowno)
1445 return abc, lbl
1446
1447 @asaplog_post_dec
1448 def flag(self, mask=None, unflag=False, row=-1):
1449 """\
1450 Flag the selected data using an optional channel mask.
1451
1452 Parameters:
1453
1454 mask: an optional channel mask, created with create_mask. Default
1455 (no mask) is all channels.
1456
1457 unflag: if True, unflag the data
1458
1459 row: an optional row number in the scantable.
1460 Default -1 flags all rows
1461
1462 """
1463 varlist = vars()
1464 mask = mask or []
1465 self._flag(row, mask, unflag)
1466 self._add_history("flag", varlist)
1467
1468 @asaplog_post_dec
1469 def flag_row(self, rows=None, unflag=False):
1470 """\
1471 Flag the selected data in row-based manner.
1472
1473 Parameters:
1474
1475 rows: list of row numbers to be flagged. Default is no row
1476 (must be explicitly specified to execute row-based
1477 flagging).
1478
1479 unflag: if True, unflag the data.
1480
1481 """
1482 varlist = vars()
1483 if rows is None:
1484 rows = []
1485 self._flag_row(rows, unflag)
1486 self._add_history("flag_row", varlist)
1487
1488 @asaplog_post_dec
1489 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1490 """\
1491 Flag the selected data outside a specified range (in channel-base)
1492
1493 Parameters:
1494
1495 uthres: upper threshold.
1496
1497 dthres: lower threshold
1498
1499 clipoutside: True for flagging data outside the range
1500 [dthres:uthres].
1501 False for flagging data inside the range.
1502
1503 unflag: if True, unflag the data.
1504
1505 """
1506 varlist = vars()
1507 self._clip(uthres, dthres, clipoutside, unflag)
1508 self._add_history("clip", varlist)
1509
1510 @asaplog_post_dec
1511 def lag_flag(self, start, end, unit="MHz", insitu=None):
1512 """\
1513 Flag the data in 'lag' space by providing a frequency to remove.
1514 Flagged data in the scantable get interpolated over the region.
1515 No taper is applied.
1516
1517 Parameters:
1518
1519 start: the start frequency (really a period within the
1520 bandwidth) or period to remove
1521
1522 end: the end frequency or period to remove
1523
1524 unit: the frequency unit (default 'MHz') or '' for
1525 explicit lag channels
1526
1527 *Notes*:
1528
1529 It is recommended to flag edges of the band or strong
1530 signals beforehand.
1531
1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
1536 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1537 if not (unit == "" or base.has_key(unit)):
1538 raise ValueError("%s is not a valid unit." % unit)
1539 if unit == "":
1540 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1541 else:
1542 s = scantable(self._math._lag_flag(self, start*base[unit],
1543 end*base[unit], "frequency"))
1544 s._add_history("lag_flag", varlist)
1545 if insitu:
1546 self._assign(s)
1547 else:
1548 return s
1549
1550 @asaplog_post_dec
1551 def fft(self, rowno=None, mask=None, getrealimag=False):
1552 """\
1553 Apply FFT to the spectra.
1554 Flagged data in the scantable get interpolated over the region.
1555
1556 Parameters:
1557
1558 rowno: The row number(s) to be processed. int, list
1559 and tuple are accepted. By default (None), FFT
1560 is applied to the whole data.
1561
1562 mask: Auxiliary channel mask(s). Given as a boolean
1563 list, it is applied to all specified rows.
1564 A list of boolean lists can also be used to
1565 apply different masks. In the latter case, the
1566 length of 'mask' must be the same as that of
1567 'rowno'. The default is None.
1568
1569 getrealimag: If True, returns the real and imaginary part
1570 values of the complex results.
1571 If False (the default), returns the amplitude
1572 (absolute value) normalised with Ndata/2 and
1573 phase (argument, in unit of radian).
1574
1575 Returns:
1576
1577 A list of dictionaries containing the results for each spectrum.
1578 Each dictionary contains two values, the real and the imaginary
1579 parts when getrealimag = True, or the amplitude(absolute value)
1580 and the phase(argument) when getrealimag = False. The key for
1581 these values are 'real' and 'imag', or 'ampl' and 'phase',
1582 respectively.
1583 """
1584 if rowno is None:
1585 rowno = []
1586 if isinstance(rowno, int):
1587 rowno = [rowno]
1588 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1589 raise TypeError("The row number(s) must be int, list or tuple.")
1590 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1591
1592 usecommonmask = True
1593
1594 if mask is None:
1595 mask = []
1596 if isinstance(mask, list) or isinstance(mask, tuple):
1597 if len(mask) == 0:
1598 mask = [[]]
1599 else:
1600 if isinstance(mask[0], bool):
1601 if len(mask) != self.nchan(self.getif(rowno[0])):
1602 raise ValueError("The spectra and the mask have "
1603 "different length.")
1604 mask = [mask]
1605 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1606 usecommonmask = False
1607 if len(mask) != len(rowno):
1608 raise ValueError("When specifying masks for each "
1609 "spectrum, the numbers of them "
1610 "must be identical.")
1611 for i in xrange(mask):
1612 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1613 raise ValueError("The spectra and the mask have "
1614 "different length.")
1615 else:
1616 raise TypeError("The mask must be a boolean list or "
1617 "a list of boolean list.")
1618 else:
1619 raise TypeError("The mask must be a boolean list or a list of "
1620 "boolean list.")
1621
1622 res = []
1623
1624 imask = 0
1625 for whichrow in rowno:
1626 fspec = self._fft(whichrow, mask[imask], getrealimag)
1627 nspec = len(fspec)
1628
1629 i = 0
1630 v1 = []
1631 v2 = []
1632 reselem = {"real":[],"imag":[]} if getrealimag \
1633 else {"ampl":[],"phase":[]}
1634
1635 while (i < nspec):
1636 v1.append(fspec[i])
1637 v2.append(fspec[i+1])
1638 i += 2
1639
1640 if getrealimag:
1641 reselem["real"] += v1
1642 reselem["imag"] += v2
1643 else:
1644 reselem["ampl"] += v1
1645 reselem["phase"] += v2
1646
1647 res.append(reselem)
1648
1649 if not usecommonmask:
1650 imask += 1
1651
1652 return res
1653
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk =  scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # Abcissa values of the reference row define the window unit.
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        # Start from an all-False mask and switch on the selected windows.
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # If the innermost last element is numeric, args is a sequence of
        # windows; otherwise the caller passed one list of windows.
        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # A single value selects exactly that point: [x] -> [x, x].
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or  len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1726
1727 def get_masklist(self, mask=None, row=0, silent=False):
1728 """\
1729 Compute and return a list of mask windows, [min, max].
1730
1731 Parameters:
1732
1733 mask: channel mask, created with create_mask.
1734
1735 row: calcutate the masklist using the specified row
1736 for unit conversions, default is row=0
1737 only necessary if frequency varies over rows.
1738
1739 Returns:
1740
1741 [min, max], [min2, max2], ...
1742 Pairs of start/end points (inclusive)specifying
1743 the masked regions
1744
1745 """
1746 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1747 raise TypeError("The mask should be list or tuple.")
1748 if len(mask) <= 0:
1749 raise TypeError("The mask elements should be > 0")
1750 data = self._getabcissa(row)
1751 if len(data) != len(mask):
1752 msg = "Number of channels in scantable != number of mask elements"
1753 raise TypeError(msg)
1754 u = self._getcoordinfo()[0]
1755 if u == "":
1756 u = "channel"
1757 msg = "The current mask window unit is %s" % u
1758 i = self._check_ifs()
1759 if not i:
1760 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1761 if not silent:
1762 asaplog.push(msg)
1763 masklist = []
1764 ist, ien = None, None
1765 ist, ien=self.get_mask_indices(mask)
1766 if ist is not None and ien is not None:
1767 for i in xrange(len(ist)):
1768 range=[data[ist[i]],data[ien[i]]]
1769 range.sort()
1770 masklist.append([range[0],range[1]])
1771 return masklist
1772
1773 def get_mask_indices(self, mask=None):
1774 """\
1775 Compute and Return lists of mask start indices and mask end indices.
1776
1777 Parameters:
1778
1779 mask: channel mask, created with create_mask.
1780
1781 Returns:
1782
1783 List of mask start indices and that of mask end indices,
1784 i.e., [istart1,istart2,....], [iend1,iend2,....].
1785
1786 """
1787 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1788 raise TypeError("The mask should be list or tuple.")
1789 if len(mask) <= 0:
1790 raise TypeError("The mask elements should be > 0")
1791 istart = []
1792 iend = []
1793 if mask[0]:
1794 istart.append(0)
1795 for i in range(len(mask)-1):
1796 if not mask[i] and mask[i+1]:
1797 istart.append(i+1)
1798 elif mask[i] and not mask[i+1]:
1799 iend.append(i)
1800 if mask[len(mask)-1]:
1801 iend.append(len(mask)-1)
1802 if len(istart) != len(iend):
1803 raise RuntimeError("Numbers of mask start != mask end.")
1804 for i in range(len(istart)):
1805 if istart[i] > iend[i]:
1806 raise RuntimeError("Mask start index > mask end index")
1807 break
1808 return istart,iend
1809
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*'   = all spws (all channels)
                           '<2,4~6,9'  = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
            restfreq: the rest frequency.
                     examples: '115.2712GHz', 115271201800.0
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
            doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # All channel arithmetic below is done in 'channel' units;
        # the original unit is restored at the end.
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            # Remember the per-row molecule IDs so they can be restored
            # after the temporary rest frequency is applied.
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # Each of the lengths above is 1 (separator absent) or 2
                # (separator present once); the product identifies which
                # single separator, if any, appears in this element.
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                # Select the spw when the requested pixel range
                                # overlaps the spw's [pmin, pmax] channel range.
                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                # Overlap test between requested and spw
                                # velocity ranges, analogous to the pixel case.
                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                # molecule ID of the first spectrum that has this spw;
                # used below to pick the right rest frequency.
                molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    # rest frequency is looked up via the
                                    # molecule ID stored for this spw.
                                    restf = self.get_restfreqs()[molid][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    dppl = self.get_doppler()
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if res.has_key(spw):
                    res[spw].extend(crange_list)
                else:
                    res[spw] = crange_list

        for spw in res.keys():
            if spw not in valid_ifs:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)

        return res
2162
2163 @asaplog_post_dec
2164 def get_first_rowno_by_if(self, ifno):
2165 found = False
2166 for irow in xrange(self.nrow()):
2167 if (self.getif(irow) == ifno):
2168 res = irow
2169 found = True
2170 break
2171
2172 if not found: raise RuntimeError("Invalid IF value.")
2173
2174 return res
2175
2176 @asaplog_post_dec
2177 def _get_coordinate_list(self):
2178 res = []
2179 spws = self.getifnos()
2180 for spw in spws:
2181 elem = {}
2182 elem['if'] = spw
2183 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2184 res.append(elem)
2185
2186 return res
2187
2188 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # invalid input is reported through the asap logger
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every IF (all channels): build
        # "if0, if1, ..." from the valid IF list
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # NOTE(review): selspws is re-initialised for every range in
            # spwmasklist, so only the last ';'-separated IF range
            # contributes -- confirm whether multiple IF ranges are
            # expected in one selection group.
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row belonging to this spw and use
                        # its abcissa to get the spectral range and the
                        # half-channel width (used below as the '<'/'>'
                        # boundary offset)
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is [min,max], [None,max] ('<'), [min,None]
                        # ('>') or None (whole spectrum)
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2325
2326 @asaplog_post_dec
2327 def parse_idx_selection(self, mode, selexpr):
2328 """
2329 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2330 BEAMNO, and row number
2331
2332 Parameters:
2333 mode : which column to select.
2334 ['scan',|'if'|'pol'|'beam'|'row']
2335 selexpr : A comma separated selection expression.
2336 examples:
2337 '' = all (returns [])
2338 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2339 (returns [0,1,4,5,6,9])
2340 Returns:
2341 A List of selected indices
2342 """
2343 if selexpr == "":
2344 return []
2345 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2346 'b': 'beam', 'r': 'row'}
2347 smode = mode.lower()[0]
2348 if not (smode in valid_modes.keys()):
2349 msg = "Invalid mode '%s'. Valid modes are %s" %\
2350 (mode, str(valid_modes.values()))
2351 asaplog.post()
2352 asaplog.push(msg)
2353 asaplog.post("ERROR")
2354 mode = valid_modes[smode]
2355 minidx = None
2356 maxidx = None
2357 if smode == 'r':
2358 minidx = 0
2359 maxidx = self.nrow()
2360 else:
2361 idx = getattr(self,"get"+mode+"nos")()
2362 minidx = min(idx)
2363 maxidx = max(idx)
2364 del idx
2365 # split selexpr by "<spaces>,<spaces>"
2366 comma_sep = re.compile('\s*,\s*')
2367 sellist = comma_sep.split(selexpr)
2368 idxlist = []
2369 for currselstr in sellist:
2370 # single range (may include ~, < or >)
2371 currlist = self._parse_selection(currselstr, typestr='integer',
2372 minval=minidx,maxval=maxidx)
2373 for thelist in currlist:
2374 idxlist += range(thelist[0],thelist[1]+1)
2375 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2376 asaplog.push(msg)
2377 return idxlist
2378
2379 def _parse_selection(self, selstr, typestr='float', offset=0.,
2380 minval=None, maxval=None):
2381 """
2382 Parameters:
2383 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2384 typestr : The type of the values in returned list
2385 ('integer' or 'float')
2386 offset : The offset value to subtract from or add to
2387 the boundary value if the selection string
2388 includes '<' or '>' [Valid only for typestr='float']
2389 minval, maxval : The minimum/maximum values to set if the
2390 selection string includes '<' or '>'.
2391 The list element is filled with None by default.
2392 Returns:
2393 A list of min/max pair of selections.
2394 Example:
2395 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2396 --> returns [[0,2],[5,7],[9,9]]
2397 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2398 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2399 """
2400 # split selstr by '<spaces>;<spaces>'
2401 semi_sep = re.compile('\s*;\s*')
2402 selgroups = semi_sep.split(selstr)
2403 sellists = []
2404 if typestr.lower().startswith('int'):
2405 formatfunc = int
2406 offset = 1
2407 else:
2408 formatfunc = float
2409
2410 for currsel in selgroups:
2411 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2412 minsel = minval
2413 maxsel = maxval
2414 if currsel.find('~') > 0:
2415 # val0 <= x <= val1
2416 minsel = formatfunc(currsel.split('~')[0].strip())
2417 maxsel = formatfunc(currsel.split('~')[1].strip())
2418 elif currsel.strip().find('<=') > -1:
2419 bound = currsel.split('<=')
2420 try: # try "x <= val"
2421 minsel = minval
2422 maxsel = formatfunc(bound[1].strip())
2423 except ValueError: # now "val <= x"
2424 minsel = formatfunc(bound[0].strip())
2425 maxsel = maxval
2426 elif currsel.strip().find('>=') > -1:
2427 bound = currsel.split('>=')
2428 try: # try "x >= val"
2429 minsel = formatfunc(bound[1].strip())
2430 maxsel = maxval
2431 except ValueError: # now "val >= x"
2432 minsel = minval
2433 maxsel = formatfunc(bound[0].strip())
2434 elif currsel.strip().find('<') > -1:
2435 bound = currsel.split('<')
2436 try: # try "x < val"
2437 minsel = minval
2438 maxsel = formatfunc(bound[1].strip()) \
2439 - formatfunc(offset)
2440 except ValueError: # now "val < x"
2441 minsel = formatfunc(bound[0].strip()) \
2442 + formatfunc(offset)
2443 maxsel = maxval
2444 elif currsel.strip().find('>') > -1:
2445 bound = currsel.split('>')
2446 try: # try "x > val"
2447 minsel = formatfunc(bound[1].strip()) \
2448 + formatfunc(offset)
2449 maxsel = maxval
2450 except ValueError: # now "val > x"
2451 minsel = minval
2452 maxsel = formatfunc(bound[0].strip()) \
2453 - formatfunc(offset)
2454 else:
2455 minsel = formatfunc(currsel)
2456 maxsel = formatfunc(currsel)
2457 sellists.append([minsel,maxsel])
2458 return sellists
2459
2460# def get_restfreqs(self):
2461# """
2462# Get the restfrequency(s) stored in this scantable.
2463# The return value(s) are always of unit 'Hz'
2464# Parameters:
2465# none
2466# Returns:
2467# a list of doubles
2468# """
2469# return list(self._getrestfreqs())
2470
2471 def get_restfreqs(self, ids=None):
2472 """\
2473 Get the restfrequency(s) stored in this scantable.
2474 The return value(s) are always of unit 'Hz'
2475
2476 Parameters:
2477
2478 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2479 be retrieved
2480
2481 Returns:
2482
2483 dictionary containing ids and a list of doubles for each id
2484
2485 """
2486 if ids is None:
2487 rfreqs = {}
2488 idlist = self.getmolnos()
2489 for i in idlist:
2490 rfreqs[i] = list(self._getrestfreqs(i))
2491 return rfreqs
2492 else:
2493 if type(ids) == list or type(ids) == tuple:
2494 rfreqs = {}
2495 for i in ids:
2496 rfreqs[i] = list(self._getrestfreqs(i))
2497 return rfreqs
2498 else:
2499 return list(self._getrestfreqs(ids))
2500
2501 @asaplog_post_dec
2502 def set_restfreqs(self, freqs=None, unit='Hz'):
2503 """\
2504 Set or replace the restfrequency specified and
2505 if the 'freqs' argument holds a scalar,
2506 then that rest frequency will be applied to all the selected
2507 data. If the 'freqs' argument holds
2508 a vector, then it MUST be of equal or smaller length than
2509 the number of IFs (and the available restfrequencies will be
2510 replaced by this vector). In this case, *all* data have
2511 the restfrequency set per IF according
2512 to the corresponding value you give in the 'freqs' vector.
2513 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2514 IF 1 gets restfreq 2e9.
2515
2516 You can also specify the frequencies via a linecatalog.
2517
2518 Parameters:
2519
2520 freqs: list of rest frequency values or string idenitfiers
2521
2522 unit: unit for rest frequency (default 'Hz')
2523
2524
2525 Example::
2526
2527 # set the given restfrequency for the all currently selected IFs
2528 scan.set_restfreqs(freqs=1.4e9)
2529 # set restfrequencies for the n IFs (n > 1) in the order of the
2530 # list, i.e
2531 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2532 # len(list_of_restfreqs) == nIF
2533 # for nIF == 1 the following will set multiple restfrequency for
2534 # that IF
2535 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2536 # set multiple restfrequencies per IF. as a list of lists where
2537 # the outer list has nIF elements, the inner s arbitrary
2538 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2539
2540 *Note*:
2541
2542 To do more sophisticate Restfrequency setting, e.g. on a
2543 source and IF basis, use scantable.set_selection() before using
2544 this function::
2545
2546 # provided your scantable is called scan
2547 selection = selector()
2548 selection.set_name('ORION*')
2549 selection.set_ifs([1])
2550 scan.set_selection(selection)
2551 scan.set_restfreqs(freqs=86.6e9)
2552
2553 """
2554 varlist = vars()
2555 from asap import linecatalog
2556 # simple value
2557 if isinstance(freqs, int) or isinstance(freqs, float):
2558 self._setrestfreqs([freqs], [""], unit)
2559 # list of values
2560 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2561 # list values are scalars
2562 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2563 if len(freqs) == 1:
2564 self._setrestfreqs(freqs, [""], unit)
2565 else:
2566 # allow the 'old' mode of setting mulitple IFs
2567 savesel = self._getselection()
2568 sel = self.get_selection()
2569 iflist = self.getifnos()
2570 if len(freqs)>len(iflist):
2571 raise ValueError("number of elements in list of list "
2572 "exeeds the current IF selections")
2573 iflist = self.getifnos()
2574 for i, fval in enumerate(freqs):
2575 sel.set_ifs(iflist[i])
2576 self._setselection(sel)
2577 self._setrestfreqs([fval], [""], unit)
2578 self._setselection(savesel)
2579
2580 # list values are dict, {'value'=, 'name'=)
2581 elif isinstance(freqs[-1], dict):
2582 values = []
2583 names = []
2584 for d in freqs:
2585 values.append(d["value"])
2586 names.append(d["name"])
2587 self._setrestfreqs(values, names, unit)
2588 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2589 savesel = self._getselection()
2590 sel = self.get_selection()
2591 iflist = self.getifnos()
2592 if len(freqs)>len(iflist):
2593 raise ValueError("number of elements in list of list exeeds"
2594 " the current IF selections")
2595 for i, fval in enumerate(freqs):
2596 sel.set_ifs(iflist[i])
2597 self._setselection(sel)
2598 self._setrestfreqs(fval, [""], unit)
2599 self._setselection(savesel)
2600 # freqs are to be taken from a linecatalog
2601 elif isinstance(freqs, linecatalog):
2602 savesel = self._getselection()
2603 sel = self.get_selection()
2604 for i in xrange(freqs.nrow()):
2605 sel.set_ifs(iflist[i])
2606 self._setselection(sel)
2607 self._setrestfreqs([freqs.get_frequency(i)],
2608 [freqs.get_name(i)], "MHz")
2609 # ensure that we are not iterating past nIF
2610 if i == self.nif()-1: break
2611 self._setselection(savesel)
2612 else:
2613 return
2614 self._add_history("set_restfreqs", varlist)
2615
2616 @asaplog_post_dec
2617 def shift_refpix(self, delta):
2618 """\
2619 Shift the reference pixel of the Spectra Coordinate by an
2620 integer amount.
2621
2622 Parameters:
2623
2624 delta: the amount to shift by
2625
2626 *Note*:
2627
2628 Be careful using this with broadband data.
2629
2630 """
2631 varlist = vars()
2632 Scantable.shift_refpix(self, delta)
2633 s._add_history("shift_refpix", varlist)
2634
2635 @asaplog_post_dec
2636 def history(self, filename=None, nrows=-1, start=0):
2637 """\
2638 Print the history. Optionally to a file.
2639
2640 Parameters:
2641
2642 filename: The name of the file to save the history to.
2643
2644 """
2645 n = self._historylength()
2646 if nrows == -1:
2647 nrows = n
2648 if start+nrows > n:
2649 nrows = nrows-start
2650 if n > 1000 and nrows == n:
2651 nrows = 1000
2652 start = n-1000
2653 asaplog.push("Warning: History has {0} entries. Displaying last "
2654 "1000".format(n))
2655 hist = list(self._gethistory(nrows, start))
2656 out = "-"*80
2657 for h in hist:
2658 if not h.strip():
2659 continue
2660 if h.find("---") >-1:
2661 continue
2662 else:
2663 items = h.split("##")
2664 date = items[0]
2665 func = items[1]
2666 items = items[2:]
2667 out += "\n"+date+"\n"
2668 out += "Function: %s\n Parameters:" % (func)
2669 for i in items:
2670 if i == '':
2671 continue
2672 s = i.split("=")
2673 out += "\n %s = %s" % (s[0], s[1])
2674 out = "\n".join([out, "*"*80])
2675 if filename is not None:
2676 if filename is "":
2677 filename = 'scantable_history.txt'
2678 filename = os.path.expandvars(os.path.expanduser(filename))
2679 if not os.path.isdir(filename):
2680 data = open(filename, 'w')
2681 data.write(out)
2682 data.close()
2683 else:
2684 msg = "Illegal file name '%s'." % (filename)
2685 raise IOError(msg)
2686 return page(out)
2687
2688 #
2689 # Maths business
2690 #
2691 @asaplog_post_dec
2692 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2693 avmode="NONE"):
2694 """\
2695 Return the (time) weighted average of a scan. Scans will be averaged
2696 only if the source direction (RA/DEC) is within 1' otherwise
2697
2698 *Note*:
2699
2700 in channels only - align if necessary
2701
2702 Parameters:
2703
2704 mask: an optional mask (only used for 'var' and 'tsys'
2705 weighting)
2706
2707 scanav: True averages each scan separately
2708 False (default) averages all scans together,
2709
2710 weight: Weighting scheme.
2711 'none' (mean no weight)
2712 'var' (1/var(spec) weighted)
2713 'tsys' (1/Tsys**2 weighted)
2714 'tint' (integration time weighted)
2715 'tintsys' (Tint/Tsys**2)
2716 'median' ( median averaging)
2717 The default is 'tint'
2718
2719 align: align the spectra in velocity before averaging. It takes
2720 the time of the first spectrum as reference time.
2721 avmode: 'SOURCE' - also select by source name - or
2722 'NONE' (default). Not applicable for scanav=True or
2723 weight=median
2724
2725 Example::
2726
2727 # time average the scantable without using a mask
2728 newscan = scan.average_time()
2729
2730 """
2731 varlist = vars()
2732 weight = weight or 'TINT'
2733 mask = mask or ()
2734 scanav = (scanav and 'SCAN') or avmode.upper()
2735 scan = (self, )
2736
2737 if align:
2738 scan = (self.freq_align(insitu=False), )
2739 asaplog.push("Note: Alignment is don on a source-by-source basis")
2740 asaplog.push("Note: Averaging (by default) is not")
2741 # we need to set it to SOURCE averaging here
2742 s = None
2743 if weight.upper() == 'MEDIAN':
2744 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2745 scanav))
2746 else:
2747 s = scantable(self._math._average(scan, mask, weight.upper(),
2748 scanav))
2749 s._add_history("average_time", varlist)
2750 return s
2751
2752 @asaplog_post_dec
2753 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2754 """\
2755 Return a scan where all spectra are converted to either
2756 Jansky or Kelvin depending upon the flux units of the scan table.
2757 By default the function tries to look the values up internally.
2758 If it can't find them (or if you want to over-ride), you must
2759 specify EITHER jyperk OR eta (and D which it will try to look up
2760 also if you don't set it). jyperk takes precedence if you set both.
2761
2762 Parameters:
2763
2764 jyperk: the Jy / K conversion factor
2765
2766 eta: the aperture efficiency
2767
2768 d: the geometric diameter (metres)
2769
2770 insitu: if False a new scantable is returned.
2771 Otherwise, the scaling is done in-situ
2772 The default is taken from .asaprc (False)
2773
2774 """
2775 if insitu is None: insitu = rcParams['insitu']
2776 self._math._setinsitu(insitu)
2777 varlist = vars()
2778 jyperk = jyperk or -1.0
2779 d = d or -1.0
2780 eta = eta or -1.0
2781 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2782 s._add_history("convert_flux", varlist)
2783 if insitu: self._assign(s)
2784 else: return s
2785
2786 @asaplog_post_dec
2787 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2788 """\
2789 Return a scan after applying a gain-elevation correction.
2790 The correction can be made via either a polynomial or a
2791 table-based interpolation (and extrapolation if necessary).
2792 You specify polynomial coefficients, an ascii table or neither.
2793 If you specify neither, then a polynomial correction will be made
2794 with built in coefficients known for certain telescopes (an error
2795 will occur if the instrument is not known).
2796 The data and Tsys are *divided* by the scaling factors.
2797
2798 Parameters:
2799
2800 poly: Polynomial coefficients (default None) to compute a
2801 gain-elevation correction as a function of
2802 elevation (in degrees).
2803
2804 filename: The name of an ascii file holding correction factors.
2805 The first row of the ascii file must give the column
2806 names and these MUST include columns
2807 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2808 by this) somewhere.
2809 The second row must give the data type of the
2810 column. Use 'R' for Real and 'I' for Integer.
2811 An example file would be
2812 (actual factors are arbitrary) :
2813
2814 TIME ELEVATION FACTOR
2815 R R R
2816 0.1 0 0.8
2817 0.2 20 0.85
2818 0.3 40 0.9
2819 0.4 60 0.85
2820 0.5 80 0.8
2821 0.6 90 0.75
2822
2823 method: Interpolation method when correcting from a table.
2824 Values are 'nearest', 'linear' (default), 'cubic'
2825 and 'spline'
2826
2827 insitu: if False a new scantable is returned.
2828 Otherwise, the scaling is done in-situ
2829 The default is taken from .asaprc (False)
2830
2831 """
2832
2833 if insitu is None: insitu = rcParams['insitu']
2834 self._math._setinsitu(insitu)
2835 varlist = vars()
2836 poly = poly or ()
2837 from os.path import expandvars
2838 filename = expandvars(filename)
2839 s = scantable(self._math._gainel(self, poly, filename, method))
2840 s._add_history("gain_el", varlist)
2841 if insitu:
2842 self._assign(s)
2843 else:
2844 return s
2845
2846 @asaplog_post_dec
2847 def freq_align(self, reftime=None, method='cubic', insitu=None):
2848 """\
2849 Return a scan where all rows have been aligned in frequency/velocity.
2850 The alignment frequency frame (e.g. LSRK) is that set by function
2851 set_freqframe.
2852
2853 Parameters:
2854
2855 reftime: reference time to align at. By default, the time of
2856 the first row of data is used.
2857
2858 method: Interpolation method for regridding the spectra.
2859 Choose from 'nearest', 'linear', 'cubic' (default)
2860 and 'spline'
2861
2862 insitu: if False a new scantable is returned.
2863 Otherwise, the scaling is done in-situ
2864 The default is taken from .asaprc (False)
2865
2866 """
2867 if insitu is None: insitu = rcParams["insitu"]
2868 oldInsitu = self._math._insitu()
2869 self._math._setinsitu(insitu)
2870 varlist = vars()
2871 reftime = reftime or ""
2872 s = scantable(self._math._freq_align(self, reftime, method))
2873 s._add_history("freq_align", varlist)
2874 self._math._setinsitu(oldInsitu)
2875 if insitu:
2876 self._assign(s)
2877 else:
2878 return s
2879
2880 @asaplog_post_dec
2881 def opacity(self, tau=None, insitu=None):
2882 """\
2883 Apply an opacity correction. The data
2884 and Tsys are multiplied by the correction factor.
2885
2886 Parameters:
2887
2888 tau: (list of) opacity from which the correction factor is
2889 exp(tau*ZD)
2890 where ZD is the zenith-distance.
2891 If a list is provided, it has to be of length nIF,
2892 nIF*nPol or 1 and in order of IF/POL, e.g.
2893 [opif0pol0, opif0pol1, opif1pol0 ...]
2894 if tau is `None` the opacities are determined from a
2895 model.
2896
2897 insitu: if False a new scantable is returned.
2898 Otherwise, the scaling is done in-situ
2899 The default is taken from .asaprc (False)
2900
2901 """
2902 if insitu is None:
2903 insitu = rcParams['insitu']
2904 self._math._setinsitu(insitu)
2905 varlist = vars()
2906 if not hasattr(tau, "__len__"):
2907 tau = [tau]
2908 s = scantable(self._math._opacity(self, tau))
2909 s._add_history("opacity", varlist)
2910 if insitu:
2911 self._assign(s)
2912 else:
2913 return s
2914
2915 @asaplog_post_dec
2916 def bin(self, width=5, insitu=None):
2917 """\
2918 Return a scan where all spectra have been binned up.
2919
2920 Parameters:
2921
2922 width: The bin width (default=5) in pixels
2923
2924 insitu: if False a new scantable is returned.
2925 Otherwise, the scaling is done in-situ
2926 The default is taken from .asaprc (False)
2927
2928 """
2929 if insitu is None:
2930 insitu = rcParams['insitu']
2931 self._math._setinsitu(insitu)
2932 varlist = vars()
2933 s = scantable(self._math._bin(self, width))
2934 s._add_history("bin", varlist)
2935 if insitu:
2936 self._assign(s)
2937 else:
2938 return s
2939
2940 @asaplog_post_dec
2941 def reshape(self, first, last, insitu=None):
2942 """Resize the band by providing first and last channel.
2943 This will cut off all channels outside [first, last].
2944 """
2945 if insitu is None:
2946 insitu = rcParams['insitu']
2947 varlist = vars()
2948 if last < 0:
2949 last = self.nchan()-1 + last
2950 s = None
2951 if insitu:
2952 s = self
2953 else:
2954 s = self.copy()
2955 s._reshape(first,last)
2956 s._add_history("reshape", varlist)
2957 if not insitu:
2958 return s
2959
2960 @asaplog_post_dec
2961 def resample(self, width=5, method='cubic', insitu=None):
2962 """\
2963 Return a scan where all spectra have been binned up.
2964
2965 Parameters:
2966
2967 width: The bin width (default=5) in pixels
2968
2969 method: Interpolation method when correcting from a table.
2970 Values are 'nearest', 'linear', 'cubic' (default)
2971 and 'spline'
2972
2973 insitu: if False a new scantable is returned.
2974 Otherwise, the scaling is done in-situ
2975 The default is taken from .asaprc (False)
2976
2977 """
2978 if insitu is None:
2979 insitu = rcParams['insitu']
2980 self._math._setinsitu(insitu)
2981 varlist = vars()
2982 s = scantable(self._math._resample(self, method, width))
2983 s._add_history("resample", varlist)
2984 if insitu:
2985 self._assign(s)
2986 else:
2987 return s
2988
2989 @asaplog_post_dec
2990 def average_pol(self, mask=None, weight='none'):
2991 """\
2992 Average the Polarisations together.
2993
2994 Parameters:
2995
2996 mask: An optional mask defining the region, where the
2997 averaging will be applied. The output will have all
2998 specified points masked.
2999
3000 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3001 weighted), or 'tsys' (1/Tsys**2 weighted)
3002
3003 """
3004 varlist = vars()
3005 mask = mask or ()
3006 s = scantable(self._math._averagepol(self, mask, weight.upper()))
3007 s._add_history("average_pol", varlist)
3008 return s
3009
3010 @asaplog_post_dec
3011 def average_beam(self, mask=None, weight='none'):
3012 """\
3013 Average the Beams together.
3014
3015 Parameters:
3016 mask: An optional mask defining the region, where the
3017 averaging will be applied. The output will have all
3018 specified points masked.
3019
3020 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3021 weighted), or 'tsys' (1/Tsys**2 weighted)
3022
3023 """
3024 varlist = vars()
3025 mask = mask or ()
3026 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3027 s._add_history("average_beam", varlist)
3028 return s
3029
3030 def parallactify(self, pflag):
3031 """\
3032 Set a flag to indicate whether this data should be treated as having
3033 been 'parallactified' (total phase == 0.0)
3034
3035 Parameters:
3036
3037 pflag: Bool indicating whether to turn this on (True) or
3038 off (False)
3039
3040 """
3041 varlist = vars()
3042 self._parallactify(pflag)
3043 self._add_history("parallactify", varlist)
3044
3045 @asaplog_post_dec
3046 def convert_pol(self, poltype=None):
3047 """\
3048 Convert the data to a different polarisation type.
3049 Note that you will need cross-polarisation terms for most conversions.
3050
3051 Parameters:
3052
3053 poltype: The new polarisation type. Valid types are:
3054 'linear', 'circular', 'stokes' and 'linpol'
3055
3056 """
3057 varlist = vars()
3058 s = scantable(self._math._convertpol(self, poltype))
3059 s._add_history("convert_pol", varlist)
3060 return s
3061
3062 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data for plotting and for
        # restoring rows the user rejects
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive verification: 'action' steers the per-row loop below
        # ('N' = reject all remaining rows without plotting)
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user: y/n accept/reject this row, 'R' rejects the
                # rest, 'A' accepts the rest
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3154
3155 @asaplog_post_dec
3156 def regrid_channel(self, width=5, plot=False, insitu=None):
3157 """\
3158 Regrid the spectra by the specified channel width
3159
3160 Parameters:
3161
3162 width: The channel width (float) of regridded spectra
3163 in the current spectral unit.
3164
3165 plot: [NOT IMPLEMENTED YET]
3166 plot the original and the regridded spectra.
3167 In this each indivual fit has to be approved, by
3168 typing 'y' or 'n'
3169
3170 insitu: if False a new scantable is returned.
3171 Otherwise, the scaling is done in-situ
3172 The default is taken from .asaprc (False)
3173
3174 """
3175 if insitu is None: insitu = rcParams['insitu']
3176 varlist = vars()
3177
3178 if plot:
3179 asaplog.post()
3180 asaplog.push("Verification plot is not implemtnetd yet.")
3181 asaplog.post("WARN")
3182
3183 s = self.copy()
3184 s._regrid_specchan(width)
3185
3186 s._add_history("regrid_channel", varlist)
3187
3188# if plot:
3189# from asap.asapplotter import new_asaplot
3190# theplot = new_asaplot(rcParams['plotter.gui'])
3191# from matplotlib import rc as rcp
3192# rcp('lines', linewidth=1)
3193# theplot.set_panels()
3194# ylab=s._get_ordinate_label()
3195# #theplot.palette(0,["#777777","red"])
3196# for r in xrange(s.nrow()):
3197# xsm=s._getabcissa(r)
3198# ysm=s._getspectrum(r)
3199# xorg=orgscan._getabcissa(r)
3200# yorg=orgscan._getspectrum(r)
3201# theplot.clear()
3202# theplot.hold()
3203# theplot.set_axes('ylabel',ylab)
3204# theplot.set_axes('xlabel',s._getabcissalabel(r))
3205# theplot.set_axes('title',s._getsourcename(r))
3206# theplot.set_line(label='Original',color="#777777")
3207# theplot.plot(xorg,yorg)
3208# theplot.set_line(label='Smoothed',color="red")
3209# theplot.plot(xsm,ysm)
3210# ### Ugly part for legend
3211# for i in [0,1]:
3212# theplot.subplots[0]['lines'].append(
3213# [theplot.subplots[0]['axes'].lines[i]]
3214# )
3215# theplot.release()
3216# ### Ugly part for legend
3217# theplot.subplots[0]['lines']=[]
3218# res = raw_input("Accept smoothing ([y]/n): ")
3219# if res.upper() == 'N':
3220# s._setspectrum(yorg, r)
3221# theplot.quit()
3222# del theplot
3223# del orgscan
3224
3225 if insitu: self._assign(s)
3226 else: return s
3227
3228 @asaplog_post_dec
3229 def _parse_wn(self, wn):
3230 if isinstance(wn, list) or isinstance(wn, tuple):
3231 return wn
3232 elif isinstance(wn, int):
3233 return [ wn ]
3234 elif isinstance(wn, str):
3235 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3236 val = wn.split('-')
3237 val = [int(val[0]), int(val[1])]
3238 val.sort()
3239 res = [i for i in xrange(val[0], val[1]+1)]
3240 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3241 val = int(wn[2:])+1
3242 res = [i for i in xrange(val)]
3243 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3244 val = int(wn[:-2])+1
3245 res = [i for i in xrange(val)]
3246 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3247 val = int(wn[1:])
3248 res = [i for i in xrange(val)]
3249 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3250 val = int(wn[:-1])
3251 res = [i for i in xrange(val)]
3252 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3253 # then interpreted in C++
3254 # side as [a,a+1,...,a_nyq]
3255 # (CAS-3759)
3256 val = int(wn[2:])
3257 res = [val, -999]
3258 #res = [i for i in xrange(val, self.nchan()/2+1)]
3259 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3260 # then interpreted in C++
3261 # side as [a,a+1,...,a_nyq]
3262 # (CAS-3759)
3263 val = int(wn[:-2])
3264 res = [val, -999]
3265 #res = [i for i in xrange(val, self.nchan()/2+1)]
3266 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3267 # then interpreted in C++
3268 # side as [a+1,a+2,...,a_nyq]
3269 # (CAS-3759)
3270 val = int(wn[1:])+1
3271 res = [val, -999]
3272 #res = [i for i in xrange(val, self.nchan()/2+1)]
3273 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3274 # then interpreted in C++
3275 # side as [a+1,a+2,...,a_nyq]
3276 # (CAS-3759)
3277 val = int(wn[:-1])+1
3278 res = [val, -999]
3279 #res = [i for i in xrange(val, self.nchan()/2+1)]
3280
3281 return res
3282 else:
3283 msg = 'wrong value given for addwn/rejwn'
3284 raise RuntimeError(msg)
3285
3286 @asaplog_post_dec
3287 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3288 """\
3289 Subtract baseline based on parameters written in Baseline Table.
3290
3291 Parameters:
3292 insitu: if True, baseline fitting/subtraction is done
3293 in-situ. If False, a new scantable with
3294 baseline subtracted is returned. Actually,
3295 format of the returned value depends on both
3296 insitu and retfitres (see below).
3297 The default is taken from .asaprc (False)
3298 retfitres: if True, the results of baseline fitting (i.e.,
3299 coefficients and rms) are returned.
3300 default is False.
3301 The format of the returned value of this
3302 function varies as follows:
3303 (1) in case insitu=True and retfitres=True:
3304 fitting result.
3305 (2) in case insitu=True and retfitres=False:
3306 None.
3307 (3) in case insitu=False and retfitres=True:
3308 a dictionary containing a new scantable
3309 (with baseline subtracted) and the fitting
3310 results.
3311 (4) in case insitu=False and retfitres=False:
3312 a new scantable (with baseline subtracted).
3313 inbltable: name of input baseline table. The row number of
3314 scantable and that of inbltable must be
3315 identical.
3316 outbltable: name of output baseline table where baseline
3317 parameters and fitting results recorded.
3318 default is ''(no output).
3319 overwrite: if True when an existing baseline table is
3320 specified for outbltable, overwrites it.
3321 Otherwise there is no harm.
3322 default is False.
3323 """
3324
3325 try:
3326 varlist = vars()
3327 if retfitres is None: retfitres = False
3328 if inbltable is None: raise ValueError("bltable missing.")
3329 if outbltable is None: outbltable = ''
3330 if overwrite is None: overwrite = False
3331
3332 if insitu is None: insitu = rcParams['insitu']
3333 if insitu:
3334 workscan = self
3335 else:
3336 workscan = self.copy()
3337
3338 sres = workscan._apply_bltable(inbltable,
3339 retfitres,
3340 outbltable,
3341 os.path.exists(outbltable),
3342 overwrite)
3343 if retfitres: res = parse_fitresult(sres)
3344
3345 workscan._add_history('apply_bltable', varlist)
3346
3347 if insitu:
3348 self._assign(workscan)
3349 if retfitres:
3350 return res
3351 else:
3352 return None
3353 else:
3354 if retfitres:
3355 return {'scantable': workscan, 'fitresults': res}
3356 else:
3357 return workscan
3358
3359 except RuntimeError, e:
3360 raise_fitting_failure_exception(e)
3361
3362 @asaplog_post_dec
3363 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3364 """\
3365 Subtract baseline based on parameters written in the input list.
3366
3367 Parameters:
3368 insitu: if True, baseline fitting/subtraction is done
3369 in-situ. If False, a new scantable with
3370 baseline subtracted is returned. Actually,
3371 format of the returned value depends on both
3372 insitu and retfitres (see below).
3373 The default is taken from .asaprc (False)
3374 retfitres: if True, the results of baseline fitting (i.e.,
3375 coefficients and rms) are returned.
3376 default is False.
3377 The format of the returned value of this
3378 function varies as follows:
3379 (1) in case insitu=True and retfitres=True:
3380 fitting result.
3381 (2) in case insitu=True and retfitres=False:
3382 None.
3383 (3) in case insitu=False and retfitres=True:
3384 a dictionary containing a new scantable
3385 (with baseline subtracted) and the fitting
3386 results.
3387 (4) in case insitu=False and retfitres=False:
3388 a new scantable (with baseline subtracted).
3389 blinfo: baseline parameter set stored in a dictionary
3390 or a list of dictionary. Each dictionary
3391 corresponds to each spectrum and must contain
3392 the following keys and values:
3393 'row': row number,
3394 'blfunc': function name. available ones include
3395 'poly', 'chebyshev', 'cspline' and
3396 'sinusoid',
3397 'order': maximum order of polynomial. needed
3398 if blfunc='poly' or 'chebyshev',
3399 'npiece': number or piecewise polynomial.
3400 needed if blfunc='cspline',
3401 'nwave': a list of sinusoidal wave numbers.
3402 needed if blfunc='sinusoid', and
3403 'masklist': min-max windows for channel mask.
3404 the specified ranges will be used
3405 for fitting.
3406 bltable: name of output baseline table where baseline
3407 parameters and fitting results recorded.
3408 default is ''(no output).
3409 overwrite: if True when an existing baseline table is
3410 specified for bltable, overwrites it.
3411 Otherwise there is no harm.
3412 default is False.
3413
3414 Example:
3415 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3416 'masklist':[[10,350],[352,510]]},
3417 {'row':1, 'blfunc':'cspline', 'npiece':3,
3418 'masklist':[[3,16],[19,404],[407,511]]}
3419 ])
3420
3421 the first spectrum (row=0) will be fitted with polynomial
3422 of order=5 and the next one (row=1) will be fitted with cubic
3423 spline consisting of 3 pieces.
3424 """
3425
3426 try:
3427 varlist = vars()
3428 if retfitres is None: retfitres = False
3429 if blinfo is None: blinfo = []
3430 if bltable is None: bltable = ''
3431 if overwrite is None: overwrite = False
3432
3433 if insitu is None: insitu = rcParams['insitu']
3434 if insitu:
3435 workscan = self
3436 else:
3437 workscan = self.copy()
3438
3439 nrow = workscan.nrow()
3440
3441 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3442
3443 print "in_blinfo=< "+ str(in_blinfo)+" >"
3444
3445 sres = workscan._sub_baseline(in_blinfo,
3446 retfitres,
3447 bltable,
3448 os.path.exists(bltable),
3449 overwrite)
3450 if retfitres: res = parse_fitresult(sres)
3451
3452 workscan._add_history('sub_baseline', varlist)
3453
3454 if insitu:
3455 self._assign(workscan)
3456 if retfitres:
3457 return res
3458 else:
3459 return None
3460 else:
3461 if retfitres:
3462 return {'scantable': workscan, 'fitresults': res}
3463 else:
3464 return workscan
3465
3466 except RuntimeError, e:
3467 raise_fitting_failure_exception(e)
3468
3469 @asaplog_post_dec
3470 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3471 whichrow=None, uselinefinder=None, edge=None,
3472 threshold=None, chan_avg_limit=None):
3473 """\
3474 Calculates and returns model selection criteria for a specified
3475 baseline model and a given spectrum data.
3476 Available values include Akaike Information Criterion (AIC), the
3477 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3478 Bayesian Information Criterion (BIC) and the Generalised Cross
3479 Validation (GCV).
3480
3481 Parameters:
3482 value: name of model selection criteria to calculate.
3483 available ones include 'aic', 'aicc', 'bic' and
3484 'gcv'. default is 'aicc'.
3485 blfunc: baseline function name. available ones include
3486 'chebyshev', 'cspline' and 'sinusoid'.
3487 default is 'chebyshev'.
3488 order: parameter for basline function. actually stands for
3489 order of polynomial (order) for 'chebyshev',
3490 number of spline pieces (npiece) for 'cspline' and
3491 maximum wave number for 'sinusoid', respectively.
3492 default is 5 (which is also the default order value
3493 for [auto_]chebyshev_baseline()).
3494 mask: an optional mask. default is [].
3495 whichrow: row number. default is 0 (the first row)
3496 uselinefinder: use sd.linefinder() to flag out line regions
3497 default is True.
3498 edge: an optional number of channel to drop at
3499 the edge of spectrum. If only one value is
3500 specified, the same number will be dropped
3501 from both sides of the spectrum. Default
3502 is to keep all channels. Nested tuples
3503 represent individual edge selection for
3504 different IFs (a number of spectral channels
3505 can be different)
3506 default is (0, 0).
3507 threshold: the threshold used by line finder. It is
3508 better to keep it large as only strong lines
3509 affect the baseline solution.
3510 default is 3.
3511 chan_avg_limit: a maximum number of consequtive spectral
3512 channels to average during the search of
3513 weak and broad lines. The default is no
3514 averaging (and no search for weak lines).
3515 If such lines can affect the fitted baseline
3516 (e.g. a high order polynomial is fitted),
3517 increase this parameter (usually values up
3518 to 8 are reasonable). Most users of this
3519 method should find the default value sufficient.
3520 default is 1.
3521
3522 Example:
3523 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3524 """
3525
3526 try:
3527 varlist = vars()
3528
3529 if value is None: value = 'aicc'
3530 if blfunc is None: blfunc = 'chebyshev'
3531 if order is None: order = 5
3532 if mask is None: mask = []
3533 if whichrow is None: whichrow = 0
3534 if uselinefinder is None: uselinefinder = True
3535 if edge is None: edge = (0, 0)
3536 if threshold is None: threshold = 3
3537 if chan_avg_limit is None: chan_avg_limit = 1
3538
3539 return self._calc_aic(value, blfunc, order, mask,
3540 whichrow, uselinefinder, edge,
3541 threshold, chan_avg_limit)
3542
3543 except RuntimeError, e:
3544 raise_fitting_failure_exception(e)
3545
3546 @asaplog_post_dec
3547 def sinusoid_baseline(self, mask=None, applyfft=None,
3548 fftmethod=None, fftthresh=None,
3549 addwn=None, rejwn=None,
3550 insitu=None,
3551 clipthresh=None, clipniter=None,
3552 plot=None,
3553 getresidual=None,
3554 showprogress=None, minnrow=None,
3555 outlog=None,
3556 blfile=None, csvformat=None,
3557 bltable=None):
3558 """\
3559 Return a scan which has been baselined (all rows) with sinusoidal
3560 functions.
3561
3562 Parameters:
3563 mask: an optional mask
3564 applyfft: if True use some method, such as FFT, to find
3565 strongest sinusoidal components in the wavenumber
3566 domain to be used for baseline fitting.
3567 default is True.
3568 fftmethod: method to find the strong sinusoidal components.
3569 now only 'fft' is available and it is the default.
3570 fftthresh: the threshold to select wave numbers to be used for
3571 fitting from the distribution of amplitudes in the
3572 wavenumber domain.
3573 both float and string values accepted.
3574 given a float value, the unit is set to sigma.
3575 for string values, allowed formats include:
3576 'xsigma' or 'x' (= x-sigma level. e.g.,
3577 '3sigma'), or
3578 'topx' (= the x strongest ones, e.g. 'top5').
3579 default is 3.0 (unit: sigma).
3580 addwn: the additional wave numbers to be used for fitting.
3581 list or integer value is accepted to specify every
3582 wave numbers. also string value can be used in case
3583 you need to specify wave numbers in a certain range,
3584 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3585 '<a' (= 0,1,...,a-2,a-1),
3586 '>=a' (= a, a+1, ... up to the maximum wave
3587 number corresponding to the Nyquist
3588 frequency for the case of FFT).
3589 default is [0].
3590 rejwn: the wave numbers NOT to be used for fitting.
3591 can be set just as addwn but has higher priority:
3592 wave numbers which are specified both in addwn
3593 and rejwn will NOT be used. default is [].
3594 insitu: if False a new scantable is returned.
3595 Otherwise, the scaling is done in-situ
3596 The default is taken from .asaprc (False)
3597 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3598 clipniter: maximum number of iteration of 'clipthresh'-sigma
3599 clipping (default is 0)
3600 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3601 plot the fit and the residual. In this each
3602 indivual fit has to be approved, by typing 'y'
3603 or 'n'
3604 getresidual: if False, returns best-fit values instead of
3605 residual. (default is True)
3606 showprogress: show progress status for large data.
3607 default is True.
3608 minnrow: minimum number of input spectra to show.
3609 default is 1000.
3610 outlog: Output the coefficients of the best-fit
3611 function to logger (default is False)
3612 blfile: Name of a text file in which the best-fit
3613 parameter values to be written
3614 (default is '': no file/logger output)
3615 csvformat: if True blfile is csv-formatted, default is False.
3616 bltable: name of a baseline table where fitting results
3617 (coefficients, rms, etc.) are to be written.
3618 if given, fitting results will NOT be output to
3619 scantable (insitu=True) or None will be
3620 returned (insitu=False).
3621 (default is "": no table output)
3622
3623 Example:
3624 # return a scan baselined by a combination of sinusoidal curves
3625 # having wave numbers in spectral window up to 10,
3626 # also with 3-sigma clipping, iteration up to 4 times
3627 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3628
3629 Note:
3630 The best-fit parameter values output in logger and/or blfile are now
3631 based on specunit of 'channel'.
3632 """
3633
3634 try:
3635 varlist = vars()
3636
3637 if insitu is None: insitu = rcParams['insitu']
3638 if insitu:
3639 workscan = self
3640 else:
3641 workscan = self.copy()
3642
3643 if mask is None: mask = []
3644 if applyfft is None: applyfft = True
3645 if fftmethod is None: fftmethod = 'fft'
3646 if fftthresh is None: fftthresh = 3.0
3647 if addwn is None: addwn = [0]
3648 if rejwn is None: rejwn = []
3649 if clipthresh is None: clipthresh = 3.0
3650 if clipniter is None: clipniter = 0
3651 if plot is None: plot = False
3652 if getresidual is None: getresidual = True
3653 if showprogress is None: showprogress = True
3654 if minnrow is None: minnrow = 1000
3655 if outlog is None: outlog = False
3656 if blfile is None: blfile = ''
3657 if csvformat is None: csvformat = False
3658 if bltable is None: bltable = ''
3659
3660 sapplyfft = 'true' if applyfft else 'false'
3661 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3662
3663 scsvformat = 'T' if csvformat else 'F'
3664
3665 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3666 workscan._sinusoid_baseline(mask,
3667 fftinfo,
3668 #applyfft, fftmethod.lower(),
3669 #str(fftthresh).lower(),
3670 workscan._parse_wn(addwn),
3671 workscan._parse_wn(rejwn),
3672 clipthresh, clipniter,
3673 getresidual,
3674 pack_progress_params(showprogress,
3675 minnrow),
3676 outlog, scsvformat+blfile,
3677 bltable)
3678 workscan._add_history('sinusoid_baseline', varlist)
3679
3680 if bltable == '':
3681 if insitu:
3682 self._assign(workscan)
3683 else:
3684 return workscan
3685 else:
3686 if not insitu:
3687 return None
3688
3689 except RuntimeError, e:
3690 raise_fitting_failure_exception(e)
3691
3692
3693 @asaplog_post_dec
3694 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3695 fftmethod=None, fftthresh=None,
3696 addwn=None, rejwn=None,
3697 insitu=None,
3698 clipthresh=None, clipniter=None,
3699 edge=None, threshold=None, chan_avg_limit=None,
3700 plot=None,
3701 getresidual=None,
3702 showprogress=None, minnrow=None,
3703 outlog=None,
3704 blfile=None, csvformat=None,
3705 bltable=None):
3706 """\
3707 Return a scan which has been baselined (all rows) with sinusoidal
3708 functions.
3709 Spectral lines are detected first using linefinder and masked out
3710 to avoid them affecting the baseline solution.
3711
3712 Parameters:
3713 mask: an optional mask retreived from scantable
3714 applyfft: if True use some method, such as FFT, to find
3715 strongest sinusoidal components in the wavenumber
3716 domain to be used for baseline fitting.
3717 default is True.
3718 fftmethod: method to find the strong sinusoidal components.
3719 now only 'fft' is available and it is the default.
3720 fftthresh: the threshold to select wave numbers to be used for
3721 fitting from the distribution of amplitudes in the
3722 wavenumber domain.
3723 both float and string values accepted.
3724 given a float value, the unit is set to sigma.
3725 for string values, allowed formats include:
3726 'xsigma' or 'x' (= x-sigma level. e.g.,
3727 '3sigma'), or
3728 'topx' (= the x strongest ones, e.g. 'top5').
3729 default is 3.0 (unit: sigma).
3730 addwn: the additional wave numbers to be used for fitting.
3731 list or integer value is accepted to specify every
3732 wave numbers. also string value can be used in case
3733 you need to specify wave numbers in a certain range,
3734 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3735 '<a' (= 0,1,...,a-2,a-1),
3736 '>=a' (= a, a+1, ... up to the maximum wave
3737 number corresponding to the Nyquist
3738 frequency for the case of FFT).
3739 default is [0].
3740 rejwn: the wave numbers NOT to be used for fitting.
3741 can be set just as addwn but has higher priority:
3742 wave numbers which are specified both in addwn
3743 and rejwn will NOT be used. default is [].
3744 insitu: if False a new scantable is returned.
3745 Otherwise, the scaling is done in-situ
3746 The default is taken from .asaprc (False)
3747 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3748 clipniter: maximum number of iteration of 'clipthresh'-sigma
3749 clipping (default is 0)
3750 edge: an optional number of channel to drop at
3751 the edge of spectrum. If only one value is
3752 specified, the same number will be dropped
3753 from both sides of the spectrum. Default
3754 is to keep all channels. Nested tuples
3755 represent individual edge selection for
3756 different IFs (a number of spectral channels
3757 can be different)
3758 threshold: the threshold used by line finder. It is
3759 better to keep it large as only strong lines
3760 affect the baseline solution.
3761 chan_avg_limit: a maximum number of consequtive spectral
3762 channels to average during the search of
3763 weak and broad lines. The default is no
3764 averaging (and no search for weak lines).
3765 If such lines can affect the fitted baseline
3766 (e.g. a high order polynomial is fitted),
3767 increase this parameter (usually values up
3768 to 8 are reasonable). Most users of this
3769 method should find the default value sufficient.
3770 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3771 plot the fit and the residual. In this each
3772 indivual fit has to be approved, by typing 'y'
3773 or 'n'
3774 getresidual: if False, returns best-fit values instead of
3775 residual. (default is True)
3776 showprogress: show progress status for large data.
3777 default is True.
3778 minnrow: minimum number of input spectra to show.
3779 default is 1000.
3780 outlog: Output the coefficients of the best-fit
3781 function to logger (default is False)
3782 blfile: Name of a text file in which the best-fit
3783 parameter values to be written
3784 (default is "": no file/logger output)
3785 csvformat: if True blfile is csv-formatted, default is False.
3786 bltable: name of a baseline table where fitting results
3787 (coefficients, rms, etc.) are to be written.
3788 if given, fitting results will NOT be output to
3789 scantable (insitu=True) or None will be
3790 returned (insitu=False).
3791 (default is "": no table output)
3792
3793 Example:
3794 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3795
3796 Note:
3797 The best-fit parameter values output in logger and/or blfile are now
3798 based on specunit of 'channel'.
3799 """
3800
3801 try:
3802 varlist = vars()
3803
3804 if insitu is None: insitu = rcParams['insitu']
3805 if insitu:
3806 workscan = self
3807 else:
3808 workscan = self.copy()
3809
3810 if mask is None: mask = []
3811 if applyfft is None: applyfft = True
3812 if fftmethod is None: fftmethod = 'fft'
3813 if fftthresh is None: fftthresh = 3.0
3814 if addwn is None: addwn = [0]
3815 if rejwn is None: rejwn = []
3816 if clipthresh is None: clipthresh = 3.0
3817 if clipniter is None: clipniter = 0
3818 if edge is None: edge = (0,0)
3819 if threshold is None: threshold = 3
3820 if chan_avg_limit is None: chan_avg_limit = 1
3821 if plot is None: plot = False
3822 if getresidual is None: getresidual = True
3823 if showprogress is None: showprogress = True
3824 if minnrow is None: minnrow = 1000
3825 if outlog is None: outlog = False
3826 if blfile is None: blfile = ''
3827 if csvformat is None: csvformat = False
3828 if bltable is None: bltable = ''
3829
3830 sapplyfft = 'true' if applyfft else 'false'
3831 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3832
3833 scsvformat = 'T' if csvformat else 'F'
3834
3835 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3836 workscan._auto_sinusoid_baseline(mask,
3837 fftinfo,
3838 workscan._parse_wn(addwn),
3839 workscan._parse_wn(rejwn),
3840 clipthresh, clipniter,
3841 normalise_edge_param(edge),
3842 threshold, chan_avg_limit,
3843 getresidual,
3844 pack_progress_params(showprogress,
3845 minnrow),
3846 outlog, scsvformat+blfile, bltable)
3847 workscan._add_history("auto_sinusoid_baseline", varlist)
3848
3849 if bltable == '':
3850 if insitu:
3851 self._assign(workscan)
3852 else:
3853 return workscan
3854 else:
3855 if not insitu:
3856 return None
3857
3858 except RuntimeError, e:
3859 raise_fitting_failure_exception(e)
3860
3861 @asaplog_post_dec
3862 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3863 clipthresh=None, clipniter=None, plot=None,
3864 getresidual=None, showprogress=None, minnrow=None,
3865 outlog=None, blfile=None, csvformat=None,
3866 bltable=None):
3867 """\
3868 Return a scan which has been baselined (all rows) by cubic spline
3869 function (piecewise cubic polynomial).
3870
3871 Parameters:
3872 mask: An optional mask
3873 npiece: Number of pieces. (default is 2)
3874 insitu: If False a new scantable is returned.
3875 Otherwise, the scaling is done in-situ
3876 The default is taken from .asaprc (False)
3877 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3878 clipniter: maximum number of iteration of 'clipthresh'-sigma
3879 clipping (default is 0)
3880 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3881 plot the fit and the residual. In this each
3882 indivual fit has to be approved, by typing 'y'
3883 or 'n'
3884 getresidual: if False, returns best-fit values instead of
3885 residual. (default is True)
3886 showprogress: show progress status for large data.
3887 default is True.
3888 minnrow: minimum number of input spectra to show.
3889 default is 1000.
3890 outlog: Output the coefficients of the best-fit
3891 function to logger (default is False)
3892 blfile: Name of a text file in which the best-fit
3893 parameter values to be written
3894 (default is "": no file/logger output)
3895 csvformat: if True blfile is csv-formatted, default is False.
3896 bltable: name of a baseline table where fitting results
3897 (coefficients, rms, etc.) are to be written.
3898 if given, fitting results will NOT be output to
3899 scantable (insitu=True) or None will be
3900 returned (insitu=False).
3901 (default is "": no table output)
3902
3903 Example:
3904 # return a scan baselined by a cubic spline consisting of 2 pieces
3905 # (i.e., 1 internal knot),
3906 # also with 3-sigma clipping, iteration up to 4 times
3907 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3908
3909 Note:
3910 The best-fit parameter values output in logger and/or blfile are now
3911 based on specunit of 'channel'.
3912 """
3913
3914 try:
3915 varlist = vars()
3916
3917 if insitu is None: insitu = rcParams['insitu']
3918 if insitu:
3919 workscan = self
3920 else:
3921 workscan = self.copy()
3922
3923 if mask is None: mask = []
3924 if npiece is None: npiece = 2
3925 if clipthresh is None: clipthresh = 3.0
3926 if clipniter is None: clipniter = 0
3927 if plot is None: plot = False
3928 if getresidual is None: getresidual = True
3929 if showprogress is None: showprogress = True
3930 if minnrow is None: minnrow = 1000
3931 if outlog is None: outlog = False
3932 if blfile is None: blfile = ''
3933 if csvformat is None: csvformat = False
3934 if bltable is None: bltable = ''
3935
3936 scsvformat = 'T' if csvformat else 'F'
3937
3938 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3939 workscan._cspline_baseline(mask, npiece,
3940 clipthresh, clipniter,
3941 getresidual,
3942 pack_progress_params(showprogress,
3943 minnrow),
3944 outlog, scsvformat+blfile,
3945 bltable)
3946 workscan._add_history("cspline_baseline", varlist)
3947
3948 if bltable == '':
3949 if insitu:
3950 self._assign(workscan)
3951 else:
3952 return workscan
3953 else:
3954 if not insitu:
3955 return None
3956
3957 except RuntimeError, e:
3958 raise_fitting_failure_exception(e)
3959
3960 @asaplog_post_dec
3961 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3962 clipthresh=None, clipniter=None,
3963 edge=None, threshold=None, chan_avg_limit=None,
3964 getresidual=None, plot=None,
3965 showprogress=None, minnrow=None, outlog=None,
3966 blfile=None, csvformat=None, bltable=None):
3967 """\
3968 Return a scan which has been baselined (all rows) by cubic spline
3969 function (piecewise cubic polynomial).
3970 Spectral lines are detected first using linefinder and masked out
3971 to avoid them affecting the baseline solution.
3972
3973 Parameters:
3974 mask: an optional mask retreived from scantable
3975 npiece: Number of pieces. (default is 2)
3976 insitu: if False a new scantable is returned.
3977 Otherwise, the scaling is done in-situ
3978 The default is taken from .asaprc (False)
3979 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3980 clipniter: maximum number of iteration of 'clipthresh'-sigma
3981 clipping (default is 0)
3982 edge: an optional number of channel to drop at
3983 the edge of spectrum. If only one value is
3984 specified, the same number will be dropped
3985 from both sides of the spectrum. Default
3986 is to keep all channels. Nested tuples
3987 represent individual edge selection for
3988 different IFs (a number of spectral channels
3989 can be different)
3990 threshold: the threshold used by line finder. It is
3991 better to keep it large as only strong lines
3992 affect the baseline solution.
3993 chan_avg_limit: a maximum number of consequtive spectral
3994 channels to average during the search of
3995 weak and broad lines. The default is no
3996 averaging (and no search for weak lines).
3997 If such lines can affect the fitted baseline
3998 (e.g. a high order polynomial is fitted),
3999 increase this parameter (usually values up
4000 to 8 are reasonable). Most users of this
4001 method should find the default value sufficient.
4002 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4003 plot the fit and the residual. In this each
4004 indivual fit has to be approved, by typing 'y'
4005 or 'n'
4006 getresidual: if False, returns best-fit values instead of
4007 residual. (default is True)
4008 showprogress: show progress status for large data.
4009 default is True.
4010 minnrow: minimum number of input spectra to show.
4011 default is 1000.
4012 outlog: Output the coefficients of the best-fit
4013 function to logger (default is False)
4014 blfile: Name of a text file in which the best-fit
4015 parameter values to be written
4016 (default is "": no file/logger output)
4017 csvformat: if True blfile is csv-formatted, default is False.
4018 bltable: name of a baseline table where fitting results
4019 (coefficients, rms, etc.) are to be written.
4020 if given, fitting results will NOT be output to
4021 scantable (insitu=True) or None will be
4022 returned (insitu=False).
4023 (default is "": no table output)
4024
4025 Example:
4026 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4027
4028 Note:
4029 The best-fit parameter values output in logger and/or blfile are now
4030 based on specunit of 'channel'.
4031 """
4032
4033 try:
4034 varlist = vars()
4035
4036 if insitu is None: insitu = rcParams['insitu']
4037 if insitu:
4038 workscan = self
4039 else:
4040 workscan = self.copy()
4041
4042 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4043 if mask is None: mask = []
4044 if npiece is None: npiece = 2
4045 if clipthresh is None: clipthresh = 3.0
4046 if clipniter is None: clipniter = 0
4047 if edge is None: edge = (0, 0)
4048 if threshold is None: threshold = 3
4049 if chan_avg_limit is None: chan_avg_limit = 1
4050 if plot is None: plot = False
4051 if getresidual is None: getresidual = True
4052 if showprogress is None: showprogress = True
4053 if minnrow is None: minnrow = 1000
4054 if outlog is None: outlog = False
4055 if blfile is None: blfile = ''
4056 if csvformat is None: csvformat = False
4057 if bltable is None: bltable = ''
4058
4059 scsvformat = 'T' if csvformat else 'F'
4060
4061 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4062 workscan._auto_cspline_baseline(mask, npiece,
4063 clipthresh, clipniter,
4064 normalise_edge_param(edge),
4065 threshold,
4066 chan_avg_limit, getresidual,
4067 pack_progress_params(showprogress,
4068 minnrow),
4069 outlog,
4070 scsvformat+blfile,
4071 bltable)
4072 workscan._add_history("auto_cspline_baseline", varlist)
4073
4074 if bltable == '':
4075 if insitu:
4076 self._assign(workscan)
4077 else:
4078 return workscan
4079 else:
4080 if not insitu:
4081 return None
4082
4083 except RuntimeError, e:
4084 raise_fitting_failure_exception(e)
4085
4086 @asaplog_post_dec
4087 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
4088 clipthresh=None, clipniter=None, plot=None,
4089 getresidual=None, showprogress=None, minnrow=None,
4090 outlog=None, blfile=None, csvformat=None,
4091 bltable=None):
4092 """\
4093 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4094
4095 Parameters:
4096 mask: An optional mask
4097 order: the maximum order of Chebyshev polynomial (default is 5)
4098 insitu: If False a new scantable is returned.
4099 Otherwise, the scaling is done in-situ
4100 The default is taken from .asaprc (False)
4101 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4102 clipniter: maximum number of iteration of 'clipthresh'-sigma
4103 clipping (default is 0)
4104 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4105 plot the fit and the residual. In this each
4106 indivual fit has to be approved, by typing 'y'
4107 or 'n'
4108 getresidual: if False, returns best-fit values instead of
4109 residual. (default is True)
4110 showprogress: show progress status for large data.
4111 default is True.
4112 minnrow: minimum number of input spectra to show.
4113 default is 1000.
4114 outlog: Output the coefficients of the best-fit
4115 function to logger (default is False)
4116 blfile: Name of a text file in which the best-fit
4117 parameter values to be written
4118 (default is "": no file/logger output)
4119 csvformat: if True blfile is csv-formatted, default is False.
4120 bltable: name of a baseline table where fitting results
4121 (coefficients, rms, etc.) are to be written.
4122 if given, fitting results will NOT be output to
4123 scantable (insitu=True) or None will be
4124 returned (insitu=False).
4125 (default is "": no table output)
4126
4127 Example:
4128 # return a scan baselined by a cubic spline consisting of 2 pieces
4129 # (i.e., 1 internal knot),
4130 # also with 3-sigma clipping, iteration up to 4 times
4131 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4132
4133 Note:
4134 The best-fit parameter values output in logger and/or blfile are now
4135 based on specunit of 'channel'.
4136 """
4137
4138 try:
4139 varlist = vars()
4140
4141 if insitu is None: insitu = rcParams['insitu']
4142 if insitu:
4143 workscan = self
4144 else:
4145 workscan = self.copy()
4146
4147 if mask is None: mask = []
4148 if order is None: order = 5
4149 if clipthresh is None: clipthresh = 3.0
4150 if clipniter is None: clipniter = 0
4151 if plot is None: plot = False
4152 if getresidual is None: getresidual = True
4153 if showprogress is None: showprogress = True
4154 if minnrow is None: minnrow = 1000
4155 if outlog is None: outlog = False
4156 if blfile is None: blfile = ''
4157 if csvformat is None: csvformat = False
4158 if bltable is None: bltable = ''
4159
4160 scsvformat = 'T' if csvformat else 'F'
4161
4162 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4163 workscan._chebyshev_baseline(mask, order,
4164 clipthresh, clipniter,
4165 getresidual,
4166 pack_progress_params(showprogress,
4167 minnrow),
4168 outlog, scsvformat+blfile,
4169 bltable)
4170 workscan._add_history("chebyshev_baseline", varlist)
4171
4172 if bltable == '':
4173 if insitu:
4174 self._assign(workscan)
4175 else:
4176 return workscan
4177 else:
4178 if not insitu:
4179 return None
4180
4181 except RuntimeError, e:
4182 raise_fitting_failure_exception(e)
4183
4184 @asaplog_post_dec
4185 def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
4186 clipthresh=None, clipniter=None,
4187 edge=None, threshold=None, chan_avg_limit=None,
4188 getresidual=None, plot=None,
4189 showprogress=None, minnrow=None, outlog=None,
4190 blfile=None, csvformat=None, bltable=None):
4191 """\
4192 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4193 Spectral lines are detected first using linefinder and masked out
4194 to avoid them affecting the baseline solution.
4195
4196 Parameters:
4197 mask: an optional mask retreived from scantable
4198 order: the maximum order of Chebyshev polynomial (default is 5)
4199 insitu: if False a new scantable is returned.
4200 Otherwise, the scaling is done in-situ
4201 The default is taken from .asaprc (False)
4202 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4203 clipniter: maximum number of iteration of 'clipthresh'-sigma
4204 clipping (default is 0)
4205 edge: an optional number of channel to drop at
4206 the edge of spectrum. If only one value is
4207 specified, the same number will be dropped
4208 from both sides of the spectrum. Default
4209 is to keep all channels. Nested tuples
4210 represent individual edge selection for
4211 different IFs (a number of spectral channels
4212 can be different)
4213 threshold: the threshold used by line finder. It is
4214 better to keep it large as only strong lines
4215 affect the baseline solution.
4216 chan_avg_limit: a maximum number of consequtive spectral
4217 channels to average during the search of
4218 weak and broad lines. The default is no
4219 averaging (and no search for weak lines).
4220 If such lines can affect the fitted baseline
4221 (e.g. a high order polynomial is fitted),
4222 increase this parameter (usually values up
4223 to 8 are reasonable). Most users of this
4224 method should find the default value sufficient.
4225 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4226 plot the fit and the residual. In this each
4227 indivual fit has to be approved, by typing 'y'
4228 or 'n'
4229 getresidual: if False, returns best-fit values instead of
4230 residual. (default is True)
4231 showprogress: show progress status for large data.
4232 default is True.
4233 minnrow: minimum number of input spectra to show.
4234 default is 1000.
4235 outlog: Output the coefficients of the best-fit
4236 function to logger (default is False)
4237 blfile: Name of a text file in which the best-fit
4238 parameter values to be written
4239 (default is "": no file/logger output)
4240 csvformat: if True blfile is csv-formatted, default is False.
4241 bltable: name of a baseline table where fitting results
4242 (coefficients, rms, etc.) are to be written.
4243 if given, fitting results will NOT be output to
4244 scantable (insitu=True) or None will be
4245 returned (insitu=False).
4246 (default is "": no table output)
4247
4248 Example:
4249 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4250
4251 Note:
4252 The best-fit parameter values output in logger and/or blfile are now
4253 based on specunit of 'channel'.
4254 """
4255
4256 try:
4257 varlist = vars()
4258
4259 if insitu is None: insitu = rcParams['insitu']
4260 if insitu:
4261 workscan = self
4262 else:
4263 workscan = self.copy()
4264
4265 if mask is None: mask = []
4266 if order is None: order = 5
4267 if clipthresh is None: clipthresh = 3.0
4268 if clipniter is None: clipniter = 0
4269 if edge is None: edge = (0, 0)
4270 if threshold is None: threshold = 3
4271 if chan_avg_limit is None: chan_avg_limit = 1
4272 if plot is None: plot = False
4273 if getresidual is None: getresidual = True
4274 if showprogress is None: showprogress = True
4275 if minnrow is None: minnrow = 1000
4276 if outlog is None: outlog = False
4277 if blfile is None: blfile = ''
4278 if csvformat is None: csvformat = False
4279 if bltable is None: bltable = ''
4280
4281 scsvformat = 'T' if csvformat else 'F'
4282
4283 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4284 workscan._auto_chebyshev_baseline(mask, order,
4285 clipthresh, clipniter,
4286 normalise_edge_param(edge),
4287 threshold,
4288 chan_avg_limit, getresidual,
4289 pack_progress_params(showprogress,
4290 minnrow),
4291 outlog, scsvformat+blfile,
4292 bltable)
4293 workscan._add_history("auto_chebyshev_baseline", varlist)
4294
4295 if bltable == '':
4296 if insitu:
4297 self._assign(workscan)
4298 else:
4299 return workscan
4300 else:
4301 if not insitu:
4302 return None
4303
4304 except RuntimeError, e:
4305 raise_fitting_failure_exception(e)
4306
4307 @asaplog_post_dec
4308 def poly_baseline(self, mask=None, order=None, insitu=None,
4309 clipthresh=None, clipniter=None, plot=None,
4310 getresidual=None, showprogress=None, minnrow=None,
4311 outlog=None, blfile=None, csvformat=None,
4312 bltable=None):
4313 """\
4314 Return a scan which has been baselined (all rows) by a polynomial.
4315 Parameters:
4316 mask: an optional mask
4317 order: the order of the polynomial (default is 0)
4318 insitu: if False a new scantable is returned.
4319 Otherwise, the scaling is done in-situ
4320 The default is taken from .asaprc (False)
4321 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4322 clipniter: maximum number of iteration of 'clipthresh'-sigma
4323 clipping (default is 0)
4324 plot: plot the fit and the residual. In this each
4325 indivual fit has to be approved, by typing 'y'
4326 or 'n'
4327 getresidual: if False, returns best-fit values instead of
4328 residual. (default is True)
4329 showprogress: show progress status for large data.
4330 default is True.
4331 minnrow: minimum number of input spectra to show.
4332 default is 1000.
4333 outlog: Output the coefficients of the best-fit
4334 function to logger (default is False)
4335 blfile: Name of a text file in which the best-fit
4336 parameter values to be written
4337 (default is "": no file/logger output)
4338 csvformat: if True blfile is csv-formatted, default is False.
4339 bltable: name of a baseline table where fitting results
4340 (coefficients, rms, etc.) are to be written.
4341 if given, fitting results will NOT be output to
4342 scantable (insitu=True) or None will be
4343 returned (insitu=False).
4344 (default is "": no table output)
4345
4346 Example:
4347 # return a scan baselined by a third order polynomial,
4348 # not using a mask
4349 bscan = scan.poly_baseline(order=3)
4350 """
4351
4352 try:
4353 varlist = vars()
4354
4355 if insitu is None:
4356 insitu = rcParams["insitu"]
4357 if insitu:
4358 workscan = self
4359 else:
4360 workscan = self.copy()
4361
4362 if mask is None: mask = []
4363 if order is None: order = 0
4364 if clipthresh is None: clipthresh = 3.0
4365 if clipniter is None: clipniter = 0
4366 if plot is None: plot = False
4367 if getresidual is None: getresidual = True
4368 if showprogress is None: showprogress = True
4369 if minnrow is None: minnrow = 1000
4370 if outlog is None: outlog = False
4371 if blfile is None: blfile = ''
4372 if csvformat is None: csvformat = False
4373 if bltable is None: bltable = ''
4374
4375 scsvformat = 'T' if csvformat else 'F'
4376
4377 if plot:
4378 outblfile = (blfile != "") and \
4379 os.path.exists(os.path.expanduser(
4380 os.path.expandvars(blfile))
4381 )
4382 if outblfile:
4383 blf = open(blfile, "a")
4384
4385 f = fitter()
4386 f.set_function(lpoly=order)
4387
4388 rows = xrange(workscan.nrow())
4389 #if len(rows) > 0: workscan._init_blinfo()
4390
4391 action = "H"
4392 for r in rows:
4393 f.x = workscan._getabcissa(r)
4394 f.y = workscan._getspectrum(r)
4395 if mask:
4396 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4397 else: # mask=None
4398 f.mask = workscan._getmask(r)
4399
4400 f.data = None
4401 f.fit()
4402
4403 if action != "Y": # skip plotting when accepting all
4404 f.plot(residual=True)
4405 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4406 #if accept_fit.upper() == "N":
4407 # #workscan._append_blinfo(None, None, None)
4408 # continue
4409 accept_fit = self._get_verify_action("Accept fit?",action)
4410 if r == 0: action = None
4411 if accept_fit.upper() == "N":
4412 continue
4413 elif accept_fit.upper() == "R":
4414 break
4415 elif accept_fit.upper() == "A":
4416 action = "Y"
4417
4418 blpars = f.get_parameters()
4419 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4420 #workscan._append_blinfo(blpars, masklist, f.mask)
4421 workscan._setspectrum((f.fitter.getresidual()
4422 if getresidual else f.fitter.getfit()), r)
4423
4424 if outblfile:
4425 rms = workscan.get_rms(f.mask, r)
4426 dataout = \
4427 workscan.format_blparams_row(blpars["params"],
4428 blpars["fixed"],
4429 rms, str(masklist),
4430 r, True, csvformat)
4431 blf.write(dataout)
4432
4433 f._p.unmap()
4434 f._p = None
4435
4436 if outblfile:
4437 blf.close()
4438 else:
4439 workscan._poly_baseline(mask, order,
4440 clipthresh, clipniter, #
4441 getresidual,
4442 pack_progress_params(showprogress,
4443 minnrow),
4444 outlog, scsvformat+blfile,
4445 bltable) #
4446
4447 workscan._add_history("poly_baseline", varlist)
4448
4449 if insitu:
4450 self._assign(workscan)
4451 else:
4452 return workscan
4453
4454 except RuntimeError, e:
4455 raise_fitting_failure_exception(e)
4456
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for unspecified parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            scsvformat = 'T' if csvformat else 'F'

            # expand the edge specification into per-IF [left, right] pairs
            edge = normalise_edge_param(edge)

            if plot:
                # Interactive mode: run the line finder and fit row-by-row
                # with the python fitter so each fit can be accepted or
                # rejected.  NOTE(review): bltable is ignored in this
                # branch, and blfile must already exist (it is appended to).
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                action = "H"
                for r in rows:
                    # 'edge' holds flattened [left, right] pairs per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine user mask with per-row flags (CAS-1434)
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    # mask detected spectral lines before fitting
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the best-fit curve
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: delegate line finding and fitting to
                # the C++ layer
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is written, nothing is assigned back
            # or returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4641
4642 def _init_blinfo(self):
4643 """\
4644 Initialise the following three auxiliary members:
4645 blpars : parameters of the best-fit baseline,
4646 masklists : mask data (edge positions of masked channels) and
4647 actualmask : mask data (in boolean list),
4648 to keep for use later (including output to logger/text files).
4649 Used by poly_baseline() and auto_poly_baseline() in case of
4650 'plot=True'.
4651 """
4652 self.blpars = []
4653 self.masklists = []
4654 self.actualmask = []
4655 return
4656
4657 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4658 """\
4659 Append baseline-fitting related info to blpars, masklist and
4660 actualmask.
4661 """
4662 self.blpars.append(data_blpars)
4663 self.masklists.append(data_masklists)
4664 self.actualmask.append(data_actualmask)
4665 return
4666
4667 @asaplog_post_dec
4668 def rotate_linpolphase(self, angle):
4669 """\
4670 Rotate the phase of the complex polarization O=Q+iU correlation.
4671 This is always done in situ in the raw data. So if you call this
4672 function more than once then each call rotates the phase further.
4673
4674 Parameters:
4675
4676 angle: The angle (degrees) to rotate (add) by.
4677
4678 Example::
4679
4680 scan.rotate_linpolphase(2.3)
4681
4682 """
4683 varlist = vars()
4684 self._math._rotate_linpolphase(self, angle)
4685 self._add_history("rotate_linpolphase", varlist)
4686 return
4687
4688 @asaplog_post_dec
4689 def rotate_xyphase(self, angle):
4690 """\
4691 Rotate the phase of the XY correlation. This is always done in situ
4692 in the data. So if you call this function more than once
4693 then each call rotates the phase further.
4694
4695 Parameters:
4696
4697 angle: The angle (degrees) to rotate (add) by.
4698
4699 Example::
4700
4701 scan.rotate_xyphase(2.3)
4702
4703 """
4704 varlist = vars()
4705 self._math._rotate_xyphase(self, angle)
4706 self._add_history("rotate_xyphase", varlist)
4707 return
4708
4709 @asaplog_post_dec
4710 def swap_linears(self):
4711 """\
4712 Swap the linear polarisations XX and YY, or better the first two
4713 polarisations as this also works for ciculars.
4714 """
4715 varlist = vars()
4716 self._math._swap_linears(self)
4717 self._add_history("swap_linears", varlist)
4718 return
4719
4720 @asaplog_post_dec
4721 def invert_phase(self):
4722 """\
4723 Invert the phase of the complex polarisation
4724 """
4725 varlist = vars()
4726 self._math._invert_phase(self)
4727 self._add_history("invert_phase", varlist)
4728 return
4729
4730 @asaplog_post_dec
4731 def add(self, offset, insitu=None):
4732 """\
4733 Return a scan where all spectra have the offset added
4734
4735 Parameters:
4736
4737 offset: the offset
4738
4739 insitu: if False a new scantable is returned.
4740 Otherwise, the scaling is done in-situ
4741 The default is taken from .asaprc (False)
4742
4743 """
4744 if insitu is None: insitu = rcParams['insitu']
4745 self._math._setinsitu(insitu)
4746 varlist = vars()
4747 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4748 s._add_history("add", varlist)
4749 if insitu:
4750 self._assign(s)
4751 else:
4752 return s
4753
4754 @asaplog_post_dec
4755 def scale(self, factor, tsys=True, insitu=None):
4756 """\
4757
4758 Return a scan where all spectra are scaled by the given 'factor'
4759
4760 Parameters:
4761
4762 factor: the scaling factor (float or 1D float list)
4763
4764 insitu: if False a new scantable is returned.
4765 Otherwise, the scaling is done in-situ
4766 The default is taken from .asaprc (False)
4767
4768 tsys: if True (default) then apply the operation to Tsys
4769 as well as the data
4770
4771 """
4772 if insitu is None: insitu = rcParams['insitu']
4773 self._math._setinsitu(insitu)
4774 varlist = vars()
4775 s = None
4776 import numpy
4777 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4778 if isinstance(factor[0], list) or isinstance(factor[0],
4779 numpy.ndarray):
4780 from asapmath import _array2dOp
4781 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4782 else:
4783 s = scantable( self._math._arrayop( self, factor,
4784 "MUL", tsys ) )
4785 else:
4786 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4787 s._add_history("scale", varlist)
4788 if insitu:
4789 self._assign(s)
4790 else:
4791 return s
4792
4793 @preserve_selection
4794 def set_sourcetype(self, match, matchtype="pattern",
4795 sourcetype="reference"):
4796 """\
4797 Set the type of the source to be an source or reference scan
4798 using the provided pattern.
4799
4800 Parameters:
4801
4802 match: a Unix style pattern, regular expression or selector
4803
4804 matchtype: 'pattern' (default) UNIX style pattern or
4805 'regex' regular expression
4806
4807 sourcetype: the type of the source to use (source/reference)
4808
4809 """
4810 varlist = vars()
4811 stype = -1
4812 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4813 stype = 1
4814 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4815 stype = 0
4816 else:
4817 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4818 if matchtype.lower().startswith("p"):
4819 matchtype = "pattern"
4820 elif matchtype.lower().startswith("r"):
4821 matchtype = "regex"
4822 else:
4823 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4824 sel = selector()
4825 if isinstance(match, selector):
4826 sel = match
4827 else:
4828 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4829 self.set_selection(sel)
4830 self._setsourcetype(stype)
4831 self._add_history("set_sourcetype", varlist)
4832
4833
4834 def set_sourcename(self, name):
4835 varlist = vars()
4836 self._setsourcename(name)
4837 self._add_history("set_sourcename", varlist)
4838
4839 @asaplog_post_dec
4840 @preserve_selection
4841 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4842 """\
4843 This function allows to build quotients automatically.
4844 It assumes the observation to have the same number of
4845 "ons" and "offs"
4846
4847 Parameters:
4848
4849 preserve: you can preserve (default) the continuum or
4850 remove it. The equations used are
4851
4852 preserve: Output = Toff * (on/off) - Toff
4853
4854 remove: Output = Toff * (on/off) - Ton
4855
4856 mode: the on/off detection mode
4857 'paired' (default)
4858 identifies 'off' scans by the
4859 trailing '_R' (Mopra/Parkes) or
4860 '_e'/'_w' (Tid) and matches
4861 on/off pairs from the observing pattern
4862 'time'
4863 finds the closest off in time
4864
4865 .. todo:: verify argument is not implemented
4866
4867 """
4868 varlist = vars()
4869 modes = ["time", "paired"]
4870 if not mode in modes:
4871 msg = "please provide valid mode. Valid modes are %s" % (modes)
4872 raise ValueError(msg)
4873 s = None
4874 if mode.lower() == "paired":
4875 from asap._asap import srctype
4876 sel = self.get_selection()
4877 #sel.set_query("SRCTYPE==psoff")
4878 sel.set_types(srctype.psoff)
4879 self.set_selection(sel)
4880 offs = self.copy()
4881 #sel.set_query("SRCTYPE==pson")
4882 sel.set_types(srctype.pson)
4883 self.set_selection(sel)
4884 ons = self.copy()
4885 s = scantable(self._math._quotient(ons, offs, preserve))
4886 elif mode.lower() == "time":
4887 s = scantable(self._math._auto_quotient(self, mode, preserve))
4888 s._add_history("auto_quotient", varlist)
4889 return s
4890
4891 @asaplog_post_dec
4892 def mx_quotient(self, mask = None, weight='median', preserve=True):
4893 """\
4894 Form a quotient using "off" beams when observing in "MX" mode.
4895
4896 Parameters:
4897
4898 mask: an optional mask to be used when weight == 'stddev'
4899
4900 weight: How to average the off beams. Default is 'median'.
4901
4902 preserve: you can preserve (default) the continuum or
4903 remove it. The equations used are:
4904
4905 preserve: Output = Toff * (on/off) - Toff
4906
4907 remove: Output = Toff * (on/off) - Ton
4908
4909 """
4910 mask = mask or ()
4911 varlist = vars()
4912 on = scantable(self._math._mx_extract(self, 'on'))
4913 preoff = scantable(self._math._mx_extract(self, 'off'))
4914 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4915 from asapmath import quotient
4916 q = quotient(on, off, preserve)
4917 q._add_history("mx_quotient", varlist)
4918 return q
4919
4920 @asaplog_post_dec
4921 def freq_switch(self, insitu=None):
4922 """\
4923 Apply frequency switching to the data.
4924
4925 Parameters:
4926
4927 insitu: if False a new scantable is returned.
4928 Otherwise, the swictching is done in-situ
4929 The default is taken from .asaprc (False)
4930
4931 """
4932 if insitu is None: insitu = rcParams['insitu']
4933 self._math._setinsitu(insitu)
4934 varlist = vars()
4935 s = scantable(self._math._freqswitch(self))
4936 s._add_history("freq_switch", varlist)
4937 if insitu:
4938 self._assign(s)
4939 else:
4940 return s
4941
4942 @asaplog_post_dec
4943 def recalc_azel(self):
4944 """Recalculate the azimuth and elevation for each position."""
4945 varlist = vars()
4946 self._recalcazel()
4947 self._add_history("recalc_azel", varlist)
4948 return
4949
4950 @asaplog_post_dec
4951 def __add__(self, other):
4952 """
4953 implicit on all axes and on Tsys
4954 """
4955 varlist = vars()
4956 s = self.__op( other, "ADD" )
4957 s._add_history("operator +", varlist)
4958 return s
4959
4960 @asaplog_post_dec
4961 def __sub__(self, other):
4962 """
4963 implicit on all axes and on Tsys
4964 """
4965 varlist = vars()
4966 s = self.__op( other, "SUB" )
4967 s._add_history("operator -", varlist)
4968 return s
4969
4970 @asaplog_post_dec
4971 def __mul__(self, other):
4972 """
4973 implicit on all axes and on Tsys
4974 """
4975 varlist = vars()
4976 s = self.__op( other, "MUL" ) ;
4977 s._add_history("operator *", varlist)
4978 return s
4979
4980
4981 @asaplog_post_dec
4982 def __div__(self, other):
4983 """
4984 implicit on all axes and on Tsys
4985 """
4986 varlist = vars()
4987 s = self.__op( other, "DIV" )
4988 s._add_history("operator /", varlist)
4989 return s
4990
4991 @asaplog_post_dec
4992 def __op( self, other, mode ):
4993 s = None
4994 if isinstance(other, scantable):
4995 s = scantable(self._math._binaryop(self, other, mode))
4996 elif isinstance(other, float):
4997 if other == 0.0:
4998 raise ZeroDivisionError("Dividing by zero is not recommended")
4999 s = scantable(self._math._unaryop(self, other, mode, False))
5000 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
5001 if isinstance(other[0], list) \
5002 or isinstance(other[0], numpy.ndarray):
5003 from asapmath import _array2dOp
5004 s = _array2dOp( self, other, mode, False )
5005 else:
5006 s = scantable( self._math._arrayop( self, other,
5007 mode, False ) )
5008 else:
5009 raise TypeError("Other input is not a scantable or float value")
5010 return s
5011
5012 @asaplog_post_dec
5013 def get_fit(self, row=0):
5014 """\
5015 Print or return the stored fits for a row in the scantable
5016
5017 Parameters:
5018
5019 row: the row which the fit has been applied to.
5020
5021 """
5022 if row > self.nrow():
5023 return
5024 from asap.asapfit import asapfit
5025 fit = asapfit(self._getfit(row))
5026 asaplog.push( '%s' %(fit) )
5027 return fit.as_dict()
5028
5029 @preserve_selection
5030 def flag_nans(self):
5031 """\
5032 Utility function to flag NaN values in the scantable.
5033 """
5034 import numpy
5035 basesel = self.get_selection()
5036 for i in range(self.nrow()):
5037 sel = self.get_row_selector(i)
5038 self.set_selection(basesel+sel)
5039 nans = numpy.isnan(self._getspectrum(0))
5040 if numpy.any(nans):
5041 bnans = [ bool(v) for v in nans]
5042 self.flag(bnans)
5043
5044 self.set_selection(basesel)
5045
5046 def get_row_selector(self, rowno):
5047 return selector(rows=[rowno])
5048
5049 def _add_history(self, funcname, parameters):
5050 if not rcParams['scantable.history']:
5051 return
5052 # create date
5053 sep = "##"
5054 from datetime import datetime
5055 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5056 hist = dstr+sep
5057 hist += funcname+sep#cdate+sep
5058 if parameters.has_key('self'):
5059 del parameters['self']
5060 for k, v in parameters.iteritems():
5061 if type(v) is dict:
5062 for k2, v2 in v.iteritems():
5063 hist += k2
5064 hist += "="
5065 if isinstance(v2, scantable):
5066 hist += 'scantable'
5067 elif k2 == 'mask':
5068 if isinstance(v2, list) or isinstance(v2, tuple):
5069 hist += str(self._zip_mask(v2))
5070 else:
5071 hist += str(v2)
5072 else:
5073 hist += str(v2)
5074 else:
5075 hist += k
5076 hist += "="
5077 if isinstance(v, scantable):
5078 hist += 'scantable'
5079 elif k == 'mask':
5080 if isinstance(v, list) or isinstance(v, tuple):
5081 hist += str(self._zip_mask(v))
5082 else:
5083 hist += str(v)
5084 else:
5085 hist += str(v)
5086 hist += sep
5087 hist = hist[:-2] # remove trailing '##'
5088 self._addhistory(hist)
5089
5090
5091 def _zip_mask(self, mask):
5092 mask = list(mask)
5093 i = 0
5094 segments = []
5095 while mask[i:].count(1):
5096 i += mask[i:].index(1)
5097 if mask[i:].count(0):
5098 j = i + mask[i:].index(0)
5099 else:
5100 j = len(mask)
5101 segments.append([i, j])
5102 i = j
5103 return segments
5104
5105 def _get_ordinate_label(self):
5106 fu = "("+self.get_fluxunit()+")"
5107 import re
5108 lbl = "Intensity"
5109 if re.match(".K.", fu):
5110 lbl = "Brightness Temperature "+ fu
5111 elif re.match(".Jy.", fu):
5112 lbl = "Flux density "+ fu
5113 return lbl
5114
5115 def _check_ifs(self):
5116# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5117 nchans = [self.nchan(i) for i in self.getifnos()]
5118 nchans = filter(lambda t: t > 0, nchans)
5119 return (sum(nchans)/len(nchans) == nchans[0])
5120
5121 @asaplog_post_dec
5122 def _fill(self, names, unit, average, opts={}):
5123 first = True
5124 fullnames = []
5125 for name in names:
5126 name = os.path.expandvars(name)
5127 name = os.path.expanduser(name)
5128 if not os.path.exists(name):
5129 msg = "File '%s' does not exists" % (name)
5130 raise IOError(msg)
5131 fullnames.append(name)
5132 if average:
5133 asaplog.push('Auto averaging integrations')
5134 stype = int(rcParams['scantable.storage'].lower() == 'disk')
5135 for name in fullnames:
5136 tbl = Scantable(stype)
5137 if is_ms( name ):
5138 r = msfiller( tbl )
5139 else:
5140 r = filler( tbl )
5141 msg = "Importing %s..." % (name)
5142 asaplog.push(msg, False)
5143 r.open(name, opts)
5144 rx = rcParams['scantable.reference']
5145 r.setreferenceexpr(rx)
5146 r.fill()
5147 if average:
5148 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
5149 if not first:
5150 tbl = self._math._merge([self, tbl])
5151 Scantable.__init__(self, tbl)
5152 r.close()
5153 del r, tbl
5154 first = False
5155 #flush log
5156 asaplog.post()
5157 if unit is not None:
5158 self.set_fluxunit(unit)
5159 if not is_casapy():
5160 self.set_freqframe(rcParams['scantable.freqframe'])
5161
5162 def _get_verify_action( self, msg, action=None ):
5163 valid_act = ['Y', 'N', 'A', 'R']
5164 if not action or not isinstance(action, str):
5165 action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
5166 if action == '':
5167 return "Y"
5168 elif (action.upper()[0] in valid_act):
5169 return action.upper()[0]
5170 elif (action.upper()[0] in ['H','?']):
5171 print "Available actions of verification [Y|n|a|r]"
5172 print " Y : Yes for current data (default)"
5173 print " N : No for current data"
5174 print " A : Accept all in the following and exit from verification"
5175 print " R : Reject all in the following and exit from verification"
5176 print " H or ?: help (show this message)"
5177 return self._get_verify_action(msg)
5178 else:
5179 return 'Y'
5180
5181 def __getitem__(self, key):
5182 if key < 0:
5183 key += self.nrow()
5184 if key >= self.nrow():
5185 raise IndexError("Row index out of range.")
5186 return self._getspectrum(key)
5187
5188 def __setitem__(self, key, value):
5189 if key < 0:
5190 key += self.nrow()
5191 if key >= self.nrow():
5192 raise IndexError("Row index out of range.")
5193 if not hasattr(value, "__len__") or \
5194 len(value) > self.nchan(self.getif(key)):
5195 raise ValueError("Spectrum length doesn't match.")
5196 return self._setspectrum(value, key)
5197
5198 def __len__(self):
5199 return self.nrow()
5200
5201 def __iter__(self):
5202 for i in range(len(self)):
5203 yield self[i]
Note: See TracBrowser for help on using the repository browser.