source: trunk/python/scantable.py@ 2934

Last change on this file since 2934 was 2933, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: Yes CAS-6465

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: modified sd.scantable.parse_spw_selection() so that adjacent channel ranges in the same spw in the output are merged into one range. For example, a result that before this modification was {20: [0.0, 30.0], [31.0, 40.0]} should now be {20: [0.0, 40.0]}.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 206.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator: remember the scantable's selection before calling
    *func* and restore it afterwards, even when *func* raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # a scantable is a casacore table directory with the expected files
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    f = open(filename+'/table.info')
    first_line = f.readline()
    f.close()
    # the first line of table.info identifies the table type
    return bool(re.match('^Type = (Scantable)? *$', first_line))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # an MS is a casacore table directory whose table.info mentions
    # 'Measurement Set' on its first line
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    f = open(filename+'/table.info')
    first_line = f.readline()
    f.close()
    return first_line.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                          or [[5,5],10], but can NOT be simplified to [5,10].
                          when [5,10] given, it is interpreted as
                          [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # ---- validation phase: every element must be an int or a pair of ints
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                                   to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                                       to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
                               pair of integers specified as a tuple. \
                               Nested tuples are allowed \
                               to make individual selection for different IFs."


    # ---- normalisation phase: bring scalar/short forms to a canonical list
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # commonedge == True means a single [left,right] pair applies to all IFs;
    # any nested sequence (or length > 2) switches to the per-IF form
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF form: flatten single ints to [n,n] and concatenate pairs so
        # the result is one even-length flat list
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: in verbose mode push the error and a hint
    to the log, otherwise re-raise as RuntimeError with both texts."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode the progress options as the comma-separated string expected
    by the C++ layer, e.g. (True, 1000) -> 'true,1000'."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.

    Parameters:

        blinfo:  a dict of baseline parameters, or a list/tuple of such dicts
        maxirow: entries whose row number is >= maxirow are dropped

    Returns:

        a list of packed strings; entries rejected by do_pack_blinfo
        (empty strings) are omitted.  Returns None for any other input
        type (preserved from the original behaviour).
    """
    if isinstance(blinfo, dict):
        res = do_pack_blinfo(blinfo, maxirow)
        return [res] if res != '' else []
    elif isinstance(blinfo, (list, tuple)):
        # idiom fix: iterate directly instead of xrange(len(...)) indexing
        packed = [do_pack_blinfo(item, maxirow) for item in blinfo]
        return [p for p in packed if p != '']
191
def _blinfo_val_to_str(val):
    """Render a scalar or a (possibly one-level nested) sequence as a
    comma-separated string, e.g. [[0,10],[20,30]] -> '0,10,20,30'."""
    if isinstance(val, (list, tuple)):
        parts = []
        for item in val:
            if isinstance(item, (list, tuple)):
                parts.extend(str(sub) for sub in item)
            else:
                parts.append(str(item))
        return ",".join(parts)
    return str(val)

def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Parameters:

        blinfo:  dict with mandatory keys 'row', 'blfunc', 'masklist',
                 the function-specific key ('order'/'npiece'/'nwave'),
                 and optional clipping ('clipthresh'+'clipniter') and
                 linefinder ('thresh'+'edge'+'chan_avg_limit') keys
        maxirow: rows >= maxirow yield an empty string (skipped)

    Returns:

        a colon-separated packed string, or '' if the row is out of range

    Raises:

        ValueError: when a mandatory key is missing or the optional key
                    groups are only partially given
    """
    dinfo = {}
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        dinfo[key] = _blinfo_val_to_str(val)

    # rows beyond the table length are silently dropped by the caller
    if irow >= maxirow: return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _blinfo_val_to_str(blinfo[key])

    blfunc = dinfo['blfunc']
    # which function-specific parameter each baseline type requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # clipping parameters: must be given as a complete pair or not at all
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter']  = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder parameters: all three or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _blinfo_val_to_str(blinfo[key])
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.

    Parameters:

        sres: a list of packed result strings

    Returns:

        a list of dicts with keys 'row' (int), 'coeff' (list of float)
        and 'rms' (float)
    """
    res = []
    # idiom fix: iterate the entries directly instead of xrange(len(...))
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if string *s* (after stripping whitespace) parses as a
    float, else False.

    Fixes over the original: no bare ``except`` (now catches only the
    conversion/type failures), no unused local, and no ``finally: return``
    masking unexpected errors.
    """
    try:
        float(s.strip())
        return True
    except (ValueError, TypeError, AttributeError):
        # ValueError: not numeric; Attribute/TypeError: s is not a string
        return False
313
def is_frequency(s):
    """Return True if string *s* looks like a frequency, i.e. ends in
    'Hz' (case-insensitive, surrounding whitespace ignored)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a numeric string *s1* and a frequency string *s2*
    (value + '[prefix]Hz') into a pair of floats in Hz, both scaled by
    the SI prefix found on *s2*.

    Raises RuntimeError when the inputs do not look like a number and a
    frequency.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    val1 = s1.strip()
    val2 = s2.strip()

    # the character just before 'Hz' is the SI prefix (if not a digit)
    prefix = val2[-3:-2]
    if is_number(prefix):
        # no prefix: plain 'Hz'
        return (float(val1), float(val2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(val1) * factor, float(val2[:-3]) * factor)
338
def is_velocity(s):
    """Return True if string *s* looks like a velocity, i.e. ends in
    'm/s' (case-insensitive, surrounding whitespace ignored)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a numeric string *s1* and a velocity string *s2*
    (value + '[k]m/s') into a pair of floats in km/s, the default ASAP
    velocity unit.

    Raises RuntimeError when the inputs do not look like a number and a
    velocity.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    val1 = s1.strip()
    val2 = s2.strip()

    # the character just before 'm/s' is the prefix (digit means plain m/s)
    prefix = val2[-4:-3]
    if is_number(prefix):
        # unit is m/s -> scale down to km/s
        return (float(val1) * 1e-3, float(val2[:-3]) * 1e-3)
    factor = factor_list[prefix_list.index(prefix)]
    return (float(val1) * factor, float(val2[:-4]) * factor)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert velocity *vel* (km/s) into a frequency for the given rest
    frequency *restfreq*, using the requested doppler convention:
    'radio', 'optical', or anything else for the relativistic formula."""
    import math

    vel_c = 299792.458  # speed of light in km/s
    r = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - r)
    elif convention == 'optical':
        return restfreq / (1.0 + r)
    return restfreq * math.sqrt((1.0 - r) / (1.0 + r))
380
def get_restfreq_in_Hz(s_restfreq):
    """Parse a rest-frequency string such as '100GHz' or '1420MHz' and
    return the value in Hz as a float.

    Accepted units: 'Hz' with an optional single-letter SI prefix
    (a f p n u m k M G T P E); a bare number is taken to be Hz already.

    Raises:

        Exception: if a unit is present but is not a recognised
                   '[prefix]Hz' spelling.

    Changes over the original: the 26-line commented-out prefix table was
    removed (it duplicated the list-lookup above it), and the Python-2-only
    ``raise Exception, mesg`` statement was replaced with the call form,
    which is valid in both Python 2 and 3.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # walk backwards collecting the trailing alphabetic unit; everything
    # before it is the numeric value
    for i in range(len(s))[::-1]:
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value * factor
    else:
        raise Exception("wrong unit of restfreq.")
432
def normalise_restfreq(in_restfreq):
    """Normalise a rest-frequency specification into the form accepted by
    Scantable.set_restfreqs(): a float in Hz, or a list whose elements
    are floats in Hz or {'name': ..., 'value': float} dicts.

    Parameters:

        in_restfreq: a float/int (Hz), a string with unit (e.g. '100GHz'),
                     or a list / 1-D numpy.ndarray of any mixture of those
                     and dicts carrying 'name' and 'value' entries.

    Raises:

        Exception: for a multi-dimensional ndarray or an unsupported
                   element/argument type.

    Changes over the original: Python-2-only ``raise E, msg`` statements
    replaced by the call form; ``xrange(len(...))`` indexing replaced by
    direct iteration; integer checks generalised via numbers.Integral,
    which covers Python 2 int/long and numpy integer scalars alike.
    """
    import numbers
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, numbers.Integral):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, (list, numpy.ndarray)):
        if isinstance(in_restfreq, numpy.ndarray) and len(in_restfreq.shape) > 1:
            raise Exception("given in numpy.ndarray, in_restfreq must be 1-D.")
        res = []
        for elem in in_restfreq:
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, numbers.Integral):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                value = elem["value"]
                if isinstance(value, float):
                    res.append(elem)
                elif isinstance(value, numbers.Integral):
                    res.append({"name": elem["name"], "value": float(value)})
                elif isinstance(value, str):
                    res.append({"name": elem["name"],
                                "value": get_restfreq_in_Hz(value)})
                # NOTE(review): a dict whose 'value' has any other type is
                # silently skipped, preserved from the original behaviour.
            else:
                raise Exception("restfreq elements must be float, int, or string.")
        return res
    else:
        raise Exception("wrong type of restfreq given.")
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s* unless it is an empty string or
    an empty list, which both mean 'leave rest frequencies untouched'."""
    skip = (restfreq == '') or (restfreq == [])
    if not skip:
        s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # varlist captures the constructor arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward recognised keywords to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file: handled by the NRO filler path
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                   and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
572
573 @asaplog_post_dec
574 def save(self, name=None, format=None, overwrite=False):
575 """\
576 Store the scantable on disk. This can be an asap (aips++) Table,
577 SDFITS or MS2 format.
578
579 Parameters:
580
581 name: the name of the outputfile. For format 'ASCII'
582 this is the root file name (data in 'name'.txt
583 and header in 'name'_header.txt)
584
585 format: an optional file format. Default is ASAP.
586 Allowed are:
587
588 * 'ASAP' (save as ASAP [aips++] Table),
589 * 'SDFITS' (save as SDFITS file)
590 * 'ASCII' (saves as ascii text file)
591 * 'MS2' (saves as an casacore MeasurementSet V2)
592 * 'FITS' (save as image FITS - not readable by
593 class)
594 * 'CLASS' (save as FITS readable by CLASS)
595
596 overwrite: If the file should be overwritten if it exists.
597 The default False is to return with warning
598 without writing the output. USE WITH CARE.
599
600 Example::
601
602 scan.save('myscan.asap')
603 scan.save('myscan.sdfits', 'SDFITS')
604
605 """
606 from os import path
607 format = format or rcParams['scantable.save']
608 suffix = '.'+format.lower()
609 if name is None or name == "":
610 name = 'scantable'+suffix
611 msg = "No filename given. Using default name %s..." % name
612 asaplog.push(msg)
613 name = path.expandvars(name)
614 if path.isfile(name) or path.isdir(name):
615 if not overwrite:
616 msg = "File %s exists." % name
617 raise IOError(msg)
618 format2 = format.upper()
619 if format2 == 'ASAP':
620 self._save(name)
621 elif format2 == 'MS2':
622 msopt = {'ms': {'overwrite': overwrite } }
623 from asap._asap import mswriter
624 writer = mswriter( self )
625 writer.write( name, msopt )
626 else:
627 from asap._asap import stwriter as stw
628 writer = stw(format2)
629 writer.write(self, name)
630 return
631
632 def copy(self):
633 """Return a copy of this scantable.
634
635 *Note*:
636
637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
638
639 Example::
640
641 copiedscan = scan.copy()
642
643 """
644 sd = scantable(Scantable._copy(self))
645 return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
678 def get_scan(self, scanid=None):
679 """\
680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
682
683 *Note*:
684
685 See scantable.drop_scan() for the inverse operation.
686
687 Parameters:
688
689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
692
693 Example::
694
695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
700 newscan = scan.get_scan([0, 2, 7, 10])
701
702 """
703 if scanid is None:
704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
706 try:
707 bsel = self.get_selection()
708 sel = selector()
709 if type(scanid) is str:
710 sel.set_name(scanid)
711 return self._select_copy(sel)
712 elif type(scanid) is int:
713 sel.set_scans([scanid])
714 return self._select_copy(sel)
715 elif type(scanid) is list:
716 sel.set_scans(scanid)
717 return self._select_copy(sel)
718 else:
719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
720 raise TypeError(msg)
721 except RuntimeError:
722 raise
723
724 def __str__(self):
725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
754 def get_spectrum(self, rowno):
755 """Return the spectrum for the current row in the scantable as a list.
756
757 Parameters:
758
759 rowno: the row number to retrieve the spectrum from
760
761 """
762 return self._getspectrum(rowno)
763
764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
766
767 Parameters:
768
769 rowno: the row number to retrieve the mask from
770
771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
789
790 *Note*:
791
792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
798 i.e.::
799
800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
803 Parameters:
804
805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
810 def get_selection(self):
811 """\
812 Get the selection object currently set on this scantable.
813
814 Example::
815
816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
819
820 """
821 return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/velocity) of
        # the extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # for min/max the extremum channels are needed for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        # in the '_abc' case statvals is filled per-row in the loop below
        if not rtnabc:
            # NOTE(review): 'row == None' should be 'row is None'
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # collect the abscissa value; the spectral value only
                    # appears in the report string
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # NOTE(review): the report is round-tripped through a predictable,
        # user-named file in /tmp (Python-2-only 'print >>' and 'commands'
        # module); tempfile.mkstemp would be safer — confirm before changing.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        # NOTE(review): statunit holds the value from the LAST loop
        # iteration (or raises NameError when there are no rows) — confirm.
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
1002 """\
1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
1005
1006 Parameters:
1007
1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
1010
1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
1013
1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
1022 def stddev(self, mask=None):
1023 """\
1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
1027
1028 Parameters:
1029
1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
1033 Example::
1034
1035 scan.set_unit('channel')
1036 msk = scan.create_mask([100, 200], [500, 600])
1037 scan.stddev(mask=m)
1038
1039 """
1040 return self.stats(stat='stddev', mask=mask);
1041
1042
1043 def get_column_names(self):
1044 """\
1045 Return a list of column names, which can be used for selection.
1046 """
1047 return list(Scantable.get_column_names(self))
1048
1049 def get_tsys(self, row=-1):
1050 """\
1051 Return the System temperatures.
1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
1057 Returns:
1058
1059 a list of Tsys values for the current selection
1060
1061 """
1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
1064 return self._row_callback(self._gettsys, "Tsys")
1065
1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
1069
1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
1098 def get_weather(self, row=-1):
1099 """\
1100 Return the weather information.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111 if row >= len(self):
1112 raise IndexError("row out of range")
1113 values = self._get_column(self._get_weather, row)
1114 if row > -1:
1115 return {'temperature': values[0],
1116 'pressure': values[1], 'humidity' : values[2],
1117 'windspeed' : values[3], 'windaz' : values[4]
1118 }
1119 else:
1120 out = []
1121 for r in values:
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
    def _row_callback(self, callback, label):
        """Apply *callback* to every row and log a per-row report.

        Builds a textual summary for each row (scan, source, time and,
        where the table has more than one, beam/IF/pol indices, followed
        by the callback value), pushes it to asaplog under the heading
        *label*, and returns the list of per-row callback results.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one of them
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            # outvec[i] is the value appended just above for this row
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
1151
1152 def _get_column(self, callback, row=-1, *args):
1153 """
1154 """
1155 if row == -1:
1156 return [callback(i, *args) for i in range(self.nrow())]
1157 else:
1158 if 0 <= row < self.nrow():
1159 return callback(row, *args)
1160
1161
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time in the selection)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the sub-second/second/minute fields that the formatted
            # string lacks at this precision.
            # NOTE(review): relies on Python 2 integer division in
            # (6-prec)/2 -- revisit if this module is ported to Python 3
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1207
1208
1209 def get_inttime(self, row=-1):
1210 """\
1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
1213
1214 Parameters:
1215
1216 row: row no of integration. Default -1 return all rows.
1217
1218 """
1219 return self._get_column(self._getinttime, row)
1220
1221
1222 def get_sourcename(self, row=-1):
1223 """\
1224 Get a list source names for the observations.
1225 Return a string for each integration in the scantable.
1226 Parameters:
1227
1228 row: row no of integration. Default -1 return all rows.
1229
1230 """
1231 return self._get_column(self._getsourcename, row)
1232
1233 def get_elevation(self, row=-1):
1234 """\
1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
1237
1238 Parameters:
1239
1240 row: row no of integration. Default -1 return all rows.
1241
1242 """
1243 return self._get_column(self._getelevation, row)
1244
1245 def get_azimuth(self, row=-1):
1246 """\
1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
1249
1250 Parameters:
1251 row: row no of integration. Default -1 return all rows.
1252
1253 """
1254 return self._get_column(self._getazimuth, row)
1255
1256 def get_parangle(self, row=-1):
1257 """\
1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
1260
1261 Parameters:
1262
1263 row: row no of integration. Default -1 return all rows.
1264
1265 """
1266 return self._get_column(self._getparangle, row)
1267
1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
1271 Return a string for each integration in the scantable.
1272
1273 Parameters:
1274
1275 row: row no of integration. Default -1 return all rows
1276
1277 """
1278 return self._get_column(self._getdirection, row)
1279
1280 def get_directionval(self, row=-1):
1281 """\
1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
1284
1285 Parameters:
1286
1287 row: row no of integration. Default -1 return all rows
1288
1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
1292 @asaplog_post_dec
1293 def set_unit(self, unit='channel'):
1294 """\
1295 Set the unit for all following operations on this scantable
1296
1297 Parameters:
1298
1299 unit: optional unit, default is 'channel'. Use one of '*Hz',
1300 'km/s', 'channel' or equivalent ''
1301
1302 """
1303 varlist = vars()
1304 if unit in ['', 'pixel', 'channel']:
1305 unit = ''
1306 inf = list(self._getcoordinfo())
1307 inf[0] = unit
1308 self._setcoordinfo(inf)
1309 self._add_history("set_unit", varlist)
1310
1311 @asaplog_post_dec
1312 def set_instrument(self, instr):
1313 """\
1314 Set the instrument for subsequent processing.
1315
1316 Parameters:
1317
1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1320
1321 """
1322 self._setInstrument(instr)
1323 self._add_history("set_instument", vars())
1324
1325 @asaplog_post_dec
1326 def set_feedtype(self, feedtype):
1327 """\
1328 Overwrite the feed type, which might not be set correctly.
1329
1330 Parameters:
1331
1332 feedtype: 'linear' or 'circular'
1333
1334 """
1335 self._setfeedtype(feedtype)
1336 self._add_history("set_feedtype", vars())
1337
1338 @asaplog_post_dec
1339 def get_doppler(self):
1340 """\
1341 Get the doppler.
1342 """
1343 return self._getcoordinfo()[2]
1344
1345 @asaplog_post_dec
1346 def set_doppler(self, doppler='RADIO'):
1347 """\
1348 Set the doppler for all following operations on this scantable.
1349
1350 Parameters:
1351
1352 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1353
1354 """
1355 varlist = vars()
1356 inf = list(self._getcoordinfo())
1357 inf[2] = doppler
1358 self._setcoordinfo(inf)
1359 self._add_history("set_doppler", vars())
1360
1361 @asaplog_post_dec
1362 def set_freqframe(self, frame=None):
1363 """\
1364 Set the frame type of the Spectral Axis.
1365
1366 Parameters:
1367
1368 frame: an optional frame type, default 'LSRK'. Valid frames are:
1369 'TOPO', 'LSRD', 'LSRK', 'BARY',
1370 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1371
1372 Example::
1373
1374 scan.set_freqframe('BARY')
1375
1376 """
1377 frame = frame or rcParams['scantable.freqframe']
1378 varlist = vars()
1379 # "REST" is not implemented in casacore
1380 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1381 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1382 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1383 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1384
1385 if frame in valid:
1386 inf = list(self._getcoordinfo())
1387 inf[1] = frame
1388 self._setcoordinfo(inf)
1389 self._add_history("set_freqframe", varlist)
1390 else:
1391 msg = "Please specify a valid freq type. Valid types are:\n", valid
1392 raise TypeError(msg)
1393
1394 @asaplog_post_dec
1395 def set_dirframe(self, frame=""):
1396 """\
1397 Set the frame type of the Direction on the sky.
1398
1399 Parameters:
1400
1401 frame: an optional frame type, default ''. Valid frames are:
1402 'J2000', 'B1950', 'GALACTIC'
1403
1404 Example:
1405
1406 scan.set_dirframe('GALACTIC')
1407
1408 """
1409 varlist = vars()
1410 Scantable.set_dirframe(self, frame)
1411 self._add_history("set_dirframe", varlist)
1412
1413 def get_unit(self):
1414 """\
1415 Get the default unit set in this scantable
1416
1417 Returns:
1418
1419 A unit string
1420
1421 """
1422 inf = self._getcoordinfo()
1423 unit = inf[0]
1424 if unit == '': unit = 'channel'
1425 return unit
1426
1427 @asaplog_post_dec
1428 def get_abcissa(self, rowno=0):
1429 """\
1430 Get the abcissa in the current coordinate setup for the currently
1431 selected Beam/IF/Pol
1432
1433 Parameters:
1434
1435 rowno: an optional row number in the scantable. Default is the
1436 first row, i.e. rowno=0
1437
1438 Returns:
1439
1440 The abcissa values and the format string (as a dictionary)
1441
1442 """
1443 abc = self._getabcissa(rowno)
1444 lbl = self._getabcissalabel(rowno)
1445 return abc, lbl
1446
1447 @asaplog_post_dec
1448 def flag(self, mask=None, unflag=False, row=-1):
1449 """\
1450 Flag the selected data using an optional channel mask.
1451
1452 Parameters:
1453
1454 mask: an optional channel mask, created with create_mask. Default
1455 (no mask) is all channels.
1456
1457 unflag: if True, unflag the data
1458
1459 row: an optional row number in the scantable.
1460 Default -1 flags all rows
1461
1462 """
1463 varlist = vars()
1464 mask = mask or []
1465 self._flag(row, mask, unflag)
1466 self._add_history("flag", varlist)
1467
1468 @asaplog_post_dec
1469 def flag_row(self, rows=None, unflag=False):
1470 """\
1471 Flag the selected data in row-based manner.
1472
1473 Parameters:
1474
1475 rows: list of row numbers to be flagged. Default is no row
1476 (must be explicitly specified to execute row-based
1477 flagging).
1478
1479 unflag: if True, unflag the data.
1480
1481 """
1482 varlist = vars()
1483 if rows is None:
1484 rows = []
1485 self._flag_row(rows, unflag)
1486 self._add_history("flag_row", varlist)
1487
1488 @asaplog_post_dec
1489 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1490 """\
1491 Flag the selected data outside a specified range (in channel-base)
1492
1493 Parameters:
1494
1495 uthres: upper threshold.
1496
1497 dthres: lower threshold
1498
1499 clipoutside: True for flagging data outside the range
1500 [dthres:uthres].
1501 False for flagging data inside the range.
1502
1503 unflag: if True, unflag the data.
1504
1505 """
1506 varlist = vars()
1507 self._clip(uthres, dthres, clipoutside, unflag)
1508 self._add_history("clip", varlist)
1509
1510 @asaplog_post_dec
1511 def lag_flag(self, start, end, unit="MHz", insitu=None):
1512 """\
1513 Flag the data in 'lag' space by providing a frequency to remove.
1514 Flagged data in the scantable get interpolated over the region.
1515 No taper is applied.
1516
1517 Parameters:
1518
1519 start: the start frequency (really a period within the
1520 bandwidth) or period to remove
1521
1522 end: the end frequency or period to remove
1523
1524 unit: the frequency unit (default 'MHz') or '' for
1525 explicit lag channels
1526
1527 *Notes*:
1528
1529 It is recommended to flag edges of the band or strong
1530 signals beforehand.
1531
1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
1536 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1537 if not (unit == "" or base.has_key(unit)):
1538 raise ValueError("%s is not a valid unit." % unit)
1539 if unit == "":
1540 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1541 else:
1542 s = scantable(self._math._lag_flag(self, start*base[unit],
1543 end*base[unit], "frequency"))
1544 s._add_history("lag_flag", varlist)
1545 if insitu:
1546 self._assign(s)
1547 else:
1548 return s
1549
1550 @asaplog_post_dec
1551 def fft(self, rowno=None, mask=None, getrealimag=False):
1552 """\
1553 Apply FFT to the spectra.
1554 Flagged data in the scantable get interpolated over the region.
1555
1556 Parameters:
1557
1558 rowno: The row number(s) to be processed. int, list
1559 and tuple are accepted. By default (None), FFT
1560 is applied to the whole data.
1561
1562 mask: Auxiliary channel mask(s). Given as a boolean
1563 list, it is applied to all specified rows.
1564 A list of boolean lists can also be used to
1565 apply different masks. In the latter case, the
1566 length of 'mask' must be the same as that of
1567 'rowno'. The default is None.
1568
1569 getrealimag: If True, returns the real and imaginary part
1570 values of the complex results.
1571 If False (the default), returns the amplitude
1572 (absolute value) normalised with Ndata/2 and
1573 phase (argument, in unit of radian).
1574
1575 Returns:
1576
1577 A list of dictionaries containing the results for each spectrum.
1578 Each dictionary contains two values, the real and the imaginary
1579 parts when getrealimag = True, or the amplitude(absolute value)
1580 and the phase(argument) when getrealimag = False. The key for
1581 these values are 'real' and 'imag', or 'ampl' and 'phase',
1582 respectively.
1583 """
1584 if rowno is None:
1585 rowno = []
1586 if isinstance(rowno, int):
1587 rowno = [rowno]
1588 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1589 raise TypeError("The row number(s) must be int, list or tuple.")
1590 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1591
1592 usecommonmask = True
1593
1594 if mask is None:
1595 mask = []
1596 if isinstance(mask, list) or isinstance(mask, tuple):
1597 if len(mask) == 0:
1598 mask = [[]]
1599 else:
1600 if isinstance(mask[0], bool):
1601 if len(mask) != self.nchan(self.getif(rowno[0])):
1602 raise ValueError("The spectra and the mask have "
1603 "different length.")
1604 mask = [mask]
1605 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1606 usecommonmask = False
1607 if len(mask) != len(rowno):
1608 raise ValueError("When specifying masks for each "
1609 "spectrum, the numbers of them "
1610 "must be identical.")
1611 for i in xrange(mask):
1612 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1613 raise ValueError("The spectra and the mask have "
1614 "different length.")
1615 else:
1616 raise TypeError("The mask must be a boolean list or "
1617 "a list of boolean list.")
1618 else:
1619 raise TypeError("The mask must be a boolean list or a list of "
1620 "boolean list.")
1621
1622 res = []
1623
1624 imask = 0
1625 for whichrow in rowno:
1626 fspec = self._fft(whichrow, mask[imask], getrealimag)
1627 nspec = len(fspec)
1628
1629 i = 0
1630 v1 = []
1631 v2 = []
1632 reselem = {"real":[],"imag":[]} if getrealimag \
1633 else {"ampl":[],"phase":[]}
1634
1635 while (i < nspec):
1636 v1.append(fspec[i])
1637 v2.append(fspec[i+1])
1638 i += 2
1639
1640 if getrealimag:
1641 reselem["real"] += v1
1642 reselem["imag"] += v2
1643 else:
1644 reselem["ampl"] += v1
1645 reselem["phase"] += v2
1646
1647 res.append(reselem)
1648
1649 if not usecommonmask:
1650 imask += 1
1651
1652 return res
1653
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk =  scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abcissa values (in the current unit) used to test each channel
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        msk = _n_bools(n, False)
        # Accept both create_mask([a,b],[c,d]) and create_mask([[a,b],[c,d]]):
        # if the last element of the last arg is a number, args is already a
        # sequence of windows; otherwise the windows are wrapped in args[0].
        # (legacy and/or trick - UGLY!!!)

        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value means a one-channel window [w, w]
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or  len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            # NOTE(review): this swap mutates the caller's window list
            # in place when start > end
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        # Python 2 dict API; 'invert' flips included/excluded regions
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1726
1727 def get_masklist(self, mask=None, row=0, silent=False):
1728 """\
1729 Compute and return a list of mask windows, [min, max].
1730
1731 Parameters:
1732
1733 mask: channel mask, created with create_mask.
1734
1735 row: calcutate the masklist using the specified row
1736 for unit conversions, default is row=0
1737 only necessary if frequency varies over rows.
1738
1739 Returns:
1740
1741 [min, max], [min2, max2], ...
1742 Pairs of start/end points (inclusive)specifying
1743 the masked regions
1744
1745 """
1746 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1747 raise TypeError("The mask should be list or tuple.")
1748 if len(mask) <= 0:
1749 raise TypeError("The mask elements should be > 0")
1750 data = self._getabcissa(row)
1751 if len(data) != len(mask):
1752 msg = "Number of channels in scantable != number of mask elements"
1753 raise TypeError(msg)
1754 u = self._getcoordinfo()[0]
1755 if u == "":
1756 u = "channel"
1757 msg = "The current mask window unit is %s" % u
1758 i = self._check_ifs()
1759 if not i:
1760 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1761 if not silent:
1762 asaplog.push(msg)
1763 masklist = []
1764 ist, ien = None, None
1765 ist, ien=self.get_mask_indices(mask)
1766 if ist is not None and ien is not None:
1767 for i in xrange(len(ist)):
1768 range=[data[ist[i]],data[ien[i]]]
1769 range.sort()
1770 masklist.append([range[0],range[1]])
1771 return masklist
1772
1773 def get_mask_indices(self, mask=None):
1774 """\
1775 Compute and Return lists of mask start indices and mask end indices.
1776
1777 Parameters:
1778
1779 mask: channel mask, created with create_mask.
1780
1781 Returns:
1782
1783 List of mask start indices and that of mask end indices,
1784 i.e., [istart1,istart2,....], [iend1,iend2,....].
1785
1786 """
1787 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1788 raise TypeError("The mask should be list or tuple.")
1789 if len(mask) <= 0:
1790 raise TypeError("The mask elements should be > 0")
1791 istart = []
1792 iend = []
1793 if mask[0]:
1794 istart.append(0)
1795 for i in range(len(mask)-1):
1796 if not mask[i] and mask[i+1]:
1797 istart.append(i+1)
1798 elif mask[i] and not mask[i+1]:
1799 iend.append(i)
1800 if mask[len(mask)-1]:
1801 iend.append(len(mask)-1)
1802 if len(istart) != len(iend):
1803 raise RuntimeError("Numbers of mask start != mask end.")
1804 for i in range(len(istart)):
1805 if istart[i] > iend[i]:
1806 raise RuntimeError("Mask start index > mask end index")
1807 break
1808 return istart,iend
1809
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
            restfreq: the rest frequency.
                     examples: '115.2712GHz', 115271201800.0
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
            doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        Returns:
            A dictionary of selected (valid) spw and masklist pairs,
            e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # parsing is done in channel units; the original unit (and,
        # optionally, rest frequency / frame / doppler) is restored at the end
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz' -- select spws overlapping the frequency range
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                # negative product <=> the two intervals overlap
                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s' -- select spws overlapping the velocity range
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    restf = self.get_restfreqs()[molid][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    dppl = self.get_doppler()
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                            # NOTE(review): this clipping step also runs after the
                            # single-channel branch above, where expr_pmin/expr_pmax
                            # may be unbound or stale -- confirm intended behaviour
                            cmin = max(pmin, expr_pmin)
                            cmax = min(pmax, expr_pmax)
                            # if the given range of channel selection has overwrap with
                            # that of current spw, output the overwrap area.
                            if (cmin <= cmax):
                                cmin = float(int(cmin + 0.5))
                                cmax = float(int(cmax + 0.5))
                                crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if (len(crange_list[0]) > 0):
                    if res.has_key(spw):
                        res[spw].extend(crange_list)
                    else:
                        res[spw] = crange_list

        for spw in res.keys():
            if spw in valid_ifs:
                # remove duplicated channel ranges.
                # overlapping ranges, and also ranges only 1 channel apart,
                # are merged into one (CAS-6465: e.g. [0,30],[31,40] -> [0,40])
                for i in reversed(xrange(len(res[spw]))):
                    for j in xrange(i):
                        if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \
                           (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):
                            res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])
                            res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])
                            res[spw].pop(i)
                            break
            else:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)

        return res
2173
2174 @asaplog_post_dec
2175 def get_first_rowno_by_if(self, ifno):
2176 found = False
2177 for irow in xrange(self.nrow()):
2178 if (self.getif(irow) == ifno):
2179 res = irow
2180 found = True
2181 break
2182
2183 if not found: raise RuntimeError("No valid spw.")
2184
2185 return res
2186
2187 @asaplog_post_dec
2188 def _get_coordinate_list(self):
2189 res = []
2190 spws = self.getifnos()
2191 for spw in spws:
2192 elem = {}
2193 elem['if'] = spw
2194 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2195 res.append(elem)
2196
2197 return res
2198
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            # NOTE(review): execution continues past this post("ERROR") if it
            # does not raise -- confirm asaplog.post("ERROR") aborts.
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty selection means "all IFs, all channels"
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row that belongs to this spw so
                        # its coordinate/abcissa can be used below
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset = half a channel width in the current unit
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None for "whole spectrum" selections
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2336
2337 @asaplog_post_dec
2338 def parse_idx_selection(self, mode, selexpr):
2339 """
2340 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2341 BEAMNO, and row number
2342
2343 Parameters:
2344 mode : which column to select.
2345 ['scan',|'if'|'pol'|'beam'|'row']
2346 selexpr : A comma separated selection expression.
2347 examples:
2348 '' = all (returns [])
2349 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2350 (returns [0,1,4,5,6,9])
2351 Returns:
2352 A List of selected indices
2353 """
2354 if selexpr == "":
2355 return []
2356 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2357 'b': 'beam', 'r': 'row'}
2358 smode = mode.lower()[0]
2359 if not (smode in valid_modes.keys()):
2360 msg = "Invalid mode '%s'. Valid modes are %s" %\
2361 (mode, str(valid_modes.values()))
2362 asaplog.post()
2363 asaplog.push(msg)
2364 asaplog.post("ERROR")
2365 mode = valid_modes[smode]
2366 minidx = None
2367 maxidx = None
2368 if smode == 'r':
2369 minidx = 0
2370 maxidx = self.nrow()
2371 else:
2372 idx = getattr(self,"get"+mode+"nos")()
2373 minidx = min(idx)
2374 maxidx = max(idx)
2375 del idx
2376 # split selexpr by "<spaces>,<spaces>"
2377 comma_sep = re.compile('\s*,\s*')
2378 sellist = comma_sep.split(selexpr)
2379 idxlist = []
2380 for currselstr in sellist:
2381 # single range (may include ~, < or >)
2382 currlist = self._parse_selection(currselstr, typestr='integer',
2383 minval=minidx,maxval=maxidx)
2384 for thelist in currlist:
2385 idxlist += range(thelist[0],thelist[1]+1)
2386 # remove duplicated elements after first ones
2387 for i in reversed(xrange(len(idxlist))):
2388 if idxlist.index(idxlist[i]) < i:
2389 idxlist.pop(i)
2390 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2391 asaplog.push(msg)
2392 return idxlist
2393
2394 def _parse_selection(self, selstr, typestr='float', offset=0.,
2395 minval=None, maxval=None):
2396 """
2397 Parameters:
2398 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2399 typestr : The type of the values in returned list
2400 ('integer' or 'float')
2401 offset : The offset value to subtract from or add to
2402 the boundary value if the selection string
2403 includes '<' or '>' [Valid only for typestr='float']
2404 minval, maxval : The minimum/maximum values to set if the
2405 selection string includes '<' or '>'.
2406 The list element is filled with None by default.
2407 Returns:
2408 A list of min/max pair of selections.
2409 Example:
2410 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2411 --> returns [[0,2],[5,7],[9,9]]
2412 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2413 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2414 """
2415 # split selstr by '<spaces>;<spaces>'
2416 semi_sep = re.compile('\s*;\s*')
2417 selgroups = semi_sep.split(selstr)
2418 sellists = []
2419 if typestr.lower().startswith('int'):
2420 formatfunc = int
2421 offset = 1
2422 else:
2423 formatfunc = float
2424
2425 for currsel in selgroups:
2426 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2427 minsel = minval
2428 maxsel = maxval
2429 if currsel.find('~') > 0:
2430 # val0 <= x <= val1
2431 minsel = formatfunc(currsel.split('~')[0].strip())
2432 maxsel = formatfunc(currsel.split('~')[1].strip())
2433 elif currsel.strip().find('<=') > -1:
2434 bound = currsel.split('<=')
2435 try: # try "x <= val"
2436 minsel = minval
2437 maxsel = formatfunc(bound[1].strip())
2438 except ValueError: # now "val <= x"
2439 minsel = formatfunc(bound[0].strip())
2440 maxsel = maxval
2441 elif currsel.strip().find('>=') > -1:
2442 bound = currsel.split('>=')
2443 try: # try "x >= val"
2444 minsel = formatfunc(bound[1].strip())
2445 maxsel = maxval
2446 except ValueError: # now "val >= x"
2447 minsel = minval
2448 maxsel = formatfunc(bound[0].strip())
2449 elif currsel.strip().find('<') > -1:
2450 bound = currsel.split('<')
2451 try: # try "x < val"
2452 minsel = minval
2453 maxsel = formatfunc(bound[1].strip()) \
2454 - formatfunc(offset)
2455 except ValueError: # now "val < x"
2456 minsel = formatfunc(bound[0].strip()) \
2457 + formatfunc(offset)
2458 maxsel = maxval
2459 elif currsel.strip().find('>') > -1:
2460 bound = currsel.split('>')
2461 try: # try "x > val"
2462 minsel = formatfunc(bound[1].strip()) \
2463 + formatfunc(offset)
2464 maxsel = maxval
2465 except ValueError: # now "val > x"
2466 minsel = minval
2467 maxsel = formatfunc(bound[0].strip()) \
2468 - formatfunc(offset)
2469 else:
2470 minsel = formatfunc(currsel)
2471 maxsel = formatfunc(currsel)
2472 sellists.append([minsel,maxsel])
2473 return sellists
2474
2475# def get_restfreqs(self):
2476# """
2477# Get the restfrequency(s) stored in this scantable.
2478# The return value(s) are always of unit 'Hz'
2479# Parameters:
2480# none
2481# Returns:
2482# a list of doubles
2483# """
2484# return list(self._getrestfreqs())
2485
2486 def get_restfreqs(self, ids=None):
2487 """\
2488 Get the restfrequency(s) stored in this scantable.
2489 The return value(s) are always of unit 'Hz'
2490
2491 Parameters:
2492
2493 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2494 be retrieved
2495
2496 Returns:
2497
2498 dictionary containing ids and a list of doubles for each id
2499
2500 """
2501 if ids is None:
2502 rfreqs = {}
2503 idlist = self.getmolnos()
2504 for i in idlist:
2505 rfreqs[i] = list(self._getrestfreqs(i))
2506 return rfreqs
2507 else:
2508 if type(ids) == list or type(ids) == tuple:
2509 rfreqs = {}
2510 for i in ids:
2511 rfreqs[i] = list(self._getrestfreqs(i))
2512 return rfreqs
2513 else:
2514 return list(self._getrestfreqs(ids))
2515
2516 @asaplog_post_dec
2517 def set_restfreqs(self, freqs=None, unit='Hz'):
2518 """\
2519 Set or replace the restfrequency specified and
2520 if the 'freqs' argument holds a scalar,
2521 then that rest frequency will be applied to all the selected
2522 data. If the 'freqs' argument holds
2523 a vector, then it MUST be of equal or smaller length than
2524 the number of IFs (and the available restfrequencies will be
2525 replaced by this vector). In this case, *all* data have
2526 the restfrequency set per IF according
2527 to the corresponding value you give in the 'freqs' vector.
2528 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2529 IF 1 gets restfreq 2e9.
2530
2531 You can also specify the frequencies via a linecatalog.
2532
2533 Parameters:
2534
2535 freqs: list of rest frequency values or string idenitfiers
2536
2537 unit: unit for rest frequency (default 'Hz')
2538
2539
2540 Example::
2541
2542 # set the given restfrequency for the all currently selected IFs
2543 scan.set_restfreqs(freqs=1.4e9)
2544 # set restfrequencies for the n IFs (n > 1) in the order of the
2545 # list, i.e
2546 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2547 # len(list_of_restfreqs) == nIF
2548 # for nIF == 1 the following will set multiple restfrequency for
2549 # that IF
2550 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2551 # set multiple restfrequencies per IF. as a list of lists where
2552 # the outer list has nIF elements, the inner s arbitrary
2553 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2554
2555 *Note*:
2556
2557 To do more sophisticate Restfrequency setting, e.g. on a
2558 source and IF basis, use scantable.set_selection() before using
2559 this function::
2560
2561 # provided your scantable is called scan
2562 selection = selector()
2563 selection.set_name('ORION*')
2564 selection.set_ifs([1])
2565 scan.set_selection(selection)
2566 scan.set_restfreqs(freqs=86.6e9)
2567
2568 """
2569 varlist = vars()
2570 from asap import linecatalog
2571 # simple value
2572 if isinstance(freqs, int) or isinstance(freqs, float):
2573 self._setrestfreqs([freqs], [""], unit)
2574 # list of values
2575 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2576 # list values are scalars
2577 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2578 if len(freqs) == 1:
2579 self._setrestfreqs(freqs, [""], unit)
2580 else:
2581 # allow the 'old' mode of setting mulitple IFs
2582 savesel = self._getselection()
2583 sel = self.get_selection()
2584 iflist = self.getifnos()
2585 if len(freqs)>len(iflist):
2586 raise ValueError("number of elements in list of list "
2587 "exeeds the current IF selections")
2588 iflist = self.getifnos()
2589 for i, fval in enumerate(freqs):
2590 sel.set_ifs(iflist[i])
2591 self._setselection(sel)
2592 self._setrestfreqs([fval], [""], unit)
2593 self._setselection(savesel)
2594
2595 # list values are dict, {'value'=, 'name'=)
2596 elif isinstance(freqs[-1], dict):
2597 values = []
2598 names = []
2599 for d in freqs:
2600 values.append(d["value"])
2601 names.append(d["name"])
2602 self._setrestfreqs(values, names, unit)
2603 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2604 savesel = self._getselection()
2605 sel = self.get_selection()
2606 iflist = self.getifnos()
2607 if len(freqs)>len(iflist):
2608 raise ValueError("number of elements in list of list exeeds"
2609 " the current IF selections")
2610 for i, fval in enumerate(freqs):
2611 sel.set_ifs(iflist[i])
2612 self._setselection(sel)
2613 self._setrestfreqs(fval, [""], unit)
2614 self._setselection(savesel)
2615 # freqs are to be taken from a linecatalog
2616 elif isinstance(freqs, linecatalog):
2617 savesel = self._getselection()
2618 sel = self.get_selection()
2619 for i in xrange(freqs.nrow()):
2620 sel.set_ifs(iflist[i])
2621 self._setselection(sel)
2622 self._setrestfreqs([freqs.get_frequency(i)],
2623 [freqs.get_name(i)], "MHz")
2624 # ensure that we are not iterating past nIF
2625 if i == self.nif()-1: break
2626 self._setselection(savesel)
2627 else:
2628 return
2629 self._add_history("set_restfreqs", varlist)
2630
2631 @asaplog_post_dec
2632 def shift_refpix(self, delta):
2633 """\
2634 Shift the reference pixel of the Spectra Coordinate by an
2635 integer amount.
2636
2637 Parameters:
2638
2639 delta: the amount to shift by
2640
2641 *Note*:
2642
2643 Be careful using this with broadband data.
2644
2645 """
2646 varlist = vars()
2647 Scantable.shift_refpix(self, delta)
2648 s._add_history("shift_refpix", varlist)
2649
2650 @asaplog_post_dec
2651 def history(self, filename=None, nrows=-1, start=0):
2652 """\
2653 Print the history. Optionally to a file.
2654
2655 Parameters:
2656
2657 filename: The name of the file to save the history to.
2658
2659 """
2660 n = self._historylength()
2661 if nrows == -1:
2662 nrows = n
2663 if start+nrows > n:
2664 nrows = nrows-start
2665 if n > 1000 and nrows == n:
2666 nrows = 1000
2667 start = n-1000
2668 asaplog.push("Warning: History has {0} entries. Displaying last "
2669 "1000".format(n))
2670 hist = list(self._gethistory(nrows, start))
2671 out = "-"*80
2672 for h in hist:
2673 if not h.strip():
2674 continue
2675 if h.find("---") >-1:
2676 continue
2677 else:
2678 items = h.split("##")
2679 date = items[0]
2680 func = items[1]
2681 items = items[2:]
2682 out += "\n"+date+"\n"
2683 out += "Function: %s\n Parameters:" % (func)
2684 for i in items:
2685 if i == '':
2686 continue
2687 s = i.split("=")
2688 out += "\n %s = %s" % (s[0], s[1])
2689 out = "\n".join([out, "*"*80])
2690 if filename is not None:
2691 if filename is "":
2692 filename = 'scantable_history.txt'
2693 filename = os.path.expandvars(os.path.expanduser(filename))
2694 if not os.path.isdir(filename):
2695 data = open(filename, 'w')
2696 data.write(out)
2697 data.close()
2698 else:
2699 msg = "Illegal file name '%s'." % (filename)
2700 raise IOError(msg)
2701 return page(out)
2702
2703 #
2704 # Maths business
2705 #
2706 @asaplog_post_dec
2707 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2708 avmode="NONE"):
2709 """\
2710 Return the (time) weighted average of a scan. Scans will be averaged
2711 only if the source direction (RA/DEC) is within 1' otherwise
2712
2713 *Note*:
2714
2715 in channels only - align if necessary
2716
2717 Parameters:
2718
2719 mask: an optional mask (only used for 'var' and 'tsys'
2720 weighting)
2721
2722 scanav: True averages each scan separately
2723 False (default) averages all scans together,
2724
2725 weight: Weighting scheme.
2726 'none' (mean no weight)
2727 'var' (1/var(spec) weighted)
2728 'tsys' (1/Tsys**2 weighted)
2729 'tint' (integration time weighted)
2730 'tintsys' (Tint/Tsys**2)
2731 'median' ( median averaging)
2732 The default is 'tint'
2733
2734 align: align the spectra in velocity before averaging. It takes
2735 the time of the first spectrum as reference time.
2736 avmode: 'SOURCE' - also select by source name - or
2737 'NONE' (default). Not applicable for scanav=True or
2738 weight=median
2739
2740 Example::
2741
2742 # time average the scantable without using a mask
2743 newscan = scan.average_time()
2744
2745 """
2746 varlist = vars()
2747 weight = weight or 'TINT'
2748 mask = mask or ()
2749 scanav = (scanav and 'SCAN') or avmode.upper()
2750 scan = (self, )
2751
2752 if align:
2753 scan = (self.freq_align(insitu=False), )
2754 asaplog.push("Note: Alignment is don on a source-by-source basis")
2755 asaplog.push("Note: Averaging (by default) is not")
2756 # we need to set it to SOURCE averaging here
2757 s = None
2758 if weight.upper() == 'MEDIAN':
2759 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2760 scanav))
2761 else:
2762 s = scantable(self._math._average(scan, mask, weight.upper(),
2763 scanav))
2764 s._add_history("average_time", varlist)
2765 return s
2766
2767 @asaplog_post_dec
2768 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2769 """\
2770 Return a scan where all spectra are converted to either
2771 Jansky or Kelvin depending upon the flux units of the scan table.
2772 By default the function tries to look the values up internally.
2773 If it can't find them (or if you want to over-ride), you must
2774 specify EITHER jyperk OR eta (and D which it will try to look up
2775 also if you don't set it). jyperk takes precedence if you set both.
2776
2777 Parameters:
2778
2779 jyperk: the Jy / K conversion factor
2780
2781 eta: the aperture efficiency
2782
2783 d: the geometric diameter (metres)
2784
2785 insitu: if False a new scantable is returned.
2786 Otherwise, the scaling is done in-situ
2787 The default is taken from .asaprc (False)
2788
2789 """
2790 if insitu is None: insitu = rcParams['insitu']
2791 self._math._setinsitu(insitu)
2792 varlist = vars()
2793 jyperk = jyperk or -1.0
2794 d = d or -1.0
2795 eta = eta or -1.0
2796 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2797 s._add_history("convert_flux", varlist)
2798 if insitu: self._assign(s)
2799 else: return s
2800
2801 @asaplog_post_dec
2802 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2803 """\
2804 Return a scan after applying a gain-elevation correction.
2805 The correction can be made via either a polynomial or a
2806 table-based interpolation (and extrapolation if necessary).
2807 You specify polynomial coefficients, an ascii table or neither.
2808 If you specify neither, then a polynomial correction will be made
2809 with built in coefficients known for certain telescopes (an error
2810 will occur if the instrument is not known).
2811 The data and Tsys are *divided* by the scaling factors.
2812
2813 Parameters:
2814
2815 poly: Polynomial coefficients (default None) to compute a
2816 gain-elevation correction as a function of
2817 elevation (in degrees).
2818
2819 filename: The name of an ascii file holding correction factors.
2820 The first row of the ascii file must give the column
2821 names and these MUST include columns
2822 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2823 by this) somewhere.
2824 The second row must give the data type of the
2825 column. Use 'R' for Real and 'I' for Integer.
2826 An example file would be
2827 (actual factors are arbitrary) :
2828
2829 TIME ELEVATION FACTOR
2830 R R R
2831 0.1 0 0.8
2832 0.2 20 0.85
2833 0.3 40 0.9
2834 0.4 60 0.85
2835 0.5 80 0.8
2836 0.6 90 0.75
2837
2838 method: Interpolation method when correcting from a table.
2839 Values are 'nearest', 'linear' (default), 'cubic'
2840 and 'spline'
2841
2842 insitu: if False a new scantable is returned.
2843 Otherwise, the scaling is done in-situ
2844 The default is taken from .asaprc (False)
2845
2846 """
2847
2848 if insitu is None: insitu = rcParams['insitu']
2849 self._math._setinsitu(insitu)
2850 varlist = vars()
2851 poly = poly or ()
2852 from os.path import expandvars
2853 filename = expandvars(filename)
2854 s = scantable(self._math._gainel(self, poly, filename, method))
2855 s._add_history("gain_el", varlist)
2856 if insitu:
2857 self._assign(s)
2858 else:
2859 return s
2860
2861 @asaplog_post_dec
2862 def freq_align(self, reftime=None, method='cubic', insitu=None):
2863 """\
2864 Return a scan where all rows have been aligned in frequency/velocity.
2865 The alignment frequency frame (e.g. LSRK) is that set by function
2866 set_freqframe.
2867
2868 Parameters:
2869
2870 reftime: reference time to align at. By default, the time of
2871 the first row of data is used.
2872
2873 method: Interpolation method for regridding the spectra.
2874 Choose from 'nearest', 'linear', 'cubic' (default)
2875 and 'spline'
2876
2877 insitu: if False a new scantable is returned.
2878 Otherwise, the scaling is done in-situ
2879 The default is taken from .asaprc (False)
2880
2881 """
2882 if insitu is None: insitu = rcParams["insitu"]
2883 oldInsitu = self._math._insitu()
2884 self._math._setinsitu(insitu)
2885 varlist = vars()
2886 reftime = reftime or ""
2887 s = scantable(self._math._freq_align(self, reftime, method))
2888 s._add_history("freq_align", varlist)
2889 self._math._setinsitu(oldInsitu)
2890 if insitu:
2891 self._assign(s)
2892 else:
2893 return s
2894
2895 @asaplog_post_dec
2896 def opacity(self, tau=None, insitu=None):
2897 """\
2898 Apply an opacity correction. The data
2899 and Tsys are multiplied by the correction factor.
2900
2901 Parameters:
2902
2903 tau: (list of) opacity from which the correction factor is
2904 exp(tau*ZD)
2905 where ZD is the zenith-distance.
2906 If a list is provided, it has to be of length nIF,
2907 nIF*nPol or 1 and in order of IF/POL, e.g.
2908 [opif0pol0, opif0pol1, opif1pol0 ...]
2909 if tau is `None` the opacities are determined from a
2910 model.
2911
2912 insitu: if False a new scantable is returned.
2913 Otherwise, the scaling is done in-situ
2914 The default is taken from .asaprc (False)
2915
2916 """
2917 if insitu is None:
2918 insitu = rcParams['insitu']
2919 self._math._setinsitu(insitu)
2920 varlist = vars()
2921 if not hasattr(tau, "__len__"):
2922 tau = [tau]
2923 s = scantable(self._math._opacity(self, tau))
2924 s._add_history("opacity", varlist)
2925 if insitu:
2926 self._assign(s)
2927 else:
2928 return s
2929
2930 @asaplog_post_dec
2931 def bin(self, width=5, insitu=None):
2932 """\
2933 Return a scan where all spectra have been binned up.
2934
2935 Parameters:
2936
2937 width: The bin width (default=5) in pixels
2938
2939 insitu: if False a new scantable is returned.
2940 Otherwise, the scaling is done in-situ
2941 The default is taken from .asaprc (False)
2942
2943 """
2944 if insitu is None:
2945 insitu = rcParams['insitu']
2946 self._math._setinsitu(insitu)
2947 varlist = vars()
2948 s = scantable(self._math._bin(self, width))
2949 s._add_history("bin", varlist)
2950 if insitu:
2951 self._assign(s)
2952 else:
2953 return s
2954
2955 @asaplog_post_dec
2956 def reshape(self, first, last, insitu=None):
2957 """Resize the band by providing first and last channel.
2958 This will cut off all channels outside [first, last].
2959 """
2960 if insitu is None:
2961 insitu = rcParams['insitu']
2962 varlist = vars()
2963 if last < 0:
2964 last = self.nchan()-1 + last
2965 s = None
2966 if insitu:
2967 s = self
2968 else:
2969 s = self.copy()
2970 s._reshape(first,last)
2971 s._add_history("reshape", varlist)
2972 if not insitu:
2973 return s
2974
2975 @asaplog_post_dec
2976 def resample(self, width=5, method='cubic', insitu=None):
2977 """\
2978 Return a scan where all spectra have been binned up.
2979
2980 Parameters:
2981
2982 width: The bin width (default=5) in pixels
2983
2984 method: Interpolation method when correcting from a table.
2985 Values are 'nearest', 'linear', 'cubic' (default)
2986 and 'spline'
2987
2988 insitu: if False a new scantable is returned.
2989 Otherwise, the scaling is done in-situ
2990 The default is taken from .asaprc (False)
2991
2992 """
2993 if insitu is None:
2994 insitu = rcParams['insitu']
2995 self._math._setinsitu(insitu)
2996 varlist = vars()
2997 s = scantable(self._math._resample(self, method, width))
2998 s._add_history("resample", varlist)
2999 if insitu:
3000 self._assign(s)
3001 else:
3002 return s
3003
3004 @asaplog_post_dec
3005 def average_pol(self, mask=None, weight='none'):
3006 """\
3007 Average the Polarisations together.
3008
3009 Parameters:
3010
3011 mask: An optional mask defining the region, where the
3012 averaging will be applied. The output will have all
3013 specified points masked.
3014
3015 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3016 weighted), or 'tsys' (1/Tsys**2 weighted)
3017
3018 """
3019 varlist = vars()
3020 mask = mask or ()
3021 s = scantable(self._math._averagepol(self, mask, weight.upper()))
3022 s._add_history("average_pol", varlist)
3023 return s
3024
3025 @asaplog_post_dec
3026 def average_beam(self, mask=None, weight='none'):
3027 """\
3028 Average the Beams together.
3029
3030 Parameters:
3031 mask: An optional mask defining the region, where the
3032 averaging will be applied. The output will have all
3033 specified points masked.
3034
3035 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3036 weighted), or 'tsys' (1/Tsys**2 weighted)
3037
3038 """
3039 varlist = vars()
3040 mask = mask or ()
3041 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3042 s._add_history("average_beam", varlist)
3043 return s
3044
3045 def parallactify(self, pflag):
3046 """\
3047 Set a flag to indicate whether this data should be treated as having
3048 been 'parallactified' (total phase == 0.0)
3049
3050 Parameters:
3051
3052 pflag: Bool indicating whether to turn this on (True) or
3053 off (False)
3054
3055 """
3056 varlist = vars()
3057 self._parallactify(pflag)
3058 self._add_history("parallactify", varlist)
3059
3060 @asaplog_post_dec
3061 def convert_pol(self, poltype=None):
3062 """\
3063 Convert the data to a different polarisation type.
3064 Note that you will need cross-polarisation terms for most conversions.
3065
3066 Parameters:
3067
3068 poltype: The new polarisation type. Valid types are:
3069 'linear', 'circular', 'stokes' and 'linpol'
3070
3071 """
3072 varlist = vars()
3073 s = scantable(self._math._convertpol(self, poltype))
3074 s._add_history("convert_pol", varlist)
3075 return s
3076
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a pristine copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # per-row verification state: 'H' asks every row, 'N' auto-rejects
        # all remaining rows; reset to None after the first row (see below)
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3169
3170 @asaplog_post_dec
3171 def regrid_channel(self, width=5, plot=False, insitu=None):
3172 """\
3173 Regrid the spectra by the specified channel width
3174
3175 Parameters:
3176
3177 width: The channel width (float) of regridded spectra
3178 in the current spectral unit.
3179
3180 plot: [NOT IMPLEMENTED YET]
3181 plot the original and the regridded spectra.
3182 In this each indivual fit has to be approved, by
3183 typing 'y' or 'n'
3184
3185 insitu: if False a new scantable is returned.
3186 Otherwise, the scaling is done in-situ
3187 The default is taken from .asaprc (False)
3188
3189 """
3190 if insitu is None: insitu = rcParams['insitu']
3191 varlist = vars()
3192
3193 if plot:
3194 asaplog.post()
3195 asaplog.push("Verification plot is not implemtnetd yet.")
3196 asaplog.post("WARN")
3197
3198 s = self.copy()
3199 s._regrid_specchan(width)
3200
3201 s._add_history("regrid_channel", varlist)
3202
3203# if plot:
3204# from asap.asapplotter import new_asaplot
3205# theplot = new_asaplot(rcParams['plotter.gui'])
3206# from matplotlib import rc as rcp
3207# rcp('lines', linewidth=1)
3208# theplot.set_panels()
3209# ylab=s._get_ordinate_label()
3210# #theplot.palette(0,["#777777","red"])
3211# for r in xrange(s.nrow()):
3212# xsm=s._getabcissa(r)
3213# ysm=s._getspectrum(r)
3214# xorg=orgscan._getabcissa(r)
3215# yorg=orgscan._getspectrum(r)
3216# theplot.clear()
3217# theplot.hold()
3218# theplot.set_axes('ylabel',ylab)
3219# theplot.set_axes('xlabel',s._getabcissalabel(r))
3220# theplot.set_axes('title',s._getsourcename(r))
3221# theplot.set_line(label='Original',color="#777777")
3222# theplot.plot(xorg,yorg)
3223# theplot.set_line(label='Smoothed',color="red")
3224# theplot.plot(xsm,ysm)
3225# ### Ugly part for legend
3226# for i in [0,1]:
3227# theplot.subplots[0]['lines'].append(
3228# [theplot.subplots[0]['axes'].lines[i]]
3229# )
3230# theplot.release()
3231# ### Ugly part for legend
3232# theplot.subplots[0]['lines']=[]
3233# res = raw_input("Accept smoothing ([y]/n): ")
3234# if res.upper() == 'N':
3235# s._setspectrum(yorg, r)
3236# theplot.quit()
3237# del theplot
3238# del orgscan
3239
3240 if insitu: self._assign(s)
3241 else: return s
3242
3243 @asaplog_post_dec
3244 def _parse_wn(self, wn):
3245 if isinstance(wn, list) or isinstance(wn, tuple):
3246 return wn
3247 elif isinstance(wn, int):
3248 return [ wn ]
3249 elif isinstance(wn, str):
3250 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3251 val = wn.split('-')
3252 val = [int(val[0]), int(val[1])]
3253 val.sort()
3254 res = [i for i in xrange(val[0], val[1]+1)]
3255 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3256 val = int(wn[2:])+1
3257 res = [i for i in xrange(val)]
3258 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3259 val = int(wn[:-2])+1
3260 res = [i for i in xrange(val)]
3261 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3262 val = int(wn[1:])
3263 res = [i for i in xrange(val)]
3264 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3265 val = int(wn[:-1])
3266 res = [i for i in xrange(val)]
3267 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3268 # then interpreted in C++
3269 # side as [a,a+1,...,a_nyq]
3270 # (CAS-3759)
3271 val = int(wn[2:])
3272 res = [val, -999]
3273 #res = [i for i in xrange(val, self.nchan()/2+1)]
3274 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3275 # then interpreted in C++
3276 # side as [a,a+1,...,a_nyq]
3277 # (CAS-3759)
3278 val = int(wn[:-2])
3279 res = [val, -999]
3280 #res = [i for i in xrange(val, self.nchan()/2+1)]
3281 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3282 # then interpreted in C++
3283 # side as [a+1,a+2,...,a_nyq]
3284 # (CAS-3759)
3285 val = int(wn[1:])+1
3286 res = [val, -999]
3287 #res = [i for i in xrange(val, self.nchan()/2+1)]
3288 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3289 # then interpreted in C++
3290 # side as [a+1,a+2,...,a_nyq]
3291 # (CAS-3759)
3292 val = int(wn[:-1])+1
3293 res = [val, -999]
3294 #res = [i for i in xrange(val, self.nchan()/2+1)]
3295
3296 return res
3297 else:
3298 msg = 'wrong value given for addwn/rejwn'
3299 raise RuntimeError(msg)
3300
3301 @asaplog_post_dec
3302 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3303 """\
3304 Subtract baseline based on parameters written in Baseline Table.
3305
3306 Parameters:
3307 insitu: if True, baseline fitting/subtraction is done
3308 in-situ. If False, a new scantable with
3309 baseline subtracted is returned. Actually,
3310 format of the returned value depends on both
3311 insitu and retfitres (see below).
3312 The default is taken from .asaprc (False)
3313 retfitres: if True, the results of baseline fitting (i.e.,
3314 coefficients and rms) are returned.
3315 default is False.
3316 The format of the returned value of this
3317 function varies as follows:
3318 (1) in case insitu=True and retfitres=True:
3319 fitting result.
3320 (2) in case insitu=True and retfitres=False:
3321 None.
3322 (3) in case insitu=False and retfitres=True:
3323 a dictionary containing a new scantable
3324 (with baseline subtracted) and the fitting
3325 results.
3326 (4) in case insitu=False and retfitres=False:
3327 a new scantable (with baseline subtracted).
3328 inbltable: name of input baseline table. The row number of
3329 scantable and that of inbltable must be
3330 identical.
3331 outbltable: name of output baseline table where baseline
3332 parameters and fitting results recorded.
3333 default is ''(no output).
3334 overwrite: if True when an existing baseline table is
3335 specified for outbltable, overwrites it.
3336 Otherwise there is no harm.
3337 default is False.
3338 """
3339
3340 try:
3341 varlist = vars()
3342 if retfitres is None: retfitres = False
3343 if inbltable is None: raise ValueError("bltable missing.")
3344 if outbltable is None: outbltable = ''
3345 if overwrite is None: overwrite = False
3346
3347 if insitu is None: insitu = rcParams['insitu']
3348 if insitu:
3349 workscan = self
3350 else:
3351 workscan = self.copy()
3352
3353 sres = workscan._apply_bltable(inbltable,
3354 retfitres,
3355 outbltable,
3356 os.path.exists(outbltable),
3357 overwrite)
3358 if retfitres: res = parse_fitresult(sres)
3359
3360 workscan._add_history('apply_bltable', varlist)
3361
3362 if insitu:
3363 self._assign(workscan)
3364 if retfitres:
3365 return res
3366 else:
3367 return None
3368 else:
3369 if retfitres:
3370 return {'scantable': workscan, 'fitresults': res}
3371 else:
3372 return workscan
3373
3374 except RuntimeError, e:
3375 raise_fitting_failure_exception(e)
3376
3377 @asaplog_post_dec
3378 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3379 """\
3380 Subtract baseline based on parameters written in the input list.
3381
3382 Parameters:
3383 insitu: if True, baseline fitting/subtraction is done
3384 in-situ. If False, a new scantable with
3385 baseline subtracted is returned. Actually,
3386 format of the returned value depends on both
3387 insitu and retfitres (see below).
3388 The default is taken from .asaprc (False)
3389 retfitres: if True, the results of baseline fitting (i.e.,
3390 coefficients and rms) are returned.
3391 default is False.
3392 The format of the returned value of this
3393 function varies as follows:
3394 (1) in case insitu=True and retfitres=True:
3395 fitting result.
3396 (2) in case insitu=True and retfitres=False:
3397 None.
3398 (3) in case insitu=False and retfitres=True:
3399 a dictionary containing a new scantable
3400 (with baseline subtracted) and the fitting
3401 results.
3402 (4) in case insitu=False and retfitres=False:
3403 a new scantable (with baseline subtracted).
3404 blinfo: baseline parameter set stored in a dictionary
3405 or a list of dictionary. Each dictionary
3406 corresponds to each spectrum and must contain
3407 the following keys and values:
3408 'row': row number,
3409 'blfunc': function name. available ones include
3410 'poly', 'chebyshev', 'cspline' and
3411 'sinusoid',
3412 'order': maximum order of polynomial. needed
3413 if blfunc='poly' or 'chebyshev',
3414 'npiece': number or piecewise polynomial.
3415 needed if blfunc='cspline',
3416 'nwave': a list of sinusoidal wave numbers.
3417 needed if blfunc='sinusoid', and
3418 'masklist': min-max windows for channel mask.
3419 the specified ranges will be used
3420 for fitting.
3421 bltable: name of output baseline table where baseline
3422 parameters and fitting results recorded.
3423 default is ''(no output).
3424 overwrite: if True when an existing baseline table is
3425 specified for bltable, overwrites it.
3426 Otherwise there is no harm.
3427 default is False.
3428
3429 Example:
3430 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3431 'masklist':[[10,350],[352,510]]},
3432 {'row':1, 'blfunc':'cspline', 'npiece':3,
3433 'masklist':[[3,16],[19,404],[407,511]]}
3434 ])
3435
3436 the first spectrum (row=0) will be fitted with polynomial
3437 of order=5 and the next one (row=1) will be fitted with cubic
3438 spline consisting of 3 pieces.
3439 """
3440
3441 try:
3442 varlist = vars()
3443 if retfitres is None: retfitres = False
3444 if blinfo is None: blinfo = []
3445 if bltable is None: bltable = ''
3446 if overwrite is None: overwrite = False
3447
3448 if insitu is None: insitu = rcParams['insitu']
3449 if insitu:
3450 workscan = self
3451 else:
3452 workscan = self.copy()
3453
3454 nrow = workscan.nrow()
3455
3456 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3457
3458 print "in_blinfo=< "+ str(in_blinfo)+" >"
3459
3460 sres = workscan._sub_baseline(in_blinfo,
3461 retfitres,
3462 bltable,
3463 os.path.exists(bltable),
3464 overwrite)
3465 if retfitres: res = parse_fitresult(sres)
3466
3467 workscan._add_history('sub_baseline', varlist)
3468
3469 if insitu:
3470 self._assign(workscan)
3471 if retfitres:
3472 return res
3473 else:
3474 return None
3475 else:
3476 if retfitres:
3477 return {'scantable': workscan, 'fitresults': res}
3478 else:
3479 return workscan
3480
3481 except RuntimeError, e:
3482 raise_fitting_failure_exception(e)
3483
3484 @asaplog_post_dec
3485 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3486 whichrow=None, uselinefinder=None, edge=None,
3487 threshold=None, chan_avg_limit=None):
3488 """\
3489 Calculates and returns model selection criteria for a specified
3490 baseline model and a given spectrum data.
3491 Available values include Akaike Information Criterion (AIC), the
3492 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3493 Bayesian Information Criterion (BIC) and the Generalised Cross
3494 Validation (GCV).
3495
3496 Parameters:
3497 value: name of model selection criteria to calculate.
3498 available ones include 'aic', 'aicc', 'bic' and
3499 'gcv'. default is 'aicc'.
3500 blfunc: baseline function name. available ones include
3501 'chebyshev', 'cspline' and 'sinusoid'.
3502 default is 'chebyshev'.
3503 order: parameter for basline function. actually stands for
3504 order of polynomial (order) for 'chebyshev',
3505 number of spline pieces (npiece) for 'cspline' and
3506 maximum wave number for 'sinusoid', respectively.
3507 default is 5 (which is also the default order value
3508 for [auto_]chebyshev_baseline()).
3509 mask: an optional mask. default is [].
3510 whichrow: row number. default is 0 (the first row)
3511 uselinefinder: use sd.linefinder() to flag out line regions
3512 default is True.
3513 edge: an optional number of channel to drop at
3514 the edge of spectrum. If only one value is
3515 specified, the same number will be dropped
3516 from both sides of the spectrum. Default
3517 is to keep all channels. Nested tuples
3518 represent individual edge selection for
3519 different IFs (a number of spectral channels
3520 can be different)
3521 default is (0, 0).
3522 threshold: the threshold used by line finder. It is
3523 better to keep it large as only strong lines
3524 affect the baseline solution.
3525 default is 3.
3526 chan_avg_limit: a maximum number of consequtive spectral
3527 channels to average during the search of
3528 weak and broad lines. The default is no
3529 averaging (and no search for weak lines).
3530 If such lines can affect the fitted baseline
3531 (e.g. a high order polynomial is fitted),
3532 increase this parameter (usually values up
3533 to 8 are reasonable). Most users of this
3534 method should find the default value sufficient.
3535 default is 1.
3536
3537 Example:
3538 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3539 """
3540
3541 try:
3542 varlist = vars()
3543
3544 if value is None: value = 'aicc'
3545 if blfunc is None: blfunc = 'chebyshev'
3546 if order is None: order = 5
3547 if mask is None: mask = []
3548 if whichrow is None: whichrow = 0
3549 if uselinefinder is None: uselinefinder = True
3550 if edge is None: edge = (0, 0)
3551 if threshold is None: threshold = 3
3552 if chan_avg_limit is None: chan_avg_limit = 1
3553
3554 return self._calc_aic(value, blfunc, order, mask,
3555 whichrow, uselinefinder, edge,
3556 threshold, chan_avg_limit)
3557
3558 except RuntimeError, e:
3559 raise_fitting_failure_exception(e)
3560
3561 @asaplog_post_dec
3562 def sinusoid_baseline(self, mask=None, applyfft=None,
3563 fftmethod=None, fftthresh=None,
3564 addwn=None, rejwn=None,
3565 insitu=None,
3566 clipthresh=None, clipniter=None,
3567 plot=None,
3568 getresidual=None,
3569 showprogress=None, minnrow=None,
3570 outlog=None,
3571 blfile=None, csvformat=None,
3572 bltable=None):
3573 """\
3574 Return a scan which has been baselined (all rows) with sinusoidal
3575 functions.
3576
3577 Parameters:
3578 mask: an optional mask
3579 applyfft: if True use some method, such as FFT, to find
3580 strongest sinusoidal components in the wavenumber
3581 domain to be used for baseline fitting.
3582 default is True.
3583 fftmethod: method to find the strong sinusoidal components.
3584 now only 'fft' is available and it is the default.
3585 fftthresh: the threshold to select wave numbers to be used for
3586 fitting from the distribution of amplitudes in the
3587 wavenumber domain.
3588 both float and string values accepted.
3589 given a float value, the unit is set to sigma.
3590 for string values, allowed formats include:
3591 'xsigma' or 'x' (= x-sigma level. e.g.,
3592 '3sigma'), or
3593 'topx' (= the x strongest ones, e.g. 'top5').
3594 default is 3.0 (unit: sigma).
3595 addwn: the additional wave numbers to be used for fitting.
3596 list or integer value is accepted to specify every
3597 wave numbers. also string value can be used in case
3598 you need to specify wave numbers in a certain range,
3599 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3600 '<a' (= 0,1,...,a-2,a-1),
3601 '>=a' (= a, a+1, ... up to the maximum wave
3602 number corresponding to the Nyquist
3603 frequency for the case of FFT).
3604 default is [0].
3605 rejwn: the wave numbers NOT to be used for fitting.
3606 can be set just as addwn but has higher priority:
3607 wave numbers which are specified both in addwn
3608 and rejwn will NOT be used. default is [].
3609 insitu: if False a new scantable is returned.
3610 Otherwise, the scaling is done in-situ
3611 The default is taken from .asaprc (False)
3612 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3613 clipniter: maximum number of iteration of 'clipthresh'-sigma
3614 clipping (default is 0)
3615 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3616 plot the fit and the residual. In this each
3617 indivual fit has to be approved, by typing 'y'
3618 or 'n'
3619 getresidual: if False, returns best-fit values instead of
3620 residual. (default is True)
3621 showprogress: show progress status for large data.
3622 default is True.
3623 minnrow: minimum number of input spectra to show.
3624 default is 1000.
3625 outlog: Output the coefficients of the best-fit
3626 function to logger (default is False)
3627 blfile: Name of a text file in which the best-fit
3628 parameter values to be written
3629 (default is '': no file/logger output)
3630 csvformat: if True blfile is csv-formatted, default is False.
3631 bltable: name of a baseline table where fitting results
3632 (coefficients, rms, etc.) are to be written.
3633 if given, fitting results will NOT be output to
3634 scantable (insitu=True) or None will be
3635 returned (insitu=False).
3636 (default is "": no table output)
3637
3638 Example:
3639 # return a scan baselined by a combination of sinusoidal curves
3640 # having wave numbers in spectral window up to 10,
3641 # also with 3-sigma clipping, iteration up to 4 times
3642 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3643
3644 Note:
3645 The best-fit parameter values output in logger and/or blfile are now
3646 based on specunit of 'channel'.
3647 """
3648
3649 try:
3650 varlist = vars()
3651
3652 if insitu is None: insitu = rcParams['insitu']
3653 if insitu:
3654 workscan = self
3655 else:
3656 workscan = self.copy()
3657
3658 if mask is None: mask = []
3659 if applyfft is None: applyfft = True
3660 if fftmethod is None: fftmethod = 'fft'
3661 if fftthresh is None: fftthresh = 3.0
3662 if addwn is None: addwn = [0]
3663 if rejwn is None: rejwn = []
3664 if clipthresh is None: clipthresh = 3.0
3665 if clipniter is None: clipniter = 0
3666 if plot is None: plot = False
3667 if getresidual is None: getresidual = True
3668 if showprogress is None: showprogress = True
3669 if minnrow is None: minnrow = 1000
3670 if outlog is None: outlog = False
3671 if blfile is None: blfile = ''
3672 if csvformat is None: csvformat = False
3673 if bltable is None: bltable = ''
3674
3675 sapplyfft = 'true' if applyfft else 'false'
3676 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3677
3678 scsvformat = 'T' if csvformat else 'F'
3679
3680 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3681 workscan._sinusoid_baseline(mask,
3682 fftinfo,
3683 #applyfft, fftmethod.lower(),
3684 #str(fftthresh).lower(),
3685 workscan._parse_wn(addwn),
3686 workscan._parse_wn(rejwn),
3687 clipthresh, clipniter,
3688 getresidual,
3689 pack_progress_params(showprogress,
3690 minnrow),
3691 outlog, scsvformat+blfile,
3692 bltable)
3693 workscan._add_history('sinusoid_baseline', varlist)
3694
3695 if bltable == '':
3696 if insitu:
3697 self._assign(workscan)
3698 else:
3699 return workscan
3700 else:
3701 if not insitu:
3702 return None
3703
3704 except RuntimeError, e:
3705 raise_fitting_failure_exception(e)
3706
3707
3708 @asaplog_post_dec
3709 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3710 fftmethod=None, fftthresh=None,
3711 addwn=None, rejwn=None,
3712 insitu=None,
3713 clipthresh=None, clipniter=None,
3714 edge=None, threshold=None, chan_avg_limit=None,
3715 plot=None,
3716 getresidual=None,
3717 showprogress=None, minnrow=None,
3718 outlog=None,
3719 blfile=None, csvformat=None,
3720 bltable=None):
3721 """\
3722 Return a scan which has been baselined (all rows) with sinusoidal
3723 functions.
3724 Spectral lines are detected first using linefinder and masked out
3725 to avoid them affecting the baseline solution.
3726
3727 Parameters:
3728 mask: an optional mask retreived from scantable
3729 applyfft: if True use some method, such as FFT, to find
3730 strongest sinusoidal components in the wavenumber
3731 domain to be used for baseline fitting.
3732 default is True.
3733 fftmethod: method to find the strong sinusoidal components.
3734 now only 'fft' is available and it is the default.
3735 fftthresh: the threshold to select wave numbers to be used for
3736 fitting from the distribution of amplitudes in the
3737 wavenumber domain.
3738 both float and string values accepted.
3739 given a float value, the unit is set to sigma.
3740 for string values, allowed formats include:
3741 'xsigma' or 'x' (= x-sigma level. e.g.,
3742 '3sigma'), or
3743 'topx' (= the x strongest ones, e.g. 'top5').
3744 default is 3.0 (unit: sigma).
3745 addwn: the additional wave numbers to be used for fitting.
3746 list or integer value is accepted to specify every
3747 wave numbers. also string value can be used in case
3748 you need to specify wave numbers in a certain range,
3749 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3750 '<a' (= 0,1,...,a-2,a-1),
3751 '>=a' (= a, a+1, ... up to the maximum wave
3752 number corresponding to the Nyquist
3753 frequency for the case of FFT).
3754 default is [0].
3755 rejwn: the wave numbers NOT to be used for fitting.
3756 can be set just as addwn but has higher priority:
3757 wave numbers which are specified both in addwn
3758 and rejwn will NOT be used. default is [].
3759 insitu: if False a new scantable is returned.
3760 Otherwise, the scaling is done in-situ
3761 The default is taken from .asaprc (False)
3762 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3763 clipniter: maximum number of iteration of 'clipthresh'-sigma
3764 clipping (default is 0)
3765 edge: an optional number of channel to drop at
3766 the edge of spectrum. If only one value is
3767 specified, the same number will be dropped
3768 from both sides of the spectrum. Default
3769 is to keep all channels. Nested tuples
3770 represent individual edge selection for
3771 different IFs (a number of spectral channels
3772 can be different)
3773 threshold: the threshold used by line finder. It is
3774 better to keep it large as only strong lines
3775 affect the baseline solution.
3776 chan_avg_limit: a maximum number of consequtive spectral
3777 channels to average during the search of
3778 weak and broad lines. The default is no
3779 averaging (and no search for weak lines).
3780 If such lines can affect the fitted baseline
3781 (e.g. a high order polynomial is fitted),
3782 increase this parameter (usually values up
3783 to 8 are reasonable). Most users of this
3784 method should find the default value sufficient.
3785 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3786 plot the fit and the residual. In this each
3787 indivual fit has to be approved, by typing 'y'
3788 or 'n'
3789 getresidual: if False, returns best-fit values instead of
3790 residual. (default is True)
3791 showprogress: show progress status for large data.
3792 default is True.
3793 minnrow: minimum number of input spectra to show.
3794 default is 1000.
3795 outlog: Output the coefficients of the best-fit
3796 function to logger (default is False)
3797 blfile: Name of a text file in which the best-fit
3798 parameter values to be written
3799 (default is "": no file/logger output)
3800 csvformat: if True blfile is csv-formatted, default is False.
3801 bltable: name of a baseline table where fitting results
3802 (coefficients, rms, etc.) are to be written.
3803 if given, fitting results will NOT be output to
3804 scantable (insitu=True) or None will be
3805 returned (insitu=False).
3806 (default is "": no table output)
3807
3808 Example:
3809 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3810
3811 Note:
3812 The best-fit parameter values output in logger and/or blfile are now
3813 based on specunit of 'channel'.
3814 """
3815
3816 try:
3817 varlist = vars()
3818
3819 if insitu is None: insitu = rcParams['insitu']
3820 if insitu:
3821 workscan = self
3822 else:
3823 workscan = self.copy()
3824
3825 if mask is None: mask = []
3826 if applyfft is None: applyfft = True
3827 if fftmethod is None: fftmethod = 'fft'
3828 if fftthresh is None: fftthresh = 3.0
3829 if addwn is None: addwn = [0]
3830 if rejwn is None: rejwn = []
3831 if clipthresh is None: clipthresh = 3.0
3832 if clipniter is None: clipniter = 0
3833 if edge is None: edge = (0,0)
3834 if threshold is None: threshold = 3
3835 if chan_avg_limit is None: chan_avg_limit = 1
3836 if plot is None: plot = False
3837 if getresidual is None: getresidual = True
3838 if showprogress is None: showprogress = True
3839 if minnrow is None: minnrow = 1000
3840 if outlog is None: outlog = False
3841 if blfile is None: blfile = ''
3842 if csvformat is None: csvformat = False
3843 if bltable is None: bltable = ''
3844
3845 sapplyfft = 'true' if applyfft else 'false'
3846 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3847
3848 scsvformat = 'T' if csvformat else 'F'
3849
3850 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3851 workscan._auto_sinusoid_baseline(mask,
3852 fftinfo,
3853 workscan._parse_wn(addwn),
3854 workscan._parse_wn(rejwn),
3855 clipthresh, clipniter,
3856 normalise_edge_param(edge),
3857 threshold, chan_avg_limit,
3858 getresidual,
3859 pack_progress_params(showprogress,
3860 minnrow),
3861 outlog, scsvformat+blfile, bltable)
3862 workscan._add_history("auto_sinusoid_baseline", varlist)
3863
3864 if bltable == '':
3865 if insitu:
3866 self._assign(workscan)
3867 else:
3868 return workscan
3869 else:
3870 if not insitu:
3871 return None
3872
3873 except RuntimeError, e:
3874 raise_fitting_failure_exception(e)
3875
3876 @asaplog_post_dec
3877 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3878 clipthresh=None, clipniter=None, plot=None,
3879 getresidual=None, showprogress=None, minnrow=None,
3880 outlog=None, blfile=None, csvformat=None,
3881 bltable=None):
3882 """\
3883 Return a scan which has been baselined (all rows) by cubic spline
3884 function (piecewise cubic polynomial).
3885
3886 Parameters:
3887 mask: An optional mask
3888 npiece: Number of pieces. (default is 2)
3889 insitu: If False a new scantable is returned.
3890 Otherwise, the scaling is done in-situ
3891 The default is taken from .asaprc (False)
3892 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3893 clipniter: maximum number of iteration of 'clipthresh'-sigma
3894 clipping (default is 0)
3895 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3896 plot the fit and the residual. In this each
3897 indivual fit has to be approved, by typing 'y'
3898 or 'n'
3899 getresidual: if False, returns best-fit values instead of
3900 residual. (default is True)
3901 showprogress: show progress status for large data.
3902 default is True.
3903 minnrow: minimum number of input spectra to show.
3904 default is 1000.
3905 outlog: Output the coefficients of the best-fit
3906 function to logger (default is False)
3907 blfile: Name of a text file in which the best-fit
3908 parameter values to be written
3909 (default is "": no file/logger output)
3910 csvformat: if True blfile is csv-formatted, default is False.
3911 bltable: name of a baseline table where fitting results
3912 (coefficients, rms, etc.) are to be written.
3913 if given, fitting results will NOT be output to
3914 scantable (insitu=True) or None will be
3915 returned (insitu=False).
3916 (default is "": no table output)
3917
3918 Example:
3919 # return a scan baselined by a cubic spline consisting of 2 pieces
3920 # (i.e., 1 internal knot),
3921 # also with 3-sigma clipping, iteration up to 4 times
3922 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3923
3924 Note:
3925 The best-fit parameter values output in logger and/or blfile are now
3926 based on specunit of 'channel'.
3927 """
3928
3929 try:
3930 varlist = vars()
3931
3932 if insitu is None: insitu = rcParams['insitu']
3933 if insitu:
3934 workscan = self
3935 else:
3936 workscan = self.copy()
3937
3938 if mask is None: mask = []
3939 if npiece is None: npiece = 2
3940 if clipthresh is None: clipthresh = 3.0
3941 if clipniter is None: clipniter = 0
3942 if plot is None: plot = False
3943 if getresidual is None: getresidual = True
3944 if showprogress is None: showprogress = True
3945 if minnrow is None: minnrow = 1000
3946 if outlog is None: outlog = False
3947 if blfile is None: blfile = ''
3948 if csvformat is None: csvformat = False
3949 if bltable is None: bltable = ''
3950
3951 scsvformat = 'T' if csvformat else 'F'
3952
3953 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3954 workscan._cspline_baseline(mask, npiece,
3955 clipthresh, clipniter,
3956 getresidual,
3957 pack_progress_params(showprogress,
3958 minnrow),
3959 outlog, scsvformat+blfile,
3960 bltable)
3961 workscan._add_history("cspline_baseline", varlist)
3962
3963 if bltable == '':
3964 if insitu:
3965 self._assign(workscan)
3966 else:
3967 return workscan
3968 else:
3969 if not insitu:
3970 return None
3971
3972 except RuntimeError, e:
3973 raise_fitting_failure_exception(e)
3974
3975 @asaplog_post_dec
3976 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3977 clipthresh=None, clipniter=None,
3978 edge=None, threshold=None, chan_avg_limit=None,
3979 getresidual=None, plot=None,
3980 showprogress=None, minnrow=None, outlog=None,
3981 blfile=None, csvformat=None, bltable=None):
3982 """\
3983 Return a scan which has been baselined (all rows) by cubic spline
3984 function (piecewise cubic polynomial).
3985 Spectral lines are detected first using linefinder and masked out
3986 to avoid them affecting the baseline solution.
3987
3988 Parameters:
3989 mask: an optional mask retreived from scantable
3990 npiece: Number of pieces. (default is 2)
3991 insitu: if False a new scantable is returned.
3992 Otherwise, the scaling is done in-situ
3993 The default is taken from .asaprc (False)
3994 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3995 clipniter: maximum number of iteration of 'clipthresh'-sigma
3996 clipping (default is 0)
3997 edge: an optional number of channel to drop at
3998 the edge of spectrum. If only one value is
3999 specified, the same number will be dropped
4000 from both sides of the spectrum. Default
4001 is to keep all channels. Nested tuples
4002 represent individual edge selection for
4003 different IFs (a number of spectral channels
4004 can be different)
4005 threshold: the threshold used by line finder. It is
4006 better to keep it large as only strong lines
4007 affect the baseline solution.
4008 chan_avg_limit: a maximum number of consequtive spectral
4009 channels to average during the search of
4010 weak and broad lines. The default is no
4011 averaging (and no search for weak lines).
4012 If such lines can affect the fitted baseline
4013 (e.g. a high order polynomial is fitted),
4014 increase this parameter (usually values up
4015 to 8 are reasonable). Most users of this
4016 method should find the default value sufficient.
4017 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4018 plot the fit and the residual. In this each
4019 indivual fit has to be approved, by typing 'y'
4020 or 'n'
4021 getresidual: if False, returns best-fit values instead of
4022 residual. (default is True)
4023 showprogress: show progress status for large data.
4024 default is True.
4025 minnrow: minimum number of input spectra to show.
4026 default is 1000.
4027 outlog: Output the coefficients of the best-fit
4028 function to logger (default is False)
4029 blfile: Name of a text file in which the best-fit
4030 parameter values to be written
4031 (default is "": no file/logger output)
4032 csvformat: if True blfile is csv-formatted, default is False.
4033 bltable: name of a baseline table where fitting results
4034 (coefficients, rms, etc.) are to be written.
4035 if given, fitting results will NOT be output to
4036 scantable (insitu=True) or None will be
4037 returned (insitu=False).
4038 (default is "": no table output)
4039
4040 Example:
4041 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4042
4043 Note:
4044 The best-fit parameter values output in logger and/or blfile are now
4045 based on specunit of 'channel'.
4046 """
4047
4048 try:
4049 varlist = vars()
4050
4051 if insitu is None: insitu = rcParams['insitu']
4052 if insitu:
4053 workscan = self
4054 else:
4055 workscan = self.copy()
4056
4057 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4058 if mask is None: mask = []
4059 if npiece is None: npiece = 2
4060 if clipthresh is None: clipthresh = 3.0
4061 if clipniter is None: clipniter = 0
4062 if edge is None: edge = (0, 0)
4063 if threshold is None: threshold = 3
4064 if chan_avg_limit is None: chan_avg_limit = 1
4065 if plot is None: plot = False
4066 if getresidual is None: getresidual = True
4067 if showprogress is None: showprogress = True
4068 if minnrow is None: minnrow = 1000
4069 if outlog is None: outlog = False
4070 if blfile is None: blfile = ''
4071 if csvformat is None: csvformat = False
4072 if bltable is None: bltable = ''
4073
4074 scsvformat = 'T' if csvformat else 'F'
4075
4076 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4077 workscan._auto_cspline_baseline(mask, npiece,
4078 clipthresh, clipniter,
4079 normalise_edge_param(edge),
4080 threshold,
4081 chan_avg_limit, getresidual,
4082 pack_progress_params(showprogress,
4083 minnrow),
4084 outlog,
4085 scsvformat+blfile,
4086 bltable)
4087 workscan._add_history("auto_cspline_baseline", varlist)
4088
4089 if bltable == '':
4090 if insitu:
4091 self._assign(workscan)
4092 else:
4093 return workscan
4094 else:
4095 if not insitu:
4096 return None
4097
4098 except RuntimeError, e:
4099 raise_fitting_failure_exception(e)
4100
4101 @asaplog_post_dec
4102 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
4103 clipthresh=None, clipniter=None, plot=None,
4104 getresidual=None, showprogress=None, minnrow=None,
4105 outlog=None, blfile=None, csvformat=None,
4106 bltable=None):
4107 """\
4108 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4109
4110 Parameters:
4111 mask: An optional mask
4112 order: the maximum order of Chebyshev polynomial (default is 5)
4113 insitu: If False a new scantable is returned.
4114 Otherwise, the scaling is done in-situ
4115 The default is taken from .asaprc (False)
4116 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4117 clipniter: maximum number of iteration of 'clipthresh'-sigma
4118 clipping (default is 0)
4119 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4120 plot the fit and the residual. In this each
4121 indivual fit has to be approved, by typing 'y'
4122 or 'n'
4123 getresidual: if False, returns best-fit values instead of
4124 residual. (default is True)
4125 showprogress: show progress status for large data.
4126 default is True.
4127 minnrow: minimum number of input spectra to show.
4128 default is 1000.
4129 outlog: Output the coefficients of the best-fit
4130 function to logger (default is False)
4131 blfile: Name of a text file in which the best-fit
4132 parameter values to be written
4133 (default is "": no file/logger output)
4134 csvformat: if True blfile is csv-formatted, default is False.
4135 bltable: name of a baseline table where fitting results
4136 (coefficients, rms, etc.) are to be written.
4137 if given, fitting results will NOT be output to
4138 scantable (insitu=True) or None will be
4139 returned (insitu=False).
4140 (default is "": no table output)
4141
4142 Example:
4143 # return a scan baselined by a cubic spline consisting of 2 pieces
4144 # (i.e., 1 internal knot),
4145 # also with 3-sigma clipping, iteration up to 4 times
4146 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4147
4148 Note:
4149 The best-fit parameter values output in logger and/or blfile are now
4150 based on specunit of 'channel'.
4151 """
4152
4153 try:
4154 varlist = vars()
4155
4156 if insitu is None: insitu = rcParams['insitu']
4157 if insitu:
4158 workscan = self
4159 else:
4160 workscan = self.copy()
4161
4162 if mask is None: mask = []
4163 if order is None: order = 5
4164 if clipthresh is None: clipthresh = 3.0
4165 if clipniter is None: clipniter = 0
4166 if plot is None: plot = False
4167 if getresidual is None: getresidual = True
4168 if showprogress is None: showprogress = True
4169 if minnrow is None: minnrow = 1000
4170 if outlog is None: outlog = False
4171 if blfile is None: blfile = ''
4172 if csvformat is None: csvformat = False
4173 if bltable is None: bltable = ''
4174
4175 scsvformat = 'T' if csvformat else 'F'
4176
4177 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4178 workscan._chebyshev_baseline(mask, order,
4179 clipthresh, clipniter,
4180 getresidual,
4181 pack_progress_params(showprogress,
4182 minnrow),
4183 outlog, scsvformat+blfile,
4184 bltable)
4185 workscan._add_history("chebyshev_baseline", varlist)
4186
4187 if bltable == '':
4188 if insitu:
4189 self._assign(workscan)
4190 else:
4191 return workscan
4192 else:
4193 if not insitu:
4194 return None
4195
4196 except RuntimeError, e:
4197 raise_fitting_failure_exception(e)
4198
4199 @asaplog_post_dec
4200 def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
4201 clipthresh=None, clipniter=None,
4202 edge=None, threshold=None, chan_avg_limit=None,
4203 getresidual=None, plot=None,
4204 showprogress=None, minnrow=None, outlog=None,
4205 blfile=None, csvformat=None, bltable=None):
4206 """\
4207 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4208 Spectral lines are detected first using linefinder and masked out
4209 to avoid them affecting the baseline solution.
4210
4211 Parameters:
4212 mask: an optional mask retreived from scantable
4213 order: the maximum order of Chebyshev polynomial (default is 5)
4214 insitu: if False a new scantable is returned.
4215 Otherwise, the scaling is done in-situ
4216 The default is taken from .asaprc (False)
4217 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4218 clipniter: maximum number of iteration of 'clipthresh'-sigma
4219 clipping (default is 0)
4220 edge: an optional number of channel to drop at
4221 the edge of spectrum. If only one value is
4222 specified, the same number will be dropped
4223 from both sides of the spectrum. Default
4224 is to keep all channels. Nested tuples
4225 represent individual edge selection for
4226 different IFs (a number of spectral channels
4227 can be different)
4228 threshold: the threshold used by line finder. It is
4229 better to keep it large as only strong lines
4230 affect the baseline solution.
4231 chan_avg_limit: a maximum number of consequtive spectral
4232 channels to average during the search of
4233 weak and broad lines. The default is no
4234 averaging (and no search for weak lines).
4235 If such lines can affect the fitted baseline
4236 (e.g. a high order polynomial is fitted),
4237 increase this parameter (usually values up
4238 to 8 are reasonable). Most users of this
4239 method should find the default value sufficient.
4240 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4241 plot the fit and the residual. In this each
4242 indivual fit has to be approved, by typing 'y'
4243 or 'n'
4244 getresidual: if False, returns best-fit values instead of
4245 residual. (default is True)
4246 showprogress: show progress status for large data.
4247 default is True.
4248 minnrow: minimum number of input spectra to show.
4249 default is 1000.
4250 outlog: Output the coefficients of the best-fit
4251 function to logger (default is False)
4252 blfile: Name of a text file in which the best-fit
4253 parameter values to be written
4254 (default is "": no file/logger output)
4255 csvformat: if True blfile is csv-formatted, default is False.
4256 bltable: name of a baseline table where fitting results
4257 (coefficients, rms, etc.) are to be written.
4258 if given, fitting results will NOT be output to
4259 scantable (insitu=True) or None will be
4260 returned (insitu=False).
4261 (default is "": no table output)
4262
4263 Example:
4264 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4265
4266 Note:
4267 The best-fit parameter values output in logger and/or blfile are now
4268 based on specunit of 'channel'.
4269 """
4270
4271 try:
4272 varlist = vars()
4273
4274 if insitu is None: insitu = rcParams['insitu']
4275 if insitu:
4276 workscan = self
4277 else:
4278 workscan = self.copy()
4279
4280 if mask is None: mask = []
4281 if order is None: order = 5
4282 if clipthresh is None: clipthresh = 3.0
4283 if clipniter is None: clipniter = 0
4284 if edge is None: edge = (0, 0)
4285 if threshold is None: threshold = 3
4286 if chan_avg_limit is None: chan_avg_limit = 1
4287 if plot is None: plot = False
4288 if getresidual is None: getresidual = True
4289 if showprogress is None: showprogress = True
4290 if minnrow is None: minnrow = 1000
4291 if outlog is None: outlog = False
4292 if blfile is None: blfile = ''
4293 if csvformat is None: csvformat = False
4294 if bltable is None: bltable = ''
4295
4296 scsvformat = 'T' if csvformat else 'F'
4297
4298 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4299 workscan._auto_chebyshev_baseline(mask, order,
4300 clipthresh, clipniter,
4301 normalise_edge_param(edge),
4302 threshold,
4303 chan_avg_limit, getresidual,
4304 pack_progress_params(showprogress,
4305 minnrow),
4306 outlog, scsvformat+blfile,
4307 bltable)
4308 workscan._add_history("auto_chebyshev_baseline", varlist)
4309
4310 if bltable == '':
4311 if insitu:
4312 self._assign(workscan)
4313 else:
4314 return workscan
4315 else:
4316 if not insitu:
4317 return None
4318
4319 except RuntimeError, e:
4320 raise_fitting_failure_exception(e)
4321
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unset arguments with their defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character csv flag prepended to blfile for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive mode: fit row by row with the python fitter,
                # asking the user to accept/reject each fit
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # NOTE(review): "H" appears to trigger the prompt/usage
                # display on the first query — confirm in _get_verify_action
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        # combine the user mask with the row's own flags
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            # rejected: leave this row's spectrum untouched
                            continue
                        elif accept_fit.upper() == "R":
                            # stop processing the remaining rows altogether
                            break
                        elif accept_fit.upper() == "A":
                            # accept this and all remaining fits silently
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive mode: delegate the whole fit to C++
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable) #

            workscan._add_history("poly_baseline", varlist)

            # NOTE(review): unlike the sibling *_baseline methods, the
            # return value here is not gated on bltable == '' — with a
            # bltable given and insitu=False this still returns workscan;
            # confirm whether that asymmetry is intended
            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4471
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unset arguments with their defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character csv flag prepended to blfile for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            # flatten edge spec into a per-IF pair list: edge[2*if:2*if+2]
            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: find lines, then fit row by row with
                # the python fitter, asking the user to accept each fit
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # NOTE(review): "H" appears to trigger the prompt/usage
                # display on the first query — confirm in _get_verify_action
                action = "H"
                for r in rows:
                    # index of this row's IF edge pair in the flat edge list
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine the user mask with the row's own flags
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found above
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            # stop processing the remaining rows altogether
                            break
                        elif accept_fit.upper() == "A":
                            # accept this and all remaining fits silently
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive mode: delegate the whole fit to C++
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table, not to a scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4656
4657 def _init_blinfo(self):
4658 """\
4659 Initialise the following three auxiliary members:
4660 blpars : parameters of the best-fit baseline,
4661 masklists : mask data (edge positions of masked channels) and
4662 actualmask : mask data (in boolean list),
4663 to keep for use later (including output to logger/text files).
4664 Used by poly_baseline() and auto_poly_baseline() in case of
4665 'plot=True'.
4666 """
4667 self.blpars = []
4668 self.masklists = []
4669 self.actualmask = []
4670 return
4671
4672 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4673 """\
4674 Append baseline-fitting related info to blpars, masklist and
4675 actualmask.
4676 """
4677 self.blpars.append(data_blpars)
4678 self.masklists.append(data_masklists)
4679 self.actualmask.append(data_actualmask)
4680 return
4681
4682 @asaplog_post_dec
4683 def rotate_linpolphase(self, angle):
4684 """\
4685 Rotate the phase of the complex polarization O=Q+iU correlation.
4686 This is always done in situ in the raw data. So if you call this
4687 function more than once then each call rotates the phase further.
4688
4689 Parameters:
4690
4691 angle: The angle (degrees) to rotate (add) by.
4692
4693 Example::
4694
4695 scan.rotate_linpolphase(2.3)
4696
4697 """
4698 varlist = vars()
4699 self._math._rotate_linpolphase(self, angle)
4700 self._add_history("rotate_linpolphase", varlist)
4701 return
4702
4703 @asaplog_post_dec
4704 def rotate_xyphase(self, angle):
4705 """\
4706 Rotate the phase of the XY correlation. This is always done in situ
4707 in the data. So if you call this function more than once
4708 then each call rotates the phase further.
4709
4710 Parameters:
4711
4712 angle: The angle (degrees) to rotate (add) by.
4713
4714 Example::
4715
4716 scan.rotate_xyphase(2.3)
4717
4718 """
4719 varlist = vars()
4720 self._math._rotate_xyphase(self, angle)
4721 self._add_history("rotate_xyphase", varlist)
4722 return
4723
4724 @asaplog_post_dec
4725 def swap_linears(self):
4726 """\
4727 Swap the linear polarisations XX and YY, or better the first two
4728 polarisations as this also works for ciculars.
4729 """
4730 varlist = vars()
4731 self._math._swap_linears(self)
4732 self._add_history("swap_linears", varlist)
4733 return
4734
4735 @asaplog_post_dec
4736 def invert_phase(self):
4737 """\
4738 Invert the phase of the complex polarisation
4739 """
4740 varlist = vars()
4741 self._math._invert_phase(self)
4742 self._add_history("invert_phase", varlist)
4743 return
4744
4745 @asaplog_post_dec
4746 def add(self, offset, insitu=None):
4747 """\
4748 Return a scan where all spectra have the offset added
4749
4750 Parameters:
4751
4752 offset: the offset
4753
4754 insitu: if False a new scantable is returned.
4755 Otherwise, the scaling is done in-situ
4756 The default is taken from .asaprc (False)
4757
4758 """
4759 if insitu is None: insitu = rcParams['insitu']
4760 self._math._setinsitu(insitu)
4761 varlist = vars()
4762 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4763 s._add_history("add", varlist)
4764 if insitu:
4765 self._assign(s)
4766 else:
4767 return s
4768
4769 @asaplog_post_dec
4770 def scale(self, factor, tsys=True, insitu=None):
4771 """\
4772
4773 Return a scan where all spectra are scaled by the given 'factor'
4774
4775 Parameters:
4776
4777 factor: the scaling factor (float or 1D float list)
4778
4779 insitu: if False a new scantable is returned.
4780 Otherwise, the scaling is done in-situ
4781 The default is taken from .asaprc (False)
4782
4783 tsys: if True (default) then apply the operation to Tsys
4784 as well as the data
4785
4786 """
4787 if insitu is None: insitu = rcParams['insitu']
4788 self._math._setinsitu(insitu)
4789 varlist = vars()
4790 s = None
4791 import numpy
4792 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4793 if isinstance(factor[0], list) or isinstance(factor[0],
4794 numpy.ndarray):
4795 from asapmath import _array2dOp
4796 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4797 else:
4798 s = scantable( self._math._arrayop( self, factor,
4799 "MUL", tsys ) )
4800 else:
4801 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4802 s._add_history("scale", varlist)
4803 if insitu:
4804 self._assign(s)
4805 else:
4806 return s
4807
4808 @preserve_selection
4809 def set_sourcetype(self, match, matchtype="pattern",
4810 sourcetype="reference"):
4811 """\
4812 Set the type of the source to be an source or reference scan
4813 using the provided pattern.
4814
4815 Parameters:
4816
4817 match: a Unix style pattern, regular expression or selector
4818
4819 matchtype: 'pattern' (default) UNIX style pattern or
4820 'regex' regular expression
4821
4822 sourcetype: the type of the source to use (source/reference)
4823
4824 """
4825 varlist = vars()
4826 stype = -1
4827 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4828 stype = 1
4829 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4830 stype = 0
4831 else:
4832 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4833 if matchtype.lower().startswith("p"):
4834 matchtype = "pattern"
4835 elif matchtype.lower().startswith("r"):
4836 matchtype = "regex"
4837 else:
4838 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4839 sel = selector()
4840 if isinstance(match, selector):
4841 sel = match
4842 else:
4843 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4844 self.set_selection(sel)
4845 self._setsourcetype(stype)
4846 self._add_history("set_sourcetype", varlist)
4847
4848
4849 def set_sourcename(self, name):
4850 varlist = vars()
4851 self._setsourcename(name)
4852 self._add_history("set_sourcename", varlist)
4853
4854 @asaplog_post_dec
4855 @preserve_selection
4856 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4857 """\
4858 This function allows to build quotients automatically.
4859 It assumes the observation to have the same number of
4860 "ons" and "offs"
4861
4862 Parameters:
4863
4864 preserve: you can preserve (default) the continuum or
4865 remove it. The equations used are
4866
4867 preserve: Output = Toff * (on/off) - Toff
4868
4869 remove: Output = Toff * (on/off) - Ton
4870
4871 mode: the on/off detection mode
4872 'paired' (default)
4873 identifies 'off' scans by the
4874 trailing '_R' (Mopra/Parkes) or
4875 '_e'/'_w' (Tid) and matches
4876 on/off pairs from the observing pattern
4877 'time'
4878 finds the closest off in time
4879
4880 .. todo:: verify argument is not implemented
4881
4882 """
4883 varlist = vars()
4884 modes = ["time", "paired"]
4885 if not mode in modes:
4886 msg = "please provide valid mode. Valid modes are %s" % (modes)
4887 raise ValueError(msg)
4888 s = None
4889 if mode.lower() == "paired":
4890 from asap._asap import srctype
4891 sel = self.get_selection()
4892 #sel.set_query("SRCTYPE==psoff")
4893 sel.set_types(srctype.psoff)
4894 self.set_selection(sel)
4895 offs = self.copy()
4896 #sel.set_query("SRCTYPE==pson")
4897 sel.set_types(srctype.pson)
4898 self.set_selection(sel)
4899 ons = self.copy()
4900 s = scantable(self._math._quotient(ons, offs, preserve))
4901 elif mode.lower() == "time":
4902 s = scantable(self._math._auto_quotient(self, mode, preserve))
4903 s._add_history("auto_quotient", varlist)
4904 return s
4905
4906 @asaplog_post_dec
4907 def mx_quotient(self, mask = None, weight='median', preserve=True):
4908 """\
4909 Form a quotient using "off" beams when observing in "MX" mode.
4910
4911 Parameters:
4912
4913 mask: an optional mask to be used when weight == 'stddev'
4914
4915 weight: How to average the off beams. Default is 'median'.
4916
4917 preserve: you can preserve (default) the continuum or
4918 remove it. The equations used are:
4919
4920 preserve: Output = Toff * (on/off) - Toff
4921
4922 remove: Output = Toff * (on/off) - Ton
4923
4924 """
4925 mask = mask or ()
4926 varlist = vars()
4927 on = scantable(self._math._mx_extract(self, 'on'))
4928 preoff = scantable(self._math._mx_extract(self, 'off'))
4929 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4930 from asapmath import quotient
4931 q = quotient(on, off, preserve)
4932 q._add_history("mx_quotient", varlist)
4933 return q
4934
4935 @asaplog_post_dec
4936 def freq_switch(self, insitu=None):
4937 """\
4938 Apply frequency switching to the data.
4939
4940 Parameters:
4941
4942 insitu: if False a new scantable is returned.
4943 Otherwise, the swictching is done in-situ
4944 The default is taken from .asaprc (False)
4945
4946 """
4947 if insitu is None: insitu = rcParams['insitu']
4948 self._math._setinsitu(insitu)
4949 varlist = vars()
4950 s = scantable(self._math._freqswitch(self))
4951 s._add_history("freq_switch", varlist)
4952 if insitu:
4953 self._assign(s)
4954 else:
4955 return s
4956
4957 @asaplog_post_dec
4958 def recalc_azel(self):
4959 """Recalculate the azimuth and elevation for each position."""
4960 varlist = vars()
4961 self._recalcazel()
4962 self._add_history("recalc_azel", varlist)
4963 return
4964
4965 @asaplog_post_dec
4966 def __add__(self, other):
4967 """
4968 implicit on all axes and on Tsys
4969 """
4970 varlist = vars()
4971 s = self.__op( other, "ADD" )
4972 s._add_history("operator +", varlist)
4973 return s
4974
4975 @asaplog_post_dec
4976 def __sub__(self, other):
4977 """
4978 implicit on all axes and on Tsys
4979 """
4980 varlist = vars()
4981 s = self.__op( other, "SUB" )
4982 s._add_history("operator -", varlist)
4983 return s
4984
4985 @asaplog_post_dec
4986 def __mul__(self, other):
4987 """
4988 implicit on all axes and on Tsys
4989 """
4990 varlist = vars()
4991 s = self.__op( other, "MUL" ) ;
4992 s._add_history("operator *", varlist)
4993 return s
4994
4995
4996 @asaplog_post_dec
4997 def __div__(self, other):
4998 """
4999 implicit on all axes and on Tsys
5000 """
5001 varlist = vars()
5002 s = self.__op( other, "DIV" )
5003 s._add_history("operator /", varlist)
5004 return s
5005
5006 @asaplog_post_dec
5007 def __op( self, other, mode ):
5008 s = None
5009 if isinstance(other, scantable):
5010 s = scantable(self._math._binaryop(self, other, mode))
5011 elif isinstance(other, float):
5012 if other == 0.0:
5013 raise ZeroDivisionError("Dividing by zero is not recommended")
5014 s = scantable(self._math._unaryop(self, other, mode, False))
5015 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
5016 if isinstance(other[0], list) \
5017 or isinstance(other[0], numpy.ndarray):
5018 from asapmath import _array2dOp
5019 s = _array2dOp( self, other, mode, False )
5020 else:
5021 s = scantable( self._math._arrayop( self, other,
5022 mode, False ) )
5023 else:
5024 raise TypeError("Other input is not a scantable or float value")
5025 return s
5026
5027 @asaplog_post_dec
5028 def get_fit(self, row=0):
5029 """\
5030 Print or return the stored fits for a row in the scantable
5031
5032 Parameters:
5033
5034 row: the row which the fit has been applied to.
5035
5036 """
5037 if row > self.nrow():
5038 return
5039 from asap.asapfit import asapfit
5040 fit = asapfit(self._getfit(row))
5041 asaplog.push( '%s' %(fit) )
5042 return fit.as_dict()
5043
5044 @preserve_selection
5045 def flag_nans(self):
5046 """\
5047 Utility function to flag NaN values in the scantable.
5048 """
5049 import numpy
5050 basesel = self.get_selection()
5051 for i in range(self.nrow()):
5052 sel = self.get_row_selector(i)
5053 self.set_selection(basesel+sel)
5054 nans = numpy.isnan(self._getspectrum(0))
5055 if numpy.any(nans):
5056 bnans = [ bool(v) for v in nans]
5057 self.flag(bnans)
5058
5059 self.set_selection(basesel)
5060
5061 def get_row_selector(self, rowno):
5062 return selector(rows=[rowno])
5063
5064 def _add_history(self, funcname, parameters):
5065 if not rcParams['scantable.history']:
5066 return
5067 # create date
5068 sep = "##"
5069 from datetime import datetime
5070 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5071 hist = dstr+sep
5072 hist += funcname+sep#cdate+sep
5073 if parameters.has_key('self'):
5074 del parameters['self']
5075 for k, v in parameters.iteritems():
5076 if type(v) is dict:
5077 for k2, v2 in v.iteritems():
5078 hist += k2
5079 hist += "="
5080 if isinstance(v2, scantable):
5081 hist += 'scantable'
5082 elif k2 == 'mask':
5083 if isinstance(v2, list) or isinstance(v2, tuple):
5084 hist += str(self._zip_mask(v2))
5085 else:
5086 hist += str(v2)
5087 else:
5088 hist += str(v2)
5089 else:
5090 hist += k
5091 hist += "="
5092 if isinstance(v, scantable):
5093 hist += 'scantable'
5094 elif k == 'mask':
5095 if isinstance(v, list) or isinstance(v, tuple):
5096 hist += str(self._zip_mask(v))
5097 else:
5098 hist += str(v)
5099 else:
5100 hist += str(v)
5101 hist += sep
5102 hist = hist[:-2] # remove trailing '##'
5103 self._addhistory(hist)
5104
5105
5106 def _zip_mask(self, mask):
5107 mask = list(mask)
5108 i = 0
5109 segments = []
5110 while mask[i:].count(1):
5111 i += mask[i:].index(1)
5112 if mask[i:].count(0):
5113 j = i + mask[i:].index(0)
5114 else:
5115 j = len(mask)
5116 segments.append([i, j])
5117 i = j
5118 return segments
5119
5120 def _get_ordinate_label(self):
5121 fu = "("+self.get_fluxunit()+")"
5122 import re
5123 lbl = "Intensity"
5124 if re.match(".K.", fu):
5125 lbl = "Brightness Temperature "+ fu
5126 elif re.match(".Jy.", fu):
5127 lbl = "Flux density "+ fu
5128 return lbl
5129
5130 def _check_ifs(self):
5131# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5132 nchans = [self.nchan(i) for i in self.getifnos()]
5133 nchans = filter(lambda t: t > 0, nchans)
5134 return (sum(nchans)/len(nchans) == nchans[0])
5135
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import spectral data from one or more files into this scantable.

        Parameters:

            names: list of file/directory names. Environment variables
                   and '~' are expanded; IOError is raised for names
                   that do not exist on disk.

            unit: flux unit to set on the result, or None to leave
                  unchanged.

            average: if True, time-average each file per scan after
                     filling.

            opts: extra options forwarded to the filler's open() call.
                  NOTE(review): shared mutable default; appears to be
                  used read-only here.
        """
        first = True
        fullnames = []
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 0 = in-memory table, 1 = disk-based table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # use the measurement-set filler for MS input, generic otherwise
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge data from subsequent files into what is already filled
                tbl = self._math._merge([self, tbl])
            # rebind this scantable to the newly filled/merged table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5176
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve an interactive verification action to a single letter.

        Parameters:

            msg: prompt text shown to the user when no usable action
                 is supplied.

            action: optional pre-selected action string; if empty or
                    not a string, the user is prompted instead.

        Returns one of 'Y', 'N', 'A', 'R'
        (yes / no / accept-all / reject-all).
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare Enter defaults to yes
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt; the invalid 'action' argument is deliberately dropped
            return self._get_verify_action(msg)
        else:
            # any other unrecognized input is treated as yes
            return 'Y'
5195
5196 def __getitem__(self, key):
5197 if key < 0:
5198 key += self.nrow()
5199 if key >= self.nrow():
5200 raise IndexError("Row index out of range.")
5201 return self._getspectrum(key)
5202
5203 def __setitem__(self, key, value):
5204 if key < 0:
5205 key += self.nrow()
5206 if key >= self.nrow():
5207 raise IndexError("Row index out of range.")
5208 if not hasattr(value, "__len__") or \
5209 len(value) > self.nchan(self.getif(key)):
5210 raise ValueError("Spectrum length doesn't match.")
5211 return self._setspectrum(value, key)
5212
5213 def __len__(self):
5214 return self.nrow()
5215
5216 def __iter__(self):
5217 for i in range(len(self)):
5218 yield self[i]
Note: See TracBrowser for help on using the repository browser.