source: trunk/python/scantable.py@ 2954

Last change on this file since 2954 was 2954, checked in by WataruKawasaki, 10 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes:

Module(s): sd

Description: add parameter (skip_flaggedrow) where they were missing in stmath._unaryop and stmath._arrayop called in scantable.op().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 208.4 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator: save the scantable's current selection before calling
    *func* and restore it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Checks for a casacore table directory whose table.info declares
    'Type = Scantable'.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    f = open(filename+'/table.info')
    first_line = f.readline()
    f.close()
    match_pattern = '^Type = (Scantable)? *$'
    return bool(re.match(match_pattern, first_line))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Checks for a casacore table directory whose table.info mentions
    'Measurement Set' on its first line.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename+'/table.info')
            and os.path.exists(filename+'/table.dat')):
        return False
    f = open(filename+'/table.info')
    first_line = f.readline()
    f.close()
    return first_line.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                # raise in call form (consistent with the rest of this
                # module, and forward-compatible) instead of the old
                # 'raise X, msg' statement
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- canonicalise scalars and short sequences -------------------
    if isinstance(edge, int):
        edge = [edge, edge]                 # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                   # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]   # e.g. [1] => [1,1]

    # 'commonedge': True when one [left,right] pair applies to all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten per-IF values: a bare int stands for the same value
        # on both sides of that IF
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: in verbose mode push the error to the
    asap log, otherwise re-raise it as a RuntimeError with an
    explanatory note appended."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack a progress flag and a minimum-row threshold into one
    comma-separated string, e.g. (True, 1000) -> 'true,1000'."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.

    Entries whose row number is out of range pack to '' and are
    dropped from the result.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        packed_list = []
        for item in blinfo:
            packed = do_pack_blinfo(item, maxirow)
            if packed != '':
                packed_list.append(packed)
        return packed_list
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' when the row number is out of range (row >= maxirow).
    Raises ValueError when a mandatory key ('row', 'blfunc',
    'masklist', or the function-specific parameter) is missing, or
    when clipping / linefinder parameters are only partially given.
    """
    dinfo = {}
    # mandatory keys; 'row' also decides whether the entry is kept at all
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        if isinstance(val, (list, tuple)):
            # flatten one level of nesting (masklist is a list of pairs)
            slval = []
            for item in val:
                if isinstance(item, (list, tuple)):
                    slval.extend(str(sub) for sub in item)
                else:
                    slval.append(str(item))
            sval = ",".join(slval)
        else:
            sval = str(val)
        dinfo[key] = sval

    if irow >= maxirow:
        return ''

    # optional function-specific parameters (scalar or flat sequence)
    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            val = blinfo[key]
            if isinstance(val, (list, tuple)):
                dinfo[key] = ",".join(str(item) for item in val)
            else:
                dinfo[key] = str(val)

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, (list, tuple)):
                dinfo[key] = ",".join(str(item) for item in val)
            else:
                dinfo[key] = str(val)

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the strings returned by apply_bltable() or sub_baseline(),
    each formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms' (see
    Scantable::packFittingResults()), and pack row number, best-fit
    coefficients and rms into a list of dictionaries.
    """
    res = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if string *s* (after stripping whitespace) parses
    as a float.

    Catches only ValueError: the original bare 'except' plus a
    'return' inside 'finally' silently swallowed *every* exception,
    including KeyboardInterrupt.
    """
    s = s.strip()
    try:
        float(s)
    except ValueError:
        return False
    return True
313
def is_frequency(s):
    """Return True if string *s* (after stripping) ends with 'hz',
    case-insensitively."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a pair of strings (number, frequency-with-unit) into a
    pair of floats in the unit implied by the SI prefix of *s2*
    (e.g. '100'/'1.4GHz' -> (1.0e11, 1.4e9)).

    Raises RuntimeError when *s1* is not a number or *s2* is not a
    frequency string.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # character just before the trailing 'Hz': a digit means no prefix
    prefix = s2[-3:-2]
    if is_number(prefix):
        return (float(s1), float(s2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
338
def is_velocity(s):
    """Return True if string *s* (after stripping) ends with 'm/s',
    case-insensitively (matches both 'm/s' and 'km/s')."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a pair of strings (number, velocity-with-unit) into a
    pair of floats in km/s; *s2* carries the unit ('m/s' or 'km/s').

    Raises RuntimeError when *s1* is not a number or *s2* is not a
    velocity string.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # character just before the trailing 'm/s': a digit means bare m/s
    prefix = s2[-4:-3]
    if is_number(prefix):
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity (km/s) into a frequency for the given rest
    frequency.

    doppler: 'radio', 'optical', or anything else, which selects the
    relativistic convention.
    """
    import math

    # speed of light in km/s
    vel_c = 299792.458
    beta = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - beta)
    if convention == 'optical':
        return restfreq / (1.0 + beta)
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
380
def get_restfreq_in_Hz(s_restfreq):
    """Convert a rest-frequency string like '115.27GHz' into a float
    value in Hz.

    The string is a number followed by an optional unit. The unit must
    be 'Hz' with at most one SI prefix (a,f,p,n,u,m,k,M,G,T,P,E);
    a unitless string is taken to be in Hz already. Spaces are ignored.

    Raises an Exception for any other unit.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # split the trailing alphabetic unit from the leading numeric value
    for i in range(len(s) - 1, -1, -1):
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        # unknown prefixes raise ValueError from .index, as before
        factor = factor_list[prefix_list.index(unit[0])]
        return value * factor
    else:
        raise Exception("wrong unit of restfreq.")
432
def normalise_restfreq(in_restfreq):
    """Normalise a rest-frequency specification into float Hz value(s).

    Accepts a float/int (returned as float), a string with a unit
    (parsed by get_restfreq_in_Hz), or a 1-D list/numpy array whose
    elements are any of the above or dicts with 'name'/'value' entries.

    Raises an Exception for unsupported types or a multi-dimensional
    array.
    """
    # 'long' only exists on Python 2; fall back gracefully elsewhere
    try:
        integer_types = (int, long)
    except NameError:
        integer_types = (int,)
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, integer_types):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, (list, numpy.ndarray)):
        if isinstance(in_restfreq, numpy.ndarray):
            if len(in_restfreq.shape) > 1:
                raise Exception("given in numpy.ndarray, in_restfreq must be 1-D.")

        res = []
        for elem in in_restfreq:
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, integer_types):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                # normalise only the 'value' entry; other value types
                # are silently skipped (as in the original code)
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    res.append({"name": elem["name"],
                                "value": float(elem["value"])})
                elif isinstance(elem["value"], str):
                    res.append({"name": elem["name"],
                                "value": get_restfreq_in_Hz(elem["value"])})
            else:
                raise Exception("restfreq elements must be float, int, or string.")
        return res
    else:
        raise Exception("wrong type of restfreq given.")
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*; an empty string or empty
    list means "leave the rest frequencies unchanged" (no-op)."""
    do_set = (restfreq != '') and (restfreq != [])
    if do_set:
        s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the arguments for the history record below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other plain file: pass to the NRO filler
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list of raw files: fill them all into one scantable
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
572
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the default output format from .asaprc
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the chosen format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all other formats are handled by the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
631
632 def copy(self):
633 """Return a copy of this scantable.
634
635 *Note*:
636
637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
638
639 Example::
640
641 copiedscan = scan.copy()
642
643 """
644 sd = scantable(Scantable._copy(self))
645 return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
    def get_scan(self, scanid=None):
        """\
        Return a specific scan (by scanno) or collection of scans (by
        source name) in a new scantable.

        *Note*:

            See scantable.drop_scan() for the inverse operation.

        Parameters:

            scanid:    a (list of) scanno or a source name, unix-style
                       patterns are accepted for source name matching, e.g.
                       '*_R' gets all 'ref scans

        Example::

            # get all scans containing the source '323p459'
            newscan = scan.get_scan('323p459')
            # get all 'off' scans
            refscans = scan.get_scan('*_R')
            # get a subset of scans by scanno (as listed in scan.summary())
            newscan = scan.get_scan([0, 2, 7, 10])

        """
        if scanid is None:
            raise RuntimeError( 'Please specify a scan no or name to '
                                'retrieve from the scantable' )
        try:
            bsel = self.get_selection()
            sel = selector()
            # dispatch on the exact type of scanid: a string selects by
            # source name (with pattern matching), int/list by scanno
            if type(scanid) is str:
                sel.set_name(scanid)
                return self._select_copy(sel)
            elif type(scanid) is int:
                sel.set_scans([scanid])
                return self._select_copy(sel)
            elif type(scanid) is list:
                sel.set_scans(scanid)
                return self._select_copy(sel)
            else:
                msg = "Illegal scanid type, use 'int' or 'list' if ints."
                raise TypeError(msg)
        except RuntimeError:
            # let selection errors propagate unchanged
            raise
723
724 def __str__(self):
725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
754 def get_spectrum(self, rowno):
755 """Return the spectrum for the current row in the scantable as a list.
756
757 Parameters:
758
759 rowno: the row number to retrieve the spectrum from
760
761 """
762 return self._getspectrum(rowno)
763
764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
766
767 Parameters:
768
769 rowno: the row number to retrieve the mask from
770
771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
789
790 *Note*:
791
792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
798 i.e.::
799
800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
803 Parameters:
804
805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
810 def get_selection(self):
811 """\
812 Get the selection object currently set on this scantable.
813
814 Example::
815
816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
819
820 """
821 return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
893 @asaplog_post_dec
894 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
895 """\
896 Determine the specified statistic of the current beam/if/pol
897 Takes a 'mask' as an optional parameter to specify which
898 channels should be excluded.
899
900 Parameters:
901
902 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
903 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
904
905 mask: an optional mask specifying where the statistic
906 should be determined.
907
908 form: format string to print statistic values
909
910 row: row number of spectrum to process.
911 (default is None: for all rows)
912
913 Example:
914 scan.set_unit('channel')
915 msk = scan.create_mask([100, 200], [500, 600])
916 scan.stats(stat='mean', mask=m)
917
918 """
919 mask = mask or []
920 if not self._check_ifs():
921 raise ValueError("Cannot apply mask as the IFs have different "
922 "number of channels. Please use setselection() "
923 "to select individual IFs")
924 rtnabc = False
925 if stat.lower().endswith('_abc'): rtnabc = True
926 getchan = False
927 if stat.lower().startswith('min') or stat.lower().startswith('max'):
928 chan = self._math._minmaxchan(self, mask, stat)
929 getchan = True
930 statvals = []
931 if not rtnabc:
932 if row == None:
933 statvals = self._math._stats(self, mask, stat)
934 else:
935 statvals = self._math._statsrow(self, mask, stat, int(row))
936
937 #def cb(i):
938 # return statvals[i]
939
940 #return self._row_callback(cb, stat)
941
942 label=stat
943 #callback=cb
944 out = ""
945 #outvec = []
946 sep = '-'*50
947
948 if row == None:
949 rows = xrange(self.nrow())
950 elif isinstance(row, int):
951 rows = [ row ]
952
953 for i in rows:
954 print '@@@@@@@@'
955 print '['+str(i)+'/'+str(rows)+'] -- '+('RowFlagged' if self._getflagrow(i) else '')
956 print '@@@@@@@@'
957 #if self._getflagrow(i): continue
958 refstr = ''
959 statunit= ''
960 if getchan:
961 qx, qy = self.chan2data(rowno=i, chan=chan[i])
962 if rtnabc:
963 statvals.append(qx['value'])
964 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
965 statunit= '['+qx['unit']+']'
966 else:
967 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
968
969 tm = self._gettime(i)
970 src = self._getsourcename(i)
971 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
972 out += 'Time[%s]:\n' % (tm)
973 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
974 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
975 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
976 #outvec.append(callback(i))
977 if len(rows) > 1:
978 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
979 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
980 else:
981 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
982 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
983 out += sep+"\n"
984
985 import os
986 if os.environ.has_key( 'USER' ):
987 usr = os.environ['USER']
988 else:
989 import commands
990 usr = commands.getoutput( 'whoami' )
991 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
992 f = open(tmpfile,'w')
993 print >> f, sep
994 print >> f, ' %s %s' % (label, statunit)
995 print >> f, sep
996 print >> f, out
997 f.close()
998 f = open(tmpfile,'r')
999 x = f.readlines()
1000 f.close()
1001 asaplog.push(''.join(x), False)
1002
1003 return statvals
1004
1005 def chan2data(self, rowno=0, chan=0):
1006 """\
1007 Returns channel/frequency/velocity and spectral value
1008 at an arbitrary row and channel in the scantable.
1009
1010 Parameters:
1011
1012 rowno: a row number in the scantable. Default is the
1013 first row, i.e. rowno=0
1014
1015 chan: a channel in the scantable. Default is the first
1016 channel, i.e. pos=0
1017
1018 """
1019 if isinstance(rowno, int) and isinstance(chan, int):
1020 qx = {'unit': self.get_unit(),
1021 'value': self._getabcissa(rowno)[chan]}
1022 qy = {'unit': self.get_fluxunit(),
1023 'value': self._getspectrum(rowno)[chan]}
1024 return qx, qy
1025
1026 def stddev(self, mask=None):
1027 """\
1028 Determine the standard deviation of the current beam/if/pol
1029 Takes a 'mask' as an optional parameter to specify which
1030 channels should be excluded.
1031
1032 Parameters:
1033
1034 mask: an optional mask specifying where the standard
1035 deviation should be determined.
1036
1037 Example::
1038
1039 scan.set_unit('channel')
1040 msk = scan.create_mask([100, 200], [500, 600])
1041 scan.stddev(mask=m)
1042
1043 """
1044 return self.stats(stat='stddev', mask=mask);
1045
1046
1047 def get_column_names(self):
1048 """\
1049 Return a list of column names, which can be used for selection.
1050 """
1051 return list(Scantable.get_column_names(self))
1052
1053 def get_tsys(self, row=-1):
1054 """\
1055 Return the System temperatures.
1056
1057 Parameters:
1058
1059 row: the rowno to get the information for. (default all rows)
1060
1061 Returns:
1062
1063 a list of Tsys values for the current selection
1064
1065 """
1066 if row > -1:
1067 return self._get_column(self._gettsys, row)
1068 return self._row_callback(self._gettsys, "Tsys")
1069
1070 def get_tsysspectrum(self, row=-1):
1071 """\
1072 Return the channel dependent system temperatures.
1073
1074 Parameters:
1075
1076 row: the rowno to get the information for. (default all rows)
1077
1078 Returns:
1079
1080 a list of Tsys values for the current selection
1081
1082 """
1083 return self._get_column( self._gettsysspectrum, row )
1084
1085 def set_tsys(self, values, row=-1):
1086 """\
1087 Set the Tsys value(s) of the given 'row' or the whole scantable
1088 (selection).
1089
1090 Parameters:
1091
1092 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1093 row: the row number to apply Tsys values to.
1094 (default all rows)
1095
1096 """
1097
1098 if not hasattr(values, "__len__"):
1099 values = [values]
1100 self._settsys(values, row)
1101
1102 def get_weather(self, row=-1):
1103 """\
1104 Return the weather information.
1105
1106 Parameters:
1107
1108 row: the rowno to get the information for. (default all rows)
1109
1110 Returns:
1111
1112 a dict or list of of dicts of values for the current selection
1113
1114 """
1115 if row >= len(self):
1116 raise IndexError("row out of range")
1117 values = self._get_column(self._get_weather, row)
1118 if row > -1:
1119 return {'temperature': values[0],
1120 'pressure': values[1], 'humidity' : values[2],
1121 'windspeed' : values[3], 'windaz' : values[4]
1122 }
1123 else:
1124 out = []
1125 for r in values:
1126 out.append({'temperature': r[0],
1127 'pressure': r[1], 'humidity' : r[2],
1128 'windspeed' : r[3], 'windaz' : r[4]
1129 })
1130 return out
1131
1132 def _row_callback(self, callback, label):
1133 out = ""
1134 outvec = []
1135 sep = '-'*50
1136 for i in range(self.nrow()):
1137 tm = self._gettime(i)
1138 src = self._getsourcename(i)
1139 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1140 out += 'Time[%s]:\n' % (tm)
1141 if self.nbeam(-1) > 1:
1142 out += ' Beam[%d] ' % (self.getbeam(i))
1143 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1144 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1145 outvec.append(callback(i))
1146 out += '= %3.3f\n' % (outvec[i])
1147 out += sep+'\n'
1148
1149 asaplog.push(sep)
1150 asaplog.push(" %s" % (label))
1151 asaplog.push(sep)
1152 asaplog.push(out)
1153 asaplog.post()
1154 return outvec
1155
1156 def _get_column(self, callback, row=-1, *args):
1157 """
1158 """
1159 if row == -1:
1160 return [callback(i, *args) for i in range(self.nrow())]
1161 else:
1162 if 0 <= row < self.nrow():
1163 return callback(row, *args)
1164
1165
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time of the selection)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # Strip sub-second/second/minute fields off the strptime format so
        # it matches the string layout produced for the chosen precision.
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # NOTE(review): relies on Python 2 integer division in
            # (6-prec)/2; under Python 3 this yields a float and the
            # range() call below would raise TypeError.
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        # _get_column returns a list for row == -1, a scalar otherwise
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1211
1212
1213 def get_inttime(self, row=-1):
1214 """\
1215 Get a list of integration times for the observations.
1216 Return a time in seconds for each integration in the scantable.
1217
1218 Parameters:
1219
1220 row: row no of integration. Default -1 return all rows.
1221
1222 """
1223 return self._get_column(self._getinttime, row)
1224
1225
1226 def get_sourcename(self, row=-1):
1227 """\
1228 Get a list source names for the observations.
1229 Return a string for each integration in the scantable.
1230 Parameters:
1231
1232 row: row no of integration. Default -1 return all rows.
1233
1234 """
1235 return self._get_column(self._getsourcename, row)
1236
1237 def get_elevation(self, row=-1):
1238 """\
1239 Get a list of elevations for the observations.
1240 Return a float for each integration in the scantable.
1241
1242 Parameters:
1243
1244 row: row no of integration. Default -1 return all rows.
1245
1246 """
1247 return self._get_column(self._getelevation, row)
1248
1249 def get_azimuth(self, row=-1):
1250 """\
1251 Get a list of azimuths for the observations.
1252 Return a float for each integration in the scantable.
1253
1254 Parameters:
1255 row: row no of integration. Default -1 return all rows.
1256
1257 """
1258 return self._get_column(self._getazimuth, row)
1259
1260 def get_parangle(self, row=-1):
1261 """\
1262 Get a list of parallactic angles for the observations.
1263 Return a float for each integration in the scantable.
1264
1265 Parameters:
1266
1267 row: row no of integration. Default -1 return all rows.
1268
1269 """
1270 return self._get_column(self._getparangle, row)
1271
1272 def get_direction(self, row=-1):
1273 """
1274 Get a list of Positions on the sky (direction) for the observations.
1275 Return a string for each integration in the scantable.
1276
1277 Parameters:
1278
1279 row: row no of integration. Default -1 return all rows
1280
1281 """
1282 return self._get_column(self._getdirection, row)
1283
1284 def get_directionval(self, row=-1):
1285 """\
1286 Get a list of Positions on the sky (direction) for the observations.
1287 Return a float for each integration in the scantable.
1288
1289 Parameters:
1290
1291 row: row no of integration. Default -1 return all rows
1292
1293 """
1294 return self._get_column(self._getdirectionvec, row)
1295
1296 @asaplog_post_dec
1297 def set_unit(self, unit='channel'):
1298 """\
1299 Set the unit for all following operations on this scantable
1300
1301 Parameters:
1302
1303 unit: optional unit, default is 'channel'. Use one of '*Hz',
1304 'km/s', 'channel' or equivalent ''
1305
1306 """
1307 varlist = vars()
1308 if unit in ['', 'pixel', 'channel']:
1309 unit = ''
1310 inf = list(self._getcoordinfo())
1311 inf[0] = unit
1312 self._setcoordinfo(inf)
1313 self._add_history("set_unit", varlist)
1314
1315 @asaplog_post_dec
1316 def set_instrument(self, instr):
1317 """\
1318 Set the instrument for subsequent processing.
1319
1320 Parameters:
1321
1322 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1323 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1324
1325 """
1326 self._setInstrument(instr)
1327 self._add_history("set_instument", vars())
1328
1329 @asaplog_post_dec
1330 def set_feedtype(self, feedtype):
1331 """\
1332 Overwrite the feed type, which might not be set correctly.
1333
1334 Parameters:
1335
1336 feedtype: 'linear' or 'circular'
1337
1338 """
1339 self._setfeedtype(feedtype)
1340 self._add_history("set_feedtype", vars())
1341
1342 @asaplog_post_dec
1343 def get_doppler(self):
1344 """\
1345 Get the doppler.
1346 """
1347 return self._getcoordinfo()[2]
1348
1349 @asaplog_post_dec
1350 def set_doppler(self, doppler='RADIO'):
1351 """\
1352 Set the doppler for all following operations on this scantable.
1353
1354 Parameters:
1355
1356 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1357
1358 """
1359 varlist = vars()
1360 inf = list(self._getcoordinfo())
1361 inf[2] = doppler
1362 self._setcoordinfo(inf)
1363 self._add_history("set_doppler", vars())
1364
1365 @asaplog_post_dec
1366 def set_freqframe(self, frame=None):
1367 """\
1368 Set the frame type of the Spectral Axis.
1369
1370 Parameters:
1371
1372 frame: an optional frame type, default 'LSRK'. Valid frames are:
1373 'TOPO', 'LSRD', 'LSRK', 'BARY',
1374 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1375
1376 Example::
1377
1378 scan.set_freqframe('BARY')
1379
1380 """
1381 frame = frame or rcParams['scantable.freqframe']
1382 varlist = vars()
1383 # "REST" is not implemented in casacore
1384 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1385 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1386 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1387 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1388
1389 if frame in valid:
1390 inf = list(self._getcoordinfo())
1391 inf[1] = frame
1392 self._setcoordinfo(inf)
1393 self._add_history("set_freqframe", varlist)
1394 else:
1395 msg = "Please specify a valid freq type. Valid types are:\n", valid
1396 raise TypeError(msg)
1397
1398 @asaplog_post_dec
1399 def set_dirframe(self, frame=""):
1400 """\
1401 Set the frame type of the Direction on the sky.
1402
1403 Parameters:
1404
1405 frame: an optional frame type, default ''. Valid frames are:
1406 'J2000', 'B1950', 'GALACTIC'
1407
1408 Example:
1409
1410 scan.set_dirframe('GALACTIC')
1411
1412 """
1413 varlist = vars()
1414 Scantable.set_dirframe(self, frame)
1415 self._add_history("set_dirframe", varlist)
1416
1417 def get_unit(self):
1418 """\
1419 Get the default unit set in this scantable
1420
1421 Returns:
1422
1423 A unit string
1424
1425 """
1426 inf = self._getcoordinfo()
1427 unit = inf[0]
1428 if unit == '': unit = 'channel'
1429 return unit
1430
1431 @asaplog_post_dec
1432 def get_abcissa(self, rowno=0):
1433 """\
1434 Get the abcissa in the current coordinate setup for the currently
1435 selected Beam/IF/Pol
1436
1437 Parameters:
1438
1439 rowno: an optional row number in the scantable. Default is the
1440 first row, i.e. rowno=0
1441
1442 Returns:
1443
1444 The abcissa values and the format string (as a dictionary)
1445
1446 """
1447 abc = self._getabcissa(rowno)
1448 lbl = self._getabcissalabel(rowno)
1449 return abc, lbl
1450
1451 @asaplog_post_dec
1452 def flag(self, mask=None, unflag=False, row=-1):
1453 """\
1454 Flag the selected data using an optional channel mask.
1455
1456 Parameters:
1457
1458 mask: an optional channel mask, created with create_mask. Default
1459 (no mask) is all channels.
1460
1461 unflag: if True, unflag the data
1462
1463 row: an optional row number in the scantable.
1464 Default -1 flags all rows
1465
1466 """
1467 varlist = vars()
1468 mask = mask or []
1469 self._flag(row, mask, unflag)
1470 self._add_history("flag", varlist)
1471
1472 @asaplog_post_dec
1473 def flag_row(self, rows=None, unflag=False):
1474 """\
1475 Flag the selected data in row-based manner.
1476
1477 Parameters:
1478
1479 rows: list of row numbers to be flagged. Default is no row
1480 (must be explicitly specified to execute row-based
1481 flagging).
1482
1483 unflag: if True, unflag the data.
1484
1485 """
1486 varlist = vars()
1487 if rows is None:
1488 rows = []
1489 self._flag_row(rows, unflag)
1490 self._add_history("flag_row", varlist)
1491
1492 @asaplog_post_dec
1493 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1494 """\
1495 Flag the selected data outside a specified range (in channel-base)
1496
1497 Parameters:
1498
1499 uthres: upper threshold.
1500
1501 dthres: lower threshold
1502
1503 clipoutside: True for flagging data outside the range
1504 [dthres:uthres].
1505 False for flagging data inside the range.
1506
1507 unflag: if True, unflag the data.
1508
1509 """
1510 varlist = vars()
1511 self._clip(uthres, dthres, clipoutside, unflag)
1512 self._add_history("clip", varlist)
1513
1514 @asaplog_post_dec
1515 def lag_flag(self, start, end, unit="MHz", insitu=None):
1516 """\
1517 Flag the data in 'lag' space by providing a frequency to remove.
1518 Flagged data in the scantable get interpolated over the region.
1519 No taper is applied.
1520
1521 Parameters:
1522
1523 start: the start frequency (really a period within the
1524 bandwidth) or period to remove
1525
1526 end: the end frequency or period to remove
1527
1528 unit: the frequency unit (default 'MHz') or '' for
1529 explicit lag channels
1530
1531 *Notes*:
1532
1533 It is recommended to flag edges of the band or strong
1534 signals beforehand.
1535
1536 """
1537 if insitu is None: insitu = rcParams['insitu']
1538 self._math._setinsitu(insitu)
1539 varlist = vars()
1540 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1541 if not (unit == "" or base.has_key(unit)):
1542 raise ValueError("%s is not a valid unit." % unit)
1543 if unit == "":
1544 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1545 else:
1546 s = scantable(self._math._lag_flag(self, start*base[unit],
1547 end*base[unit], "frequency"))
1548 s._add_history("lag_flag", varlist)
1549 if insitu:
1550 self._assign(s)
1551 else:
1552 return s
1553
1554 @asaplog_post_dec
1555 def fft(self, rowno=None, mask=None, getrealimag=False):
1556 """\
1557 Apply FFT to the spectra.
1558 Flagged data in the scantable get interpolated over the region.
1559
1560 Parameters:
1561
1562 rowno: The row number(s) to be processed. int, list
1563 and tuple are accepted. By default (None), FFT
1564 is applied to the whole data.
1565
1566 mask: Auxiliary channel mask(s). Given as a boolean
1567 list, it is applied to all specified rows.
1568 A list of boolean lists can also be used to
1569 apply different masks. In the latter case, the
1570 length of 'mask' must be the same as that of
1571 'rowno'. The default is None.
1572
1573 getrealimag: If True, returns the real and imaginary part
1574 values of the complex results.
1575 If False (the default), returns the amplitude
1576 (absolute value) normalised with Ndata/2 and
1577 phase (argument, in unit of radian).
1578
1579 Returns:
1580
1581 A list of dictionaries containing the results for each spectrum.
1582 Each dictionary contains two values, the real and the imaginary
1583 parts when getrealimag = True, or the amplitude(absolute value)
1584 and the phase(argument) when getrealimag = False. The key for
1585 these values are 'real' and 'imag', or 'ampl' and 'phase',
1586 respectively.
1587 """
1588 if rowno is None:
1589 rowno = []
1590 if isinstance(rowno, int):
1591 rowno = [rowno]
1592 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1593 raise TypeError("The row number(s) must be int, list or tuple.")
1594 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1595
1596 usecommonmask = True
1597
1598 if mask is None:
1599 mask = []
1600 if isinstance(mask, list) or isinstance(mask, tuple):
1601 if len(mask) == 0:
1602 mask = [[]]
1603 else:
1604 if isinstance(mask[0], bool):
1605 if len(mask) != self.nchan(self.getif(rowno[0])):
1606 raise ValueError("The spectra and the mask have "
1607 "different length.")
1608 mask = [mask]
1609 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1610 usecommonmask = False
1611 if len(mask) != len(rowno):
1612 raise ValueError("When specifying masks for each "
1613 "spectrum, the numbers of them "
1614 "must be identical.")
1615 for i in xrange(mask):
1616 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1617 raise ValueError("The spectra and the mask have "
1618 "different length.")
1619 else:
1620 raise TypeError("The mask must be a boolean list or "
1621 "a list of boolean list.")
1622 else:
1623 raise TypeError("The mask must be a boolean list or a list of "
1624 "boolean list.")
1625
1626 res = []
1627
1628 imask = 0
1629 for whichrow in rowno:
1630 fspec = self._fft(whichrow, mask[imask], getrealimag)
1631 nspec = len(fspec)
1632
1633 i = 0
1634 v1 = []
1635 v2 = []
1636 reselem = {"real":[],"imag":[]} if getrealimag \
1637 else {"ampl":[],"phase":[]}
1638
1639 while (i < nspec):
1640 v1.append(fspec[i])
1641 v2.append(fspec[i+1])
1642 i += 2
1643
1644 if getrealimag:
1645 reselem["real"] += v1
1646 reselem["imag"] += v2
1647 else:
1648 reselem["ampl"] += v1
1649 reselem["phase"] += v2
1650
1651 res.append(reselem)
1652
1653 if not usecommonmask:
1654 imask += 1
1655
1656 return res
1657
1658 @asaplog_post_dec
1659 def create_mask(self, *args, **kwargs):
1660 """\
1661 Compute and return a mask based on [min, max] windows.
1662 The specified windows are to be INCLUDED, when the mask is
1663 applied.
1664
1665 Parameters:
1666
1667 [min, max], [min2, max2], ...
1668 Pairs of start/end points (inclusive)specifying the regions
1669 to be masked
1670
1671 invert: optional argument. If specified as True,
1672 return an inverted mask, i.e. the regions
1673 specified are EXCLUDED
1674
1675 row: create the mask using the specified row for
1676 unit conversions, default is row=0
1677 only necessary if frequency varies over rows.
1678
1679 Examples::
1680
1681 scan.set_unit('channel')
1682 # a)
1683 msk = scan.create_mask([400, 500], [800, 900])
1684 # masks everything outside 400 and 500
1685 # and 800 and 900 in the unit 'channel'
1686
1687 # b)
1688 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1689 # masks the regions between 400 and 500
1690 # and 800 and 900 in the unit 'channel'
1691
1692 # c)
1693 #mask only channel 400
1694 msk = scan.create_mask([400])
1695
1696 """
1697 row = kwargs.get("row", 0)
1698 data = self._getabcissa(row)
1699 u = self._getcoordinfo()[0]
1700 if u == "":
1701 u = "channel"
1702 msg = "The current mask window unit is %s" % u
1703 i = self._check_ifs()
1704 if not i:
1705 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1706 asaplog.push(msg)
1707 n = len(data)
1708 msk = _n_bools(n, False)
1709 # test if args is a 'list' or a 'normal *args - UGLY!!!
1710
1711 ws = (isinstance(args[-1][-1], int)
1712 or isinstance(args[-1][-1], float)) and args or args[0]
1713 for window in ws:
1714 if len(window) == 1:
1715 window = [window[0], window[0]]
1716 if len(window) == 0 or len(window) > 2:
1717 raise ValueError("A window needs to be defined as "
1718 "[start(, end)]")
1719 if window[0] > window[1]:
1720 tmp = window[0]
1721 window[0] = window[1]
1722 window[1] = tmp
1723 for i in range(n):
1724 if data[i] >= window[0] and data[i] <= window[1]:
1725 msk[i] = True
1726 if kwargs.has_key('invert'):
1727 if kwargs.get('invert'):
1728 msk = mask_not(msk)
1729 return msk
1730
1731 def get_masklist(self, mask=None, row=0, silent=False):
1732 """\
1733 Compute and return a list of mask windows, [min, max].
1734
1735 Parameters:
1736
1737 mask: channel mask, created with create_mask.
1738
1739 row: calcutate the masklist using the specified row
1740 for unit conversions, default is row=0
1741 only necessary if frequency varies over rows.
1742
1743 Returns:
1744
1745 [min, max], [min2, max2], ...
1746 Pairs of start/end points (inclusive)specifying
1747 the masked regions
1748
1749 """
1750 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1751 raise TypeError("The mask should be list or tuple.")
1752 if len(mask) <= 0:
1753 raise TypeError("The mask elements should be > 0")
1754 data = self._getabcissa(row)
1755 if len(data) != len(mask):
1756 msg = "Number of channels in scantable != number of mask elements"
1757 raise TypeError(msg)
1758 u = self._getcoordinfo()[0]
1759 if u == "":
1760 u = "channel"
1761 msg = "The current mask window unit is %s" % u
1762 i = self._check_ifs()
1763 if not i:
1764 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1765 if not silent:
1766 asaplog.push(msg)
1767 masklist = []
1768 ist, ien = None, None
1769 ist, ien=self.get_mask_indices(mask)
1770 if ist is not None and ien is not None:
1771 for i in xrange(len(ist)):
1772 range=[data[ist[i]],data[ien[i]]]
1773 range.sort()
1774 masklist.append([range[0],range[1]])
1775 return masklist
1776
1777 def get_mask_indices(self, mask=None):
1778 """\
1779 Compute and Return lists of mask start indices and mask end indices.
1780
1781 Parameters:
1782
1783 mask: channel mask, created with create_mask.
1784
1785 Returns:
1786
1787 List of mask start indices and that of mask end indices,
1788 i.e., [istart1,istart2,....], [iend1,iend2,....].
1789
1790 """
1791 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1792 raise TypeError("The mask should be list or tuple.")
1793 if len(mask) <= 0:
1794 raise TypeError("The mask elements should be > 0")
1795 istart = []
1796 iend = []
1797 if mask[0]:
1798 istart.append(0)
1799 for i in range(len(mask)-1):
1800 if not mask[i] and mask[i+1]:
1801 istart.append(i+1)
1802 elif mask[i] and not mask[i+1]:
1803 iend.append(i)
1804 if mask[len(mask)-1]:
1805 iend.append(len(mask)-1)
1806 if len(istart) != len(iend):
1807 raise RuntimeError("Numbers of mask start != mask end.")
1808 for i in range(len(istart)):
1809 if istart[i] > iend[i]:
1810 raise RuntimeError("Mask start index > mask end index")
1811 break
1812 return istart,iend
1813
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*'   = all spws (all channels)
                           '<2,4~6,9'  = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws whose central frequency
                                          falls in frequency range between
                                          1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels which fall
                                                     between the specified
                                                     frequency range in spws
                                                     whose central frequency
                                                     falls in the specified
                                                     frequency range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
            restfreq: the rest frequency.
                     examples: '115.2712GHz', 115271201800.0
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
            doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): is_number(), is_frequency(), is_velocity(),
        # get_freq_by_string(), get_velocity_by_string(),
        # get_frequency_by_velocity() and set_restfreq() are module-level
        # helpers defined outside this excerpt.
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # Parsing works in channel units; the original unit (and, when
        # overridden, rest frequency / frame / doppler) is restored at the end.
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        # res maps each selected spw to a list of [cmin, cmax] channel ranges
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # each length is 1 (separator absent) or 2 (present once);
                # the product tells which, if any, separator appeared
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz': select spws whose centre frequency
                            # falls inside the given frequency range
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            expr_fmin = min(expr_f0, expr_f1)
                            expr_fmax = max(expr_f0, expr_f1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                """
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False
                                """

                                crd = coord['coord']
                                fhead = crd.to_frequency(0)
                                ftail = crd.to_frequency(self.nchan(spw) - 1)
                                fcen  = (fhead + ftail) / 2.0

                                if ((expr_fmin <= fcen) and (fcen <= expr_fmax)):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s': select spws whose centre velocity
                            # falls inside the given velocity range
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                """
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False
                                """

                                crd = coord['coord']
                                fhead = crd.to_frequency(0)
                                ftail = crd.to_frequency(self.nchan(spw) - 1)
                                fcen  = (fhead + ftail) / 2.0
                                vcen  = crd.to_velocity(crd.to_pixel(fcen))

                                if ((expr_vmin <= vcen) and (vcen <= expr_vmax)):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz': convert to pixel range
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s': velocity -> frequency -> pixel
                                    restf = self.get_restfreqs()[molid][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    dppl = self.get_doppler()
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if (len(crange_list[0]) > 0):
                    if res.has_key(spw):
                        res[spw].extend(crange_list)
                    else:
                        res[spw] = crange_list

        for spw in res.keys():
            if spw in valid_ifs:
                # remove duplicated channel ranges
                for i in reversed(xrange(len(res[spw]))):
                    for j in xrange(i):
                        if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \
                           (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):
                            asaplog.post()
                            merge_warn_mesg = "Spw " + str(spw) + ": overwrapping channel ranges are merged."
                            asaplog.push(merge_warn_mesg)
                            asaplog.post('WARN')
                            res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])
                            res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])
                            res[spw].pop(i)
                            break
            else:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)

        return res
2206
2207 @asaplog_post_dec
2208 def get_first_rowno_by_if(self, ifno):
2209 found = False
2210 for irow in xrange(self.nrow()):
2211 if (self.getif(irow) == ifno):
2212 res = irow
2213 found = True
2214 break
2215
2216 if not found: raise RuntimeError("No valid spw.")
2217
2218 return res
2219
2220 @asaplog_post_dec
2221 def _get_coordinate_list(self):
2222 res = []
2223 spws = self.getifnos()
2224 for spw in spws:
2225 elem = {}
2226 elem['if'] = spw
2227 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2228 res.append(elem)
2229
2230 return res
2231
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty selection means "all valid IFs, all channels"
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                # NOTE(review): this check can never trigger -- when selspws
                # is empty the enclosing loop body is not entered at all
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row of this IF to obtain its
                        # frequency coordinate and abcissa range
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset = half a channel width in the current unit
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None stands for "whole spectrum of this IF"
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2369
2370 @asaplog_post_dec
2371 def parse_idx_selection(self, mode, selexpr):
2372 """
2373 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2374 BEAMNO, and row number
2375
2376 Parameters:
2377 mode : which column to select.
2378 ['scan',|'if'|'pol'|'beam'|'row']
2379 selexpr : A comma separated selection expression.
2380 examples:
2381 '' = all (returns [])
2382 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2383 (returns [0,1,4,5,6,9])
2384 Returns:
2385 A List of selected indices
2386 """
2387 if selexpr == "":
2388 return []
2389 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2390 'b': 'beam', 'r': 'row'}
2391 smode = mode.lower()[0]
2392 if not (smode in valid_modes.keys()):
2393 msg = "Invalid mode '%s'. Valid modes are %s" %\
2394 (mode, str(valid_modes.values()))
2395 asaplog.post()
2396 asaplog.push(msg)
2397 asaplog.post("ERROR")
2398 mode = valid_modes[smode]
2399 minidx = None
2400 maxidx = None
2401 if smode == 'r':
2402 minidx = 0
2403 maxidx = self.nrow()
2404 else:
2405 idx = getattr(self,"get"+mode+"nos")()
2406 minidx = min(idx)
2407 maxidx = max(idx)
2408 del idx
2409 # split selexpr by "<spaces>,<spaces>"
2410 comma_sep = re.compile('\s*,\s*')
2411 sellist = comma_sep.split(selexpr)
2412 idxlist = []
2413 for currselstr in sellist:
2414 # single range (may include ~, < or >)
2415 currlist = self._parse_selection(currselstr, typestr='integer',
2416 minval=minidx,maxval=maxidx)
2417 for thelist in currlist:
2418 idxlist += range(thelist[0],thelist[1]+1)
2419 # remove duplicated elements after first ones
2420 for i in reversed(xrange(len(idxlist))):
2421 if idxlist.index(idxlist[i]) < i:
2422 idxlist.pop(i)
2423 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2424 asaplog.push(msg)
2425 return idxlist
2426
2427 def _parse_selection(self, selstr, typestr='float', offset=0.,
2428 minval=None, maxval=None):
2429 """
2430 Parameters:
2431 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2432 typestr : The type of the values in returned list
2433 ('integer' or 'float')
2434 offset : The offset value to subtract from or add to
2435 the boundary value if the selection string
2436 includes '<' or '>' [Valid only for typestr='float']
2437 minval, maxval : The minimum/maximum values to set if the
2438 selection string includes '<' or '>'.
2439 The list element is filled with None by default.
2440 Returns:
2441 A list of min/max pair of selections.
2442 Example:
2443 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2444 --> returns [[0,2],[5,7],[9,9]]
2445 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2446 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2447 """
2448 # split selstr by '<spaces>;<spaces>'
2449 semi_sep = re.compile('\s*;\s*')
2450 selgroups = semi_sep.split(selstr)
2451 sellists = []
2452 if typestr.lower().startswith('int'):
2453 formatfunc = int
2454 offset = 1
2455 else:
2456 formatfunc = float
2457
2458 for currsel in selgroups:
2459 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2460 minsel = minval
2461 maxsel = maxval
2462 if currsel.find('~') > 0:
2463 # val0 <= x <= val1
2464 minsel = formatfunc(currsel.split('~')[0].strip())
2465 maxsel = formatfunc(currsel.split('~')[1].strip())
2466 elif currsel.strip().find('<=') > -1:
2467 bound = currsel.split('<=')
2468 try: # try "x <= val"
2469 minsel = minval
2470 maxsel = formatfunc(bound[1].strip())
2471 except ValueError: # now "val <= x"
2472 minsel = formatfunc(bound[0].strip())
2473 maxsel = maxval
2474 elif currsel.strip().find('>=') > -1:
2475 bound = currsel.split('>=')
2476 try: # try "x >= val"
2477 minsel = formatfunc(bound[1].strip())
2478 maxsel = maxval
2479 except ValueError: # now "val >= x"
2480 minsel = minval
2481 maxsel = formatfunc(bound[0].strip())
2482 elif currsel.strip().find('<') > -1:
2483 bound = currsel.split('<')
2484 try: # try "x < val"
2485 minsel = minval
2486 maxsel = formatfunc(bound[1].strip()) \
2487 - formatfunc(offset)
2488 except ValueError: # now "val < x"
2489 minsel = formatfunc(bound[0].strip()) \
2490 + formatfunc(offset)
2491 maxsel = maxval
2492 elif currsel.strip().find('>') > -1:
2493 bound = currsel.split('>')
2494 try: # try "x > val"
2495 minsel = formatfunc(bound[1].strip()) \
2496 + formatfunc(offset)
2497 maxsel = maxval
2498 except ValueError: # now "val > x"
2499 minsel = minval
2500 maxsel = formatfunc(bound[0].strip()) \
2501 - formatfunc(offset)
2502 else:
2503 minsel = formatfunc(currsel)
2504 maxsel = formatfunc(currsel)
2505 sellists.append([minsel,maxsel])
2506 return sellists
2507
2508# def get_restfreqs(self):
2509# """
2510# Get the restfrequency(s) stored in this scantable.
2511# The return value(s) are always of unit 'Hz'
2512# Parameters:
2513# none
2514# Returns:
2515# a list of doubles
2516# """
2517# return list(self._getrestfreqs())
2518
2519 def get_restfreqs(self, ids=None):
2520 """\
2521 Get the restfrequency(s) stored in this scantable.
2522 The return value(s) are always of unit 'Hz'
2523
2524 Parameters:
2525
2526 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2527 be retrieved
2528
2529 Returns:
2530
2531 dictionary containing ids and a list of doubles for each id
2532
2533 """
2534 if ids is None:
2535 rfreqs = {}
2536 idlist = self.getmolnos()
2537 for i in idlist:
2538 rfreqs[i] = list(self._getrestfreqs(i))
2539 return rfreqs
2540 else:
2541 if type(ids) == list or type(ids) == tuple:
2542 rfreqs = {}
2543 for i in ids:
2544 rfreqs[i] = list(self._getrestfreqs(i))
2545 return rfreqs
2546 else:
2547 return list(self._getrestfreqs(ids))
2548
2549 @asaplog_post_dec
2550 def set_restfreqs(self, freqs=None, unit='Hz'):
2551 """\
2552 Set or replace the restfrequency specified and
2553 if the 'freqs' argument holds a scalar,
2554 then that rest frequency will be applied to all the selected
2555 data. If the 'freqs' argument holds
2556 a vector, then it MUST be of equal or smaller length than
2557 the number of IFs (and the available restfrequencies will be
2558 replaced by this vector). In this case, *all* data have
2559 the restfrequency set per IF according
2560 to the corresponding value you give in the 'freqs' vector.
2561 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2562 IF 1 gets restfreq 2e9.
2563
2564 You can also specify the frequencies via a linecatalog.
2565
2566 Parameters:
2567
2568 freqs: list of rest frequency values or string idenitfiers
2569
2570 unit: unit for rest frequency (default 'Hz')
2571
2572
2573 Example::
2574
2575 # set the given restfrequency for the all currently selected IFs
2576 scan.set_restfreqs(freqs=1.4e9)
2577 # set restfrequencies for the n IFs (n > 1) in the order of the
2578 # list, i.e
2579 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2580 # len(list_of_restfreqs) == nIF
2581 # for nIF == 1 the following will set multiple restfrequency for
2582 # that IF
2583 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2584 # set multiple restfrequencies per IF. as a list of lists where
2585 # the outer list has nIF elements, the inner s arbitrary
2586 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2587
2588 *Note*:
2589
2590 To do more sophisticate Restfrequency setting, e.g. on a
2591 source and IF basis, use scantable.set_selection() before using
2592 this function::
2593
2594 # provided your scantable is called scan
2595 selection = selector()
2596 selection.set_name('ORION*')
2597 selection.set_ifs([1])
2598 scan.set_selection(selection)
2599 scan.set_restfreqs(freqs=86.6e9)
2600
2601 """
2602 varlist = vars()
2603 from asap import linecatalog
2604 # simple value
2605 if isinstance(freqs, int) or isinstance(freqs, float):
2606 self._setrestfreqs([freqs], [""], unit)
2607 # list of values
2608 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2609 # list values are scalars
2610 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2611 if len(freqs) == 1:
2612 self._setrestfreqs(freqs, [""], unit)
2613 else:
2614 # allow the 'old' mode of setting mulitple IFs
2615 savesel = self._getselection()
2616 sel = self.get_selection()
2617 iflist = self.getifnos()
2618 if len(freqs)>len(iflist):
2619 raise ValueError("number of elements in list of list "
2620 "exeeds the current IF selections")
2621 iflist = self.getifnos()
2622 for i, fval in enumerate(freqs):
2623 sel.set_ifs(iflist[i])
2624 self._setselection(sel)
2625 self._setrestfreqs([fval], [""], unit)
2626 self._setselection(savesel)
2627
2628 # list values are dict, {'value'=, 'name'=)
2629 elif isinstance(freqs[-1], dict):
2630 values = []
2631 names = []
2632 for d in freqs:
2633 values.append(d["value"])
2634 names.append(d["name"])
2635 self._setrestfreqs(values, names, unit)
2636 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2637 savesel = self._getselection()
2638 sel = self.get_selection()
2639 iflist = self.getifnos()
2640 if len(freqs)>len(iflist):
2641 raise ValueError("number of elements in list of list exeeds"
2642 " the current IF selections")
2643 for i, fval in enumerate(freqs):
2644 sel.set_ifs(iflist[i])
2645 self._setselection(sel)
2646 self._setrestfreqs(fval, [""], unit)
2647 self._setselection(savesel)
2648 # freqs are to be taken from a linecatalog
2649 elif isinstance(freqs, linecatalog):
2650 savesel = self._getselection()
2651 sel = self.get_selection()
2652 for i in xrange(freqs.nrow()):
2653 sel.set_ifs(iflist[i])
2654 self._setselection(sel)
2655 self._setrestfreqs([freqs.get_frequency(i)],
2656 [freqs.get_name(i)], "MHz")
2657 # ensure that we are not iterating past nIF
2658 if i == self.nif()-1: break
2659 self._setselection(savesel)
2660 else:
2661 return
2662 self._add_history("set_restfreqs", varlist)
2663
2664 @asaplog_post_dec
2665 def shift_refpix(self, delta):
2666 """\
2667 Shift the reference pixel of the Spectra Coordinate by an
2668 integer amount.
2669
2670 Parameters:
2671
2672 delta: the amount to shift by
2673
2674 *Note*:
2675
2676 Be careful using this with broadband data.
2677
2678 """
2679 varlist = vars()
2680 Scantable.shift_refpix(self, delta)
2681 s._add_history("shift_refpix", varlist)
2682
2683 @asaplog_post_dec
2684 def history(self, filename=None, nrows=-1, start=0):
2685 """\
2686 Print the history. Optionally to a file.
2687
2688 Parameters:
2689
2690 filename: The name of the file to save the history to.
2691
2692 """
2693 n = self._historylength()
2694 if nrows == -1:
2695 nrows = n
2696 if start+nrows > n:
2697 nrows = nrows-start
2698 if n > 1000 and nrows == n:
2699 nrows = 1000
2700 start = n-1000
2701 asaplog.push("Warning: History has {0} entries. Displaying last "
2702 "1000".format(n))
2703 hist = list(self._gethistory(nrows, start))
2704 out = "-"*80
2705 for h in hist:
2706 if not h.strip():
2707 continue
2708 if h.find("---") >-1:
2709 continue
2710 else:
2711 items = h.split("##")
2712 date = items[0]
2713 func = items[1]
2714 items = items[2:]
2715 out += "\n"+date+"\n"
2716 out += "Function: %s\n Parameters:" % (func)
2717 for i in items:
2718 if i == '':
2719 continue
2720 s = i.split("=")
2721 out += "\n %s = %s" % (s[0], s[1])
2722 out = "\n".join([out, "*"*80])
2723 if filename is not None:
2724 if filename is "":
2725 filename = 'scantable_history.txt'
2726 filename = os.path.expandvars(os.path.expanduser(filename))
2727 if not os.path.isdir(filename):
2728 data = open(filename, 'w')
2729 data.write(out)
2730 data.close()
2731 else:
2732 msg = "Illegal file name '%s'." % (filename)
2733 raise IOError(msg)
2734 return page(out)
2735
2736 #
2737 # Maths business
2738 #
2739 @asaplog_post_dec
2740 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2741 avmode="NONE"):
2742 """\
2743 Return the (time) weighted average of a scan. Scans will be averaged
2744 only if the source direction (RA/DEC) is within 1' otherwise
2745
2746 *Note*:
2747
2748 in channels only - align if necessary
2749
2750 Parameters:
2751
2752 mask: an optional mask (only used for 'var' and 'tsys'
2753 weighting)
2754
2755 scanav: True averages each scan separately
2756 False (default) averages all scans together,
2757
2758 weight: Weighting scheme.
2759 'none' (mean no weight)
2760 'var' (1/var(spec) weighted)
2761 'tsys' (1/Tsys**2 weighted)
2762 'tint' (integration time weighted)
2763 'tintsys' (Tint/Tsys**2)
2764 'median' ( median averaging)
2765 The default is 'tint'
2766
2767 align: align the spectra in velocity before averaging. It takes
2768 the time of the first spectrum as reference time.
2769 avmode: 'SOURCE' - also select by source name - or
2770 'NONE' (default). Not applicable for scanav=True or
2771 weight=median
2772
2773 Example::
2774
2775 # time average the scantable without using a mask
2776 newscan = scan.average_time()
2777
2778 """
2779 varlist = vars()
2780 weight = weight or 'TINT'
2781 mask = mask or ()
2782 scanav = (scanav and 'SCAN') or avmode.upper()
2783 scan = (self, )
2784
2785 if align:
2786 scan = (self.freq_align(insitu=False), )
2787 asaplog.push("Note: Alignment is don on a source-by-source basis")
2788 asaplog.push("Note: Averaging (by default) is not")
2789 # we need to set it to SOURCE averaging here
2790 s = None
2791 if weight.upper() == 'MEDIAN':
2792 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2793 scanav))
2794 else:
2795 s = scantable(self._math._average(scan, mask, weight.upper(),
2796 scanav))
2797 s._add_history("average_time", varlist)
2798 return s
2799
2800 @asaplog_post_dec
2801 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2802 """\
2803 Return a scan where all spectra are converted to either
2804 Jansky or Kelvin depending upon the flux units of the scan table.
2805 By default the function tries to look the values up internally.
2806 If it can't find them (or if you want to over-ride), you must
2807 specify EITHER jyperk OR eta (and D which it will try to look up
2808 also if you don't set it). jyperk takes precedence if you set both.
2809
2810 Parameters:
2811
2812 jyperk: the Jy / K conversion factor
2813
2814 eta: the aperture efficiency
2815
2816 d: the geometric diameter (metres)
2817
2818 insitu: if False a new scantable is returned.
2819 Otherwise, the scaling is done in-situ
2820 The default is taken from .asaprc (False)
2821
2822 """
2823 if insitu is None: insitu = rcParams['insitu']
2824 self._math._setinsitu(insitu)
2825 varlist = vars()
2826 jyperk = jyperk or -1.0
2827 d = d or -1.0
2828 eta = eta or -1.0
2829 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2830 s._add_history("convert_flux", varlist)
2831 if insitu: self._assign(s)
2832 else: return s
2833
2834 @asaplog_post_dec
2835 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2836 """\
2837 Return a scan after applying a gain-elevation correction.
2838 The correction can be made via either a polynomial or a
2839 table-based interpolation (and extrapolation if necessary).
2840 You specify polynomial coefficients, an ascii table or neither.
2841 If you specify neither, then a polynomial correction will be made
2842 with built in coefficients known for certain telescopes (an error
2843 will occur if the instrument is not known).
2844 The data and Tsys are *divided* by the scaling factors.
2845
2846 Parameters:
2847
2848 poly: Polynomial coefficients (default None) to compute a
2849 gain-elevation correction as a function of
2850 elevation (in degrees).
2851
2852 filename: The name of an ascii file holding correction factors.
2853 The first row of the ascii file must give the column
2854 names and these MUST include columns
2855 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2856 by this) somewhere.
2857 The second row must give the data type of the
2858 column. Use 'R' for Real and 'I' for Integer.
2859 An example file would be
2860 (actual factors are arbitrary) :
2861
2862 TIME ELEVATION FACTOR
2863 R R R
2864 0.1 0 0.8
2865 0.2 20 0.85
2866 0.3 40 0.9
2867 0.4 60 0.85
2868 0.5 80 0.8
2869 0.6 90 0.75
2870
2871 method: Interpolation method when correcting from a table.
2872 Values are 'nearest', 'linear' (default), 'cubic'
2873 and 'spline'
2874
2875 insitu: if False a new scantable is returned.
2876 Otherwise, the scaling is done in-situ
2877 The default is taken from .asaprc (False)
2878
2879 """
2880
2881 if insitu is None: insitu = rcParams['insitu']
2882 self._math._setinsitu(insitu)
2883 varlist = vars()
2884 poly = poly or ()
2885 from os.path import expandvars
2886 filename = expandvars(filename)
2887 s = scantable(self._math._gainel(self, poly, filename, method))
2888 s._add_history("gain_el", varlist)
2889 if insitu:
2890 self._assign(s)
2891 else:
2892 return s
2893
2894 @asaplog_post_dec
2895 def freq_align(self, reftime=None, method='cubic', insitu=None):
2896 """\
2897 Return a scan where all rows have been aligned in frequency/velocity.
2898 The alignment frequency frame (e.g. LSRK) is that set by function
2899 set_freqframe.
2900
2901 Parameters:
2902
2903 reftime: reference time to align at. By default, the time of
2904 the first row of data is used.
2905
2906 method: Interpolation method for regridding the spectra.
2907 Choose from 'nearest', 'linear', 'cubic' (default)
2908 and 'spline'
2909
2910 insitu: if False a new scantable is returned.
2911 Otherwise, the scaling is done in-situ
2912 The default is taken from .asaprc (False)
2913
2914 """
2915 if insitu is None: insitu = rcParams["insitu"]
2916 oldInsitu = self._math._insitu()
2917 self._math._setinsitu(insitu)
2918 varlist = vars()
2919 reftime = reftime or ""
2920 s = scantable(self._math._freq_align(self, reftime, method))
2921 s._add_history("freq_align", varlist)
2922 self._math._setinsitu(oldInsitu)
2923 if insitu:
2924 self._assign(s)
2925 else:
2926 return s
2927
2928 @asaplog_post_dec
2929 def opacity(self, tau=None, insitu=None):
2930 """\
2931 Apply an opacity correction. The data
2932 and Tsys are multiplied by the correction factor.
2933
2934 Parameters:
2935
2936 tau: (list of) opacity from which the correction factor is
2937 exp(tau*ZD)
2938 where ZD is the zenith-distance.
2939 If a list is provided, it has to be of length nIF,
2940 nIF*nPol or 1 and in order of IF/POL, e.g.
2941 [opif0pol0, opif0pol1, opif1pol0 ...]
2942 if tau is `None` the opacities are determined from a
2943 model.
2944
2945 insitu: if False a new scantable is returned.
2946 Otherwise, the scaling is done in-situ
2947 The default is taken from .asaprc (False)
2948
2949 """
2950 if insitu is None:
2951 insitu = rcParams['insitu']
2952 self._math._setinsitu(insitu)
2953 varlist = vars()
2954 if not hasattr(tau, "__len__"):
2955 tau = [tau]
2956 s = scantable(self._math._opacity(self, tau))
2957 s._add_history("opacity", varlist)
2958 if insitu:
2959 self._assign(s)
2960 else:
2961 return s
2962
2963 @asaplog_post_dec
2964 def bin(self, width=5, insitu=None):
2965 """\
2966 Return a scan where all spectra have been binned up.
2967
2968 Parameters:
2969
2970 width: The bin width (default=5) in pixels
2971
2972 insitu: if False a new scantable is returned.
2973 Otherwise, the scaling is done in-situ
2974 The default is taken from .asaprc (False)
2975
2976 """
2977 if insitu is None:
2978 insitu = rcParams['insitu']
2979 self._math._setinsitu(insitu)
2980 varlist = vars()
2981 s = scantable(self._math._bin(self, width))
2982 s._add_history("bin", varlist)
2983 if insitu:
2984 self._assign(s)
2985 else:
2986 return s
2987
2988 @asaplog_post_dec
2989 def reshape(self, first, last, insitu=None):
2990 """Resize the band by providing first and last channel.
2991 This will cut off all channels outside [first, last].
2992 """
2993 if insitu is None:
2994 insitu = rcParams['insitu']
2995 varlist = vars()
2996 if last < 0:
2997 last = self.nchan()-1 + last
2998 s = None
2999 if insitu:
3000 s = self
3001 else:
3002 s = self.copy()
3003 s._reshape(first,last)
3004 s._add_history("reshape", varlist)
3005 if not insitu:
3006 return s
3007
3008 @asaplog_post_dec
3009 def resample(self, width=5, method='cubic', insitu=None):
3010 """\
3011 Return a scan where all spectra have been binned up.
3012
3013 Parameters:
3014
3015 width: The bin width (default=5) in pixels
3016
3017 method: Interpolation method when correcting from a table.
3018 Values are 'nearest', 'linear', 'cubic' (default)
3019 and 'spline'
3020
3021 insitu: if False a new scantable is returned.
3022 Otherwise, the scaling is done in-situ
3023 The default is taken from .asaprc (False)
3024
3025 """
3026 if insitu is None:
3027 insitu = rcParams['insitu']
3028 self._math._setinsitu(insitu)
3029 varlist = vars()
3030 s = scantable(self._math._resample(self, method, width))
3031 s._add_history("resample", varlist)
3032 if insitu:
3033 self._assign(s)
3034 else:
3035 return s
3036
3037 @asaplog_post_dec
3038 def average_pol(self, mask=None, weight='none'):
3039 """\
3040 Average the Polarisations together.
3041
3042 Parameters:
3043
3044 mask: An optional mask defining the region, where the
3045 averaging will be applied. The output will have all
3046 specified points masked.
3047
3048 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3049 weighted), or 'tsys' (1/Tsys**2 weighted)
3050
3051 """
3052 varlist = vars()
3053 mask = mask or ()
3054 s = scantable(self._math._averagepol(self, mask, weight.upper()))
3055 s._add_history("average_pol", varlist)
3056 return s
3057
3058 @asaplog_post_dec
3059 def average_beam(self, mask=None, weight='none'):
3060 """\
3061 Average the Beams together.
3062
3063 Parameters:
3064 mask: An optional mask defining the region, where the
3065 averaging will be applied. The output will have all
3066 specified points masked.
3067
3068 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3069 weighted), or 'tsys' (1/Tsys**2 weighted)
3070
3071 """
3072 varlist = vars()
3073 mask = mask or ()
3074 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3075 s._add_history("average_beam", varlist)
3076 return s
3077
3078 def parallactify(self, pflag):
3079 """\
3080 Set a flag to indicate whether this data should be treated as having
3081 been 'parallactified' (total phase == 0.0)
3082
3083 Parameters:
3084
3085 pflag: Bool indicating whether to turn this on (True) or
3086 off (False)
3087
3088 """
3089 varlist = vars()
3090 self._parallactify(pflag)
3091 self._add_history("parallactify", varlist)
3092
3093 @asaplog_post_dec
3094 def convert_pol(self, poltype=None):
3095 """\
3096 Convert the data to a different polarisation type.
3097 Note that you will need cross-polarisation terms for most conversions.
3098
3099 Parameters:
3100
3101 poltype: The new polarisation type. Valid types are:
3102 'linear', 'circular', 'stokes' and 'linpol'
3103
3104 """
3105 varlist = vars()
3106 s = scantable(self._math._convertpol(self, poltype))
3107 s._add_history("convert_pol", varlist)
3108 return s
3109
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data for plotting / per-row rejection
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' steers the interactive verification loop below; once set
        # to "N" (after an 'R' reply) all remaining rows are rejected
        # without plotting. NOTE(review): the exact meaning of the initial
        # 'H' is defined by _get_verify_action -- confirm there.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3202
3203 @asaplog_post_dec
3204 def regrid_channel(self, width=5, plot=False, insitu=None):
3205 """\
3206 Regrid the spectra by the specified channel width
3207
3208 Parameters:
3209
3210 width: The channel width (float) of regridded spectra
3211 in the current spectral unit.
3212
3213 plot: [NOT IMPLEMENTED YET]
3214 plot the original and the regridded spectra.
3215 In this each indivual fit has to be approved, by
3216 typing 'y' or 'n'
3217
3218 insitu: if False a new scantable is returned.
3219 Otherwise, the scaling is done in-situ
3220 The default is taken from .asaprc (False)
3221
3222 """
3223 if insitu is None: insitu = rcParams['insitu']
3224 varlist = vars()
3225
3226 if plot:
3227 asaplog.post()
3228 asaplog.push("Verification plot is not implemtnetd yet.")
3229 asaplog.post("WARN")
3230
3231 s = self.copy()
3232 s._regrid_specchan(width)
3233
3234 s._add_history("regrid_channel", varlist)
3235
3236# if plot:
3237# from asap.asapplotter import new_asaplot
3238# theplot = new_asaplot(rcParams['plotter.gui'])
3239# from matplotlib import rc as rcp
3240# rcp('lines', linewidth=1)
3241# theplot.set_panels()
3242# ylab=s._get_ordinate_label()
3243# #theplot.palette(0,["#777777","red"])
3244# for r in xrange(s.nrow()):
3245# xsm=s._getabcissa(r)
3246# ysm=s._getspectrum(r)
3247# xorg=orgscan._getabcissa(r)
3248# yorg=orgscan._getspectrum(r)
3249# theplot.clear()
3250# theplot.hold()
3251# theplot.set_axes('ylabel',ylab)
3252# theplot.set_axes('xlabel',s._getabcissalabel(r))
3253# theplot.set_axes('title',s._getsourcename(r))
3254# theplot.set_line(label='Original',color="#777777")
3255# theplot.plot(xorg,yorg)
3256# theplot.set_line(label='Smoothed',color="red")
3257# theplot.plot(xsm,ysm)
3258# ### Ugly part for legend
3259# for i in [0,1]:
3260# theplot.subplots[0]['lines'].append(
3261# [theplot.subplots[0]['axes'].lines[i]]
3262# )
3263# theplot.release()
3264# ### Ugly part for legend
3265# theplot.subplots[0]['lines']=[]
3266# res = raw_input("Accept smoothing ([y]/n): ")
3267# if res.upper() == 'N':
3268# s._setspectrum(yorg, r)
3269# theplot.quit()
3270# del theplot
3271# del orgscan
3272
3273 if insitu: self._assign(s)
3274 else: return s
3275
3276 @asaplog_post_dec
3277 def _parse_wn(self, wn):
3278 if isinstance(wn, list) or isinstance(wn, tuple):
3279 return wn
3280 elif isinstance(wn, int):
3281 return [ wn ]
3282 elif isinstance(wn, str):
3283 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3284 val = wn.split('-')
3285 val = [int(val[0]), int(val[1])]
3286 val.sort()
3287 res = [i for i in xrange(val[0], val[1]+1)]
3288 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3289 val = int(wn[2:])+1
3290 res = [i for i in xrange(val)]
3291 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3292 val = int(wn[:-2])+1
3293 res = [i for i in xrange(val)]
3294 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3295 val = int(wn[1:])
3296 res = [i for i in xrange(val)]
3297 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3298 val = int(wn[:-1])
3299 res = [i for i in xrange(val)]
3300 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3301 # then interpreted in C++
3302 # side as [a,a+1,...,a_nyq]
3303 # (CAS-3759)
3304 val = int(wn[2:])
3305 res = [val, -999]
3306 #res = [i for i in xrange(val, self.nchan()/2+1)]
3307 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3308 # then interpreted in C++
3309 # side as [a,a+1,...,a_nyq]
3310 # (CAS-3759)
3311 val = int(wn[:-2])
3312 res = [val, -999]
3313 #res = [i for i in xrange(val, self.nchan()/2+1)]
3314 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3315 # then interpreted in C++
3316 # side as [a+1,a+2,...,a_nyq]
3317 # (CAS-3759)
3318 val = int(wn[1:])+1
3319 res = [val, -999]
3320 #res = [i for i in xrange(val, self.nchan()/2+1)]
3321 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3322 # then interpreted in C++
3323 # side as [a+1,a+2,...,a_nyq]
3324 # (CAS-3759)
3325 val = int(wn[:-1])+1
3326 res = [val, -999]
3327 #res = [i for i in xrange(val, self.nchan()/2+1)]
3328
3329 return res
3330 else:
3331 msg = 'wrong value given for addwn/rejwn'
3332 raise RuntimeError(msg)
3333
3334 @asaplog_post_dec
3335 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3336 """\
3337 Subtract baseline based on parameters written in Baseline Table.
3338
3339 Parameters:
3340 insitu: if True, baseline fitting/subtraction is done
3341 in-situ. If False, a new scantable with
3342 baseline subtracted is returned. Actually,
3343 format of the returned value depends on both
3344 insitu and retfitres (see below).
3345 The default is taken from .asaprc (False)
3346 retfitres: if True, the results of baseline fitting (i.e.,
3347 coefficients and rms) are returned.
3348 default is False.
3349 The format of the returned value of this
3350 function varies as follows:
3351 (1) in case insitu=True and retfitres=True:
3352 fitting result.
3353 (2) in case insitu=True and retfitres=False:
3354 None.
3355 (3) in case insitu=False and retfitres=True:
3356 a dictionary containing a new scantable
3357 (with baseline subtracted) and the fitting
3358 results.
3359 (4) in case insitu=False and retfitres=False:
3360 a new scantable (with baseline subtracted).
3361 inbltable: name of input baseline table. The row number of
3362 scantable and that of inbltable must be
3363 identical.
3364 outbltable: name of output baseline table where baseline
3365 parameters and fitting results recorded.
3366 default is ''(no output).
3367 overwrite: if True when an existing baseline table is
3368 specified for outbltable, overwrites it.
3369 Otherwise there is no harm.
3370 default is False.
3371 """
3372
3373 try:
3374 varlist = vars()
3375 if retfitres is None: retfitres = False
3376 if inbltable is None: raise ValueError("bltable missing.")
3377 if outbltable is None: outbltable = ''
3378 if overwrite is None: overwrite = False
3379
3380 if insitu is None: insitu = rcParams['insitu']
3381 if insitu:
3382 workscan = self
3383 else:
3384 workscan = self.copy()
3385
3386 sres = workscan._apply_bltable(inbltable,
3387 retfitres,
3388 outbltable,
3389 os.path.exists(outbltable),
3390 overwrite)
3391 if retfitres: res = parse_fitresult(sres)
3392
3393 workscan._add_history('apply_bltable', varlist)
3394
3395 if insitu:
3396 self._assign(workscan)
3397 if retfitres:
3398 return res
3399 else:
3400 return None
3401 else:
3402 if retfitres:
3403 return {'scantable': workscan, 'fitresults': res}
3404 else:
3405 return workscan
3406
3407 except RuntimeError, e:
3408 raise_fitting_failure_exception(e)
3409
3410 @asaplog_post_dec
3411 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3412 """\
3413 Subtract baseline based on parameters written in the input list.
3414
3415 Parameters:
3416 insitu: if True, baseline fitting/subtraction is done
3417 in-situ. If False, a new scantable with
3418 baseline subtracted is returned. Actually,
3419 format of the returned value depends on both
3420 insitu and retfitres (see below).
3421 The default is taken from .asaprc (False)
3422 retfitres: if True, the results of baseline fitting (i.e.,
3423 coefficients and rms) are returned.
3424 default is False.
3425 The format of the returned value of this
3426 function varies as follows:
3427 (1) in case insitu=True and retfitres=True:
3428 fitting result.
3429 (2) in case insitu=True and retfitres=False:
3430 None.
3431 (3) in case insitu=False and retfitres=True:
3432 a dictionary containing a new scantable
3433 (with baseline subtracted) and the fitting
3434 results.
3435 (4) in case insitu=False and retfitres=False:
3436 a new scantable (with baseline subtracted).
3437 blinfo: baseline parameter set stored in a dictionary
3438 or a list of dictionary. Each dictionary
3439 corresponds to each spectrum and must contain
3440 the following keys and values:
3441 'row': row number,
3442 'blfunc': function name. available ones include
3443 'poly', 'chebyshev', 'cspline' and
3444 'sinusoid',
3445 'order': maximum order of polynomial. needed
3446 if blfunc='poly' or 'chebyshev',
3447 'npiece': number or piecewise polynomial.
3448 needed if blfunc='cspline',
3449 'nwave': a list of sinusoidal wave numbers.
3450 needed if blfunc='sinusoid', and
3451 'masklist': min-max windows for channel mask.
3452 the specified ranges will be used
3453 for fitting.
3454 bltable: name of output baseline table where baseline
3455 parameters and fitting results recorded.
3456 default is ''(no output).
3457 overwrite: if True when an existing baseline table is
3458 specified for bltable, overwrites it.
3459 Otherwise there is no harm.
3460 default is False.
3461
3462 Example:
3463 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3464 'masklist':[[10,350],[352,510]]},
3465 {'row':1, 'blfunc':'cspline', 'npiece':3,
3466 'masklist':[[3,16],[19,404],[407,511]]}
3467 ])
3468
3469 the first spectrum (row=0) will be fitted with polynomial
3470 of order=5 and the next one (row=1) will be fitted with cubic
3471 spline consisting of 3 pieces.
3472 """
3473
3474 try:
3475 varlist = vars()
3476 if retfitres is None: retfitres = False
3477 if blinfo is None: blinfo = []
3478 if bltable is None: bltable = ''
3479 if overwrite is None: overwrite = False
3480
3481 if insitu is None: insitu = rcParams['insitu']
3482 if insitu:
3483 workscan = self
3484 else:
3485 workscan = self.copy()
3486
3487 nrow = workscan.nrow()
3488
3489 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3490
3491 print "in_blinfo=< "+ str(in_blinfo)+" >"
3492
3493 sres = workscan._sub_baseline(in_blinfo,
3494 retfitres,
3495 bltable,
3496 os.path.exists(bltable),
3497 overwrite)
3498 if retfitres: res = parse_fitresult(sres)
3499
3500 workscan._add_history('sub_baseline', varlist)
3501
3502 if insitu:
3503 self._assign(workscan)
3504 if retfitres:
3505 return res
3506 else:
3507 return None
3508 else:
3509 if retfitres:
3510 return {'scantable': workscan, 'fitresults': res}
3511 else:
3512 return workscan
3513
3514 except RuntimeError, e:
3515 raise_fitting_failure_exception(e)
3516
3517 @asaplog_post_dec
3518 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3519 whichrow=None, uselinefinder=None, edge=None,
3520 threshold=None, chan_avg_limit=None):
3521 """\
3522 Calculates and returns model selection criteria for a specified
3523 baseline model and a given spectrum data.
3524 Available values include Akaike Information Criterion (AIC), the
3525 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3526 Bayesian Information Criterion (BIC) and the Generalised Cross
3527 Validation (GCV).
3528
3529 Parameters:
3530 value: name of model selection criteria to calculate.
3531 available ones include 'aic', 'aicc', 'bic' and
3532 'gcv'. default is 'aicc'.
3533 blfunc: baseline function name. available ones include
3534 'chebyshev', 'cspline' and 'sinusoid'.
3535 default is 'chebyshev'.
3536 order: parameter for basline function. actually stands for
3537 order of polynomial (order) for 'chebyshev',
3538 number of spline pieces (npiece) for 'cspline' and
3539 maximum wave number for 'sinusoid', respectively.
3540 default is 5 (which is also the default order value
3541 for [auto_]chebyshev_baseline()).
3542 mask: an optional mask. default is [].
3543 whichrow: row number. default is 0 (the first row)
3544 uselinefinder: use sd.linefinder() to flag out line regions
3545 default is True.
3546 edge: an optional number of channel to drop at
3547 the edge of spectrum. If only one value is
3548 specified, the same number will be dropped
3549 from both sides of the spectrum. Default
3550 is to keep all channels. Nested tuples
3551 represent individual edge selection for
3552 different IFs (a number of spectral channels
3553 can be different)
3554 default is (0, 0).
3555 threshold: the threshold used by line finder. It is
3556 better to keep it large as only strong lines
3557 affect the baseline solution.
3558 default is 3.
3559 chan_avg_limit: a maximum number of consequtive spectral
3560 channels to average during the search of
3561 weak and broad lines. The default is no
3562 averaging (and no search for weak lines).
3563 If such lines can affect the fitted baseline
3564 (e.g. a high order polynomial is fitted),
3565 increase this parameter (usually values up
3566 to 8 are reasonable). Most users of this
3567 method should find the default value sufficient.
3568 default is 1.
3569
3570 Example:
3571 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3572 """
3573
3574 try:
3575 varlist = vars()
3576
3577 if value is None: value = 'aicc'
3578 if blfunc is None: blfunc = 'chebyshev'
3579 if order is None: order = 5
3580 if mask is None: mask = []
3581 if whichrow is None: whichrow = 0
3582 if uselinefinder is None: uselinefinder = True
3583 if edge is None: edge = (0, 0)
3584 if threshold is None: threshold = 3
3585 if chan_avg_limit is None: chan_avg_limit = 1
3586
3587 return self._calc_aic(value, blfunc, order, mask,
3588 whichrow, uselinefinder, edge,
3589 threshold, chan_avg_limit)
3590
3591 except RuntimeError, e:
3592 raise_fitting_failure_exception(e)
3593
3594 @asaplog_post_dec
3595 def sinusoid_baseline(self, mask=None, applyfft=None,
3596 fftmethod=None, fftthresh=None,
3597 addwn=None, rejwn=None,
3598 insitu=None,
3599 clipthresh=None, clipniter=None,
3600 plot=None,
3601 getresidual=None,
3602 showprogress=None, minnrow=None,
3603 outlog=None,
3604 blfile=None, csvformat=None,
3605 bltable=None):
3606 """\
3607 Return a scan which has been baselined (all rows) with sinusoidal
3608 functions.
3609
3610 Parameters:
3611 mask: an optional mask
3612 applyfft: if True use some method, such as FFT, to find
3613 strongest sinusoidal components in the wavenumber
3614 domain to be used for baseline fitting.
3615 default is True.
3616 fftmethod: method to find the strong sinusoidal components.
3617 now only 'fft' is available and it is the default.
3618 fftthresh: the threshold to select wave numbers to be used for
3619 fitting from the distribution of amplitudes in the
3620 wavenumber domain.
3621 both float and string values accepted.
3622 given a float value, the unit is set to sigma.
3623 for string values, allowed formats include:
3624 'xsigma' or 'x' (= x-sigma level. e.g.,
3625 '3sigma'), or
3626 'topx' (= the x strongest ones, e.g. 'top5').
3627 default is 3.0 (unit: sigma).
3628 addwn: the additional wave numbers to be used for fitting.
3629 list or integer value is accepted to specify every
3630 wave numbers. also string value can be used in case
3631 you need to specify wave numbers in a certain range,
3632 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3633 '<a' (= 0,1,...,a-2,a-1),
3634 '>=a' (= a, a+1, ... up to the maximum wave
3635 number corresponding to the Nyquist
3636 frequency for the case of FFT).
3637 default is [0].
3638 rejwn: the wave numbers NOT to be used for fitting.
3639 can be set just as addwn but has higher priority:
3640 wave numbers which are specified both in addwn
3641 and rejwn will NOT be used. default is [].
3642 insitu: if False a new scantable is returned.
3643 Otherwise, the scaling is done in-situ
3644 The default is taken from .asaprc (False)
3645 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3646 clipniter: maximum number of iteration of 'clipthresh'-sigma
3647 clipping (default is 0)
3648 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3649 plot the fit and the residual. In this each
3650 indivual fit has to be approved, by typing 'y'
3651 or 'n'
3652 getresidual: if False, returns best-fit values instead of
3653 residual. (default is True)
3654 showprogress: show progress status for large data.
3655 default is True.
3656 minnrow: minimum number of input spectra to show.
3657 default is 1000.
3658 outlog: Output the coefficients of the best-fit
3659 function to logger (default is False)
3660 blfile: Name of a text file in which the best-fit
3661 parameter values to be written
3662 (default is '': no file/logger output)
3663 csvformat: if True blfile is csv-formatted, default is False.
3664 bltable: name of a baseline table where fitting results
3665 (coefficients, rms, etc.) are to be written.
3666 if given, fitting results will NOT be output to
3667 scantable (insitu=True) or None will be
3668 returned (insitu=False).
3669 (default is "": no table output)
3670
3671 Example:
3672 # return a scan baselined by a combination of sinusoidal curves
3673 # having wave numbers in spectral window up to 10,
3674 # also with 3-sigma clipping, iteration up to 4 times
3675 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3676
3677 Note:
3678 The best-fit parameter values output in logger and/or blfile are now
3679 based on specunit of 'channel'.
3680 """
3681
3682 try:
3683 varlist = vars()
3684
3685 if insitu is None: insitu = rcParams['insitu']
3686 if insitu:
3687 workscan = self
3688 else:
3689 workscan = self.copy()
3690
3691 if mask is None: mask = []
3692 if applyfft is None: applyfft = True
3693 if fftmethod is None: fftmethod = 'fft'
3694 if fftthresh is None: fftthresh = 3.0
3695 if addwn is None: addwn = [0]
3696 if rejwn is None: rejwn = []
3697 if clipthresh is None: clipthresh = 3.0
3698 if clipniter is None: clipniter = 0
3699 if plot is None: plot = False
3700 if getresidual is None: getresidual = True
3701 if showprogress is None: showprogress = True
3702 if minnrow is None: minnrow = 1000
3703 if outlog is None: outlog = False
3704 if blfile is None: blfile = ''
3705 if csvformat is None: csvformat = False
3706 if bltable is None: bltable = ''
3707
3708 sapplyfft = 'true' if applyfft else 'false'
3709 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3710
3711 scsvformat = 'T' if csvformat else 'F'
3712
3713 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3714 workscan._sinusoid_baseline(mask,
3715 fftinfo,
3716 #applyfft, fftmethod.lower(),
3717 #str(fftthresh).lower(),
3718 workscan._parse_wn(addwn),
3719 workscan._parse_wn(rejwn),
3720 clipthresh, clipniter,
3721 getresidual,
3722 pack_progress_params(showprogress,
3723 minnrow),
3724 outlog, scsvformat+blfile,
3725 bltable)
3726 workscan._add_history('sinusoid_baseline', varlist)
3727
3728 if bltable == '':
3729 if insitu:
3730 self._assign(workscan)
3731 else:
3732 return workscan
3733 else:
3734 if not insitu:
3735 return None
3736
3737 except RuntimeError, e:
3738 raise_fitting_failure_exception(e)
3739
3740
3741 @asaplog_post_dec
3742 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3743 fftmethod=None, fftthresh=None,
3744 addwn=None, rejwn=None,
3745 insitu=None,
3746 clipthresh=None, clipniter=None,
3747 edge=None, threshold=None, chan_avg_limit=None,
3748 plot=None,
3749 getresidual=None,
3750 showprogress=None, minnrow=None,
3751 outlog=None,
3752 blfile=None, csvformat=None,
3753 bltable=None):
3754 """\
3755 Return a scan which has been baselined (all rows) with sinusoidal
3756 functions.
3757 Spectral lines are detected first using linefinder and masked out
3758 to avoid them affecting the baseline solution.
3759
3760 Parameters:
3761 mask: an optional mask retreived from scantable
3762 applyfft: if True use some method, such as FFT, to find
3763 strongest sinusoidal components in the wavenumber
3764 domain to be used for baseline fitting.
3765 default is True.
3766 fftmethod: method to find the strong sinusoidal components.
3767 now only 'fft' is available and it is the default.
3768 fftthresh: the threshold to select wave numbers to be used for
3769 fitting from the distribution of amplitudes in the
3770 wavenumber domain.
3771 both float and string values accepted.
3772 given a float value, the unit is set to sigma.
3773 for string values, allowed formats include:
3774 'xsigma' or 'x' (= x-sigma level. e.g.,
3775 '3sigma'), or
3776 'topx' (= the x strongest ones, e.g. 'top5').
3777 default is 3.0 (unit: sigma).
3778 addwn: the additional wave numbers to be used for fitting.
3779 list or integer value is accepted to specify every
3780 wave numbers. also string value can be used in case
3781 you need to specify wave numbers in a certain range,
3782 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3783 '<a' (= 0,1,...,a-2,a-1),
3784 '>=a' (= a, a+1, ... up to the maximum wave
3785 number corresponding to the Nyquist
3786 frequency for the case of FFT).
3787 default is [0].
3788 rejwn: the wave numbers NOT to be used for fitting.
3789 can be set just as addwn but has higher priority:
3790 wave numbers which are specified both in addwn
3791 and rejwn will NOT be used. default is [].
3792 insitu: if False a new scantable is returned.
3793 Otherwise, the scaling is done in-situ
3794 The default is taken from .asaprc (False)
3795 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3796 clipniter: maximum number of iteration of 'clipthresh'-sigma
3797 clipping (default is 0)
3798 edge: an optional number of channel to drop at
3799 the edge of spectrum. If only one value is
3800 specified, the same number will be dropped
3801 from both sides of the spectrum. Default
3802 is to keep all channels. Nested tuples
3803 represent individual edge selection for
3804 different IFs (a number of spectral channels
3805 can be different)
3806 threshold: the threshold used by line finder. It is
3807 better to keep it large as only strong lines
3808 affect the baseline solution.
3809 chan_avg_limit: a maximum number of consequtive spectral
3810 channels to average during the search of
3811 weak and broad lines. The default is no
3812 averaging (and no search for weak lines).
3813 If such lines can affect the fitted baseline
3814 (e.g. a high order polynomial is fitted),
3815 increase this parameter (usually values up
3816 to 8 are reasonable). Most users of this
3817 method should find the default value sufficient.
3818 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3819 plot the fit and the residual. In this each
3820 indivual fit has to be approved, by typing 'y'
3821 or 'n'
3822 getresidual: if False, returns best-fit values instead of
3823 residual. (default is True)
3824 showprogress: show progress status for large data.
3825 default is True.
3826 minnrow: minimum number of input spectra to show.
3827 default is 1000.
3828 outlog: Output the coefficients of the best-fit
3829 function to logger (default is False)
3830 blfile: Name of a text file in which the best-fit
3831 parameter values to be written
3832 (default is "": no file/logger output)
3833 csvformat: if True blfile is csv-formatted, default is False.
3834 bltable: name of a baseline table where fitting results
3835 (coefficients, rms, etc.) are to be written.
3836 if given, fitting results will NOT be output to
3837 scantable (insitu=True) or None will be
3838 returned (insitu=False).
3839 (default is "": no table output)
3840
3841 Example:
3842 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3843
3844 Note:
3845 The best-fit parameter values output in logger and/or blfile are now
3846 based on specunit of 'channel'.
3847 """
3848
3849 try:
3850 varlist = vars()
3851
3852 if insitu is None: insitu = rcParams['insitu']
3853 if insitu:
3854 workscan = self
3855 else:
3856 workscan = self.copy()
3857
3858 if mask is None: mask = []
3859 if applyfft is None: applyfft = True
3860 if fftmethod is None: fftmethod = 'fft'
3861 if fftthresh is None: fftthresh = 3.0
3862 if addwn is None: addwn = [0]
3863 if rejwn is None: rejwn = []
3864 if clipthresh is None: clipthresh = 3.0
3865 if clipniter is None: clipniter = 0
3866 if edge is None: edge = (0,0)
3867 if threshold is None: threshold = 3
3868 if chan_avg_limit is None: chan_avg_limit = 1
3869 if plot is None: plot = False
3870 if getresidual is None: getresidual = True
3871 if showprogress is None: showprogress = True
3872 if minnrow is None: minnrow = 1000
3873 if outlog is None: outlog = False
3874 if blfile is None: blfile = ''
3875 if csvformat is None: csvformat = False
3876 if bltable is None: bltable = ''
3877
3878 sapplyfft = 'true' if applyfft else 'false'
3879 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3880
3881 scsvformat = 'T' if csvformat else 'F'
3882
3883 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3884 workscan._auto_sinusoid_baseline(mask,
3885 fftinfo,
3886 workscan._parse_wn(addwn),
3887 workscan._parse_wn(rejwn),
3888 clipthresh, clipniter,
3889 normalise_edge_param(edge),
3890 threshold, chan_avg_limit,
3891 getresidual,
3892 pack_progress_params(showprogress,
3893 minnrow),
3894 outlog, scsvformat+blfile, bltable)
3895 workscan._add_history("auto_sinusoid_baseline", varlist)
3896
3897 if bltable == '':
3898 if insitu:
3899 self._assign(workscan)
3900 else:
3901 return workscan
3902 else:
3903 if not insitu:
3904 return None
3905
3906 except RuntimeError, e:
3907 raise_fitting_failure_exception(e)
3908
3909 @asaplog_post_dec
3910 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3911 clipthresh=None, clipniter=None, plot=None,
3912 getresidual=None, showprogress=None, minnrow=None,
3913 outlog=None, blfile=None, csvformat=None,
3914 bltable=None):
3915 """\
3916 Return a scan which has been baselined (all rows) by cubic spline
3917 function (piecewise cubic polynomial).
3918
3919 Parameters:
3920 mask: An optional mask
3921 npiece: Number of pieces. (default is 2)
3922 insitu: If False a new scantable is returned.
3923 Otherwise, the scaling is done in-situ
3924 The default is taken from .asaprc (False)
3925 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3926 clipniter: maximum number of iteration of 'clipthresh'-sigma
3927 clipping (default is 0)
3928 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3929 plot the fit and the residual. In this each
3930 indivual fit has to be approved, by typing 'y'
3931 or 'n'
3932 getresidual: if False, returns best-fit values instead of
3933 residual. (default is True)
3934 showprogress: show progress status for large data.
3935 default is True.
3936 minnrow: minimum number of input spectra to show.
3937 default is 1000.
3938 outlog: Output the coefficients of the best-fit
3939 function to logger (default is False)
3940 blfile: Name of a text file in which the best-fit
3941 parameter values to be written
3942 (default is "": no file/logger output)
3943 csvformat: if True blfile is csv-formatted, default is False.
3944 bltable: name of a baseline table where fitting results
3945 (coefficients, rms, etc.) are to be written.
3946 if given, fitting results will NOT be output to
3947 scantable (insitu=True) or None will be
3948 returned (insitu=False).
3949 (default is "": no table output)
3950
3951 Example:
3952 # return a scan baselined by a cubic spline consisting of 2 pieces
3953 # (i.e., 1 internal knot),
3954 # also with 3-sigma clipping, iteration up to 4 times
3955 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3956
3957 Note:
3958 The best-fit parameter values output in logger and/or blfile are now
3959 based on specunit of 'channel'.
3960 """
3961
3962 try:
3963 varlist = vars()
3964
3965 if insitu is None: insitu = rcParams['insitu']
3966 if insitu:
3967 workscan = self
3968 else:
3969 workscan = self.copy()
3970
3971 if mask is None: mask = []
3972 if npiece is None: npiece = 2
3973 if clipthresh is None: clipthresh = 3.0
3974 if clipniter is None: clipniter = 0
3975 if plot is None: plot = False
3976 if getresidual is None: getresidual = True
3977 if showprogress is None: showprogress = True
3978 if minnrow is None: minnrow = 1000
3979 if outlog is None: outlog = False
3980 if blfile is None: blfile = ''
3981 if csvformat is None: csvformat = False
3982 if bltable is None: bltable = ''
3983
3984 scsvformat = 'T' if csvformat else 'F'
3985
3986 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3987 workscan._cspline_baseline(mask, npiece,
3988 clipthresh, clipniter,
3989 getresidual,
3990 pack_progress_params(showprogress,
3991 minnrow),
3992 outlog, scsvformat+blfile,
3993 bltable)
3994 workscan._add_history("cspline_baseline", varlist)
3995
3996 if bltable == '':
3997 if insitu:
3998 self._assign(workscan)
3999 else:
4000 return workscan
4001 else:
4002 if not insitu:
4003 return None
4004
4005 except RuntimeError, e:
4006 raise_fitting_failure_exception(e)
4007
4008 @asaplog_post_dec
    def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            npiece:         Number of pieces. (default is 2)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the argument values for the history record
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            # substitute the documented defaults for unset arguments
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # one-character flag prepended to blfile telling the C++
            # layer whether the file should be csv-formatted
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            # NOTE(review): the positional argument order of this binding
            # call is fixed on the C++ side -- do not reorder.
            workscan._auto_cspline_baseline(mask, npiece,
                                            clipthresh, clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog,
                                            scsvformat+blfile,
                                            bltable)
            workscan._add_history("auto_cspline_baseline", varlist)

            # when a baseline table is written, results are not put back
            # into the scantable: return nothing (insitu) or None
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4133
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order 5,
            # with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5, clipthresh=3.0, clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in documented defaults for any unset parameter
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # one-character flag prepended to blfile name for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            # NOTE: plot=True is currently unavailable for this method
            # (no fitter-level implementation yet); the fit is done
            # entirely in the C++ layer below.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # when writing to a baseline table, nothing goes back to the
            # scantable and None is returned for insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4231
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in documented defaults for any unset parameter
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # one-character flag prepended to blfile name for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            # NOTE: plot=True is currently unavailable for this method;
            # line finding and fitting are done in the C++ layer below.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when writing to a baseline table, nothing goes back to the
            # scantable and None is returned for insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4354
4355 @asaplog_post_dec
4356 def poly_baseline(self, mask=None, order=None, insitu=None,
4357 clipthresh=None, clipniter=None, plot=None,
4358 getresidual=None, showprogress=None, minnrow=None,
4359 outlog=None, blfile=None, csvformat=None,
4360 bltable=None):
4361 """\
4362 Return a scan which has been baselined (all rows) by a polynomial.
4363 Parameters:
4364 mask: an optional mask
4365 order: the order of the polynomial (default is 0)
4366 insitu: if False a new scantable is returned.
4367 Otherwise, the scaling is done in-situ
4368 The default is taken from .asaprc (False)
4369 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4370 clipniter: maximum number of iteration of 'clipthresh'-sigma
4371 clipping (default is 0)
4372 plot: plot the fit and the residual. In this each
4373 indivual fit has to be approved, by typing 'y'
4374 or 'n'
4375 getresidual: if False, returns best-fit values instead of
4376 residual. (default is True)
4377 showprogress: show progress status for large data.
4378 default is True.
4379 minnrow: minimum number of input spectra to show.
4380 default is 1000.
4381 outlog: Output the coefficients of the best-fit
4382 function to logger (default is False)
4383 blfile: Name of a text file in which the best-fit
4384 parameter values to be written
4385 (default is "": no file/logger output)
4386 csvformat: if True blfile is csv-formatted, default is False.
4387 bltable: name of a baseline table where fitting results
4388 (coefficients, rms, etc.) are to be written.
4389 if given, fitting results will NOT be output to
4390 scantable (insitu=True) or None will be
4391 returned (insitu=False).
4392 (default is "": no table output)
4393
4394 Example:
4395 # return a scan baselined by a third order polynomial,
4396 # not using a mask
4397 bscan = scan.poly_baseline(order=3)
4398 """
4399
4400 try:
4401 varlist = vars()
4402
4403 if insitu is None:
4404 insitu = rcParams["insitu"]
4405 if insitu:
4406 workscan = self
4407 else:
4408 workscan = self.copy()
4409
4410 if mask is None: mask = []
4411 if order is None: order = 0
4412 if clipthresh is None: clipthresh = 3.0
4413 if clipniter is None: clipniter = 0
4414 if plot is None: plot = False
4415 if getresidual is None: getresidual = True
4416 if showprogress is None: showprogress = True
4417 if minnrow is None: minnrow = 1000
4418 if outlog is None: outlog = False
4419 if blfile is None: blfile = ''
4420 if csvformat is None: csvformat = False
4421 if bltable is None: bltable = ''
4422
4423 scsvformat = 'T' if csvformat else 'F'
4424
4425 if plot:
4426 outblfile = (blfile != "") and \
4427 os.path.exists(os.path.expanduser(
4428 os.path.expandvars(blfile))
4429 )
4430 if outblfile:
4431 blf = open(blfile, "a")
4432
4433 f = fitter()
4434 f.set_function(lpoly=order)
4435
4436 rows = xrange(workscan.nrow())
4437 #if len(rows) > 0: workscan._init_blinfo()
4438
4439 action = "H"
4440 for r in rows:
4441 f.x = workscan._getabcissa(r)
4442 f.y = workscan._getspectrum(r)
4443 if mask:
4444 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4445 else: # mask=None
4446 f.mask = workscan._getmask(r)
4447
4448 f.data = None
4449 f.fit()
4450
4451 if action != "Y": # skip plotting when accepting all
4452 f.plot(residual=True)
4453 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4454 #if accept_fit.upper() == "N":
4455 # #workscan._append_blinfo(None, None, None)
4456 # continue
4457 accept_fit = self._get_verify_action("Accept fit?",action)
4458 if r == 0: action = None
4459 if accept_fit.upper() == "N":
4460 continue
4461 elif accept_fit.upper() == "R":
4462 break
4463 elif accept_fit.upper() == "A":
4464 action = "Y"
4465
4466 blpars = f.get_parameters()
4467 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4468 #workscan._append_blinfo(blpars, masklist, f.mask)
4469 workscan._setspectrum((f.fitter.getresidual()
4470 if getresidual else f.fitter.getfit()), r)
4471
4472 if outblfile:
4473 rms = workscan.get_rms(f.mask, r)
4474 dataout = \
4475 workscan.format_blparams_row(blpars["params"],
4476 blpars["fixed"],
4477 rms, str(masklist),
4478 r, True, csvformat)
4479 blf.write(dataout)
4480
4481 f._p.unmap()
4482 f._p = None
4483
4484 if outblfile:
4485 blf.close()
4486 else:
4487 workscan._poly_baseline(mask, order,
4488 clipthresh, clipniter, #
4489 getresidual,
4490 pack_progress_params(showprogress,
4491 minnrow),
4492 outlog, scsvformat+blfile,
4493 bltable) #
4494
4495 workscan._add_history("poly_baseline", varlist)
4496
4497 if insitu:
4498 self._assign(workscan)
4499 else:
4500 return workscan
4501
4502 except RuntimeError, e:
4503 raise_fitting_failure_exception(e)
4504
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in documented defaults for any unset parameter
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # one-character flag prepended to blfile name for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            # flatten edge spec into a per-IF list of (left, right) pairs
            edge = normalise_edge_param(edge)

            if plot:
                # Interactive per-row fitting in Python. NOTE(review):
                # this branch only appends to blfile if the file already
                # exists, and bltable is not used here -- presumably
                # deliberate limitations of the interactive mode; confirm.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # action state machine: "H" = offer help on the first
                # prompt, None = normal per-row prompt, "Y" = accept all
                # remaining fits without prompting/plotting
                action = "H"
                for r in rows:
                    # edge list holds (left, right) pairs indexed by IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when writing to a baseline table, nothing goes back to the
            # scantable and None is returned for insitu=False
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4689
4690 def _init_blinfo(self):
4691 """\
4692 Initialise the following three auxiliary members:
4693 blpars : parameters of the best-fit baseline,
4694 masklists : mask data (edge positions of masked channels) and
4695 actualmask : mask data (in boolean list),
4696 to keep for use later (including output to logger/text files).
4697 Used by poly_baseline() and auto_poly_baseline() in case of
4698 'plot=True'.
4699 """
4700 self.blpars = []
4701 self.masklists = []
4702 self.actualmask = []
4703 return
4704
4705 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4706 """\
4707 Append baseline-fitting related info to blpars, masklist and
4708 actualmask.
4709 """
4710 self.blpars.append(data_blpars)
4711 self.masklists.append(data_masklists)
4712 self.actualmask.append(data_actualmask)
4713 return
4714
4715 @asaplog_post_dec
4716 def rotate_linpolphase(self, angle):
4717 """\
4718 Rotate the phase of the complex polarization O=Q+iU correlation.
4719 This is always done in situ in the raw data. So if you call this
4720 function more than once then each call rotates the phase further.
4721
4722 Parameters:
4723
4724 angle: The angle (degrees) to rotate (add) by.
4725
4726 Example::
4727
4728 scan.rotate_linpolphase(2.3)
4729
4730 """
4731 varlist = vars()
4732 self._math._rotate_linpolphase(self, angle)
4733 self._add_history("rotate_linpolphase", varlist)
4734 return
4735
4736 @asaplog_post_dec
4737 def rotate_xyphase(self, angle):
4738 """\
4739 Rotate the phase of the XY correlation. This is always done in situ
4740 in the data. So if you call this function more than once
4741 then each call rotates the phase further.
4742
4743 Parameters:
4744
4745 angle: The angle (degrees) to rotate (add) by.
4746
4747 Example::
4748
4749 scan.rotate_xyphase(2.3)
4750
4751 """
4752 varlist = vars()
4753 self._math._rotate_xyphase(self, angle)
4754 self._add_history("rotate_xyphase", varlist)
4755 return
4756
4757 @asaplog_post_dec
4758 def swap_linears(self):
4759 """\
4760 Swap the linear polarisations XX and YY, or better the first two
4761 polarisations as this also works for ciculars.
4762 """
4763 varlist = vars()
4764 self._math._swap_linears(self)
4765 self._add_history("swap_linears", varlist)
4766 return
4767
4768 @asaplog_post_dec
4769 def invert_phase(self):
4770 """\
4771 Invert the phase of the complex polarisation
4772 """
4773 varlist = vars()
4774 self._math._invert_phase(self)
4775 self._add_history("invert_phase", varlist)
4776 return
4777
4778 @asaplog_post_dec
4779 def add(self, offset, insitu=None):
4780 """\
4781 Return a scan where all spectra have the offset added
4782
4783 Parameters:
4784
4785 offset: the offset
4786
4787 insitu: if False a new scantable is returned.
4788 Otherwise, the scaling is done in-situ
4789 The default is taken from .asaprc (False)
4790
4791 """
4792 if insitu is None: insitu = rcParams['insitu']
4793 self._math._setinsitu(insitu)
4794 varlist = vars()
4795 s = scantable(self._math._unaryop(self, offset, "ADD", False, False))
4796 s._add_history("add", varlist)
4797 if insitu:
4798 self._assign(s)
4799 else:
4800 return s
4801
4802 @asaplog_post_dec
4803 def scale(self, factor, tsys=True, insitu=None, skip_flaggedrow=False):
4804 """\
4805
4806 Return a scan where all spectra are scaled by the given 'factor'
4807
4808 Parameters:
4809
4810 factor: the scaling factor (float or 1D float list)
4811
4812 insitu: if False a new scantable is returned.
4813 Otherwise, the scaling is done in-situ
4814 The default is taken from .asaprc (False)
4815
4816 tsys: if True (default) then apply the operation to Tsys
4817 as well as the data
4818
4819 skip_flaggedrow: if True, scaling is NOT executed for
4820 row-flagged spectra. default is False.
4821 """
4822 if insitu is None: insitu = rcParams['insitu']
4823 self._math._setinsitu(insitu)
4824 varlist = vars()
4825 s = None
4826 import numpy
4827 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4828 if isinstance(factor[0], list) or isinstance(factor[0],
4829 numpy.ndarray):
4830 from asapmath import _array2dOp
4831 s = _array2dOp( self, factor, "MUL", tsys, insitu, skip_flaggedrow )
4832 else:
4833 s = scantable( self._math._arrayop( self, factor,
4834 "MUL", tsys, skip_flaggedrow ) )
4835 else:
4836 s = scantable(self._math._unaryop(self, factor, "MUL", tsys, skip_flaggedrow))
4837 s._add_history("scale", varlist)
4838 if insitu:
4839 self._assign(s)
4840 else:
4841 return s
4842
4843 @preserve_selection
4844 def set_sourcetype(self, match, matchtype="pattern",
4845 sourcetype="reference"):
4846 """\
4847 Set the type of the source to be an source or reference scan
4848 using the provided pattern.
4849
4850 Parameters:
4851
4852 match: a Unix style pattern, regular expression or selector
4853
4854 matchtype: 'pattern' (default) UNIX style pattern or
4855 'regex' regular expression
4856
4857 sourcetype: the type of the source to use (source/reference)
4858
4859 """
4860 varlist = vars()
4861 stype = -1
4862 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4863 stype = 1
4864 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4865 stype = 0
4866 else:
4867 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4868 if matchtype.lower().startswith("p"):
4869 matchtype = "pattern"
4870 elif matchtype.lower().startswith("r"):
4871 matchtype = "regex"
4872 else:
4873 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4874 sel = selector()
4875 if isinstance(match, selector):
4876 sel = match
4877 else:
4878 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4879 self.set_selection(sel)
4880 self._setsourcetype(stype)
4881 self._add_history("set_sourcetype", varlist)
4882
4883
4884 def set_sourcename(self, name):
4885 varlist = vars()
4886 self._setsourcename(name)
4887 self._add_history("set_sourcename", varlist)
4888
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode: the on/off detection mode
                  'paired' (default)
                  identifies 'off' scans by the
                  trailing '_R' (Mopra/Parkes) or
                  '_e'/'_w' (Tid) and matches
                  on/off pairs from the observing pattern
                  'time'
                  finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            from asap._asap import srctype
            # Narrow the current selection to reference (off) rows and
            # copy them, then do the same for source (on) rows; the
            # original selection is restored by @preserve_selection.
            sel = self.get_selection()
            #sel.set_query("SRCTYPE==psoff")
            sel.set_types(srctype.psoff)
            self.set_selection(sel)
            offs = self.copy()
            #sel.set_query("SRCTYPE==pson")
            sel.set_types(srctype.pson)
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # let the C++ layer pair each 'on' with the closest 'off'
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
4940
4941 @asaplog_post_dec
4942 def mx_quotient(self, mask = None, weight='median', preserve=True):
4943 """\
4944 Form a quotient using "off" beams when observing in "MX" mode.
4945
4946 Parameters:
4947
4948 mask: an optional mask to be used when weight == 'stddev'
4949
4950 weight: How to average the off beams. Default is 'median'.
4951
4952 preserve: you can preserve (default) the continuum or
4953 remove it. The equations used are:
4954
4955 preserve: Output = Toff * (on/off) - Toff
4956
4957 remove: Output = Toff * (on/off) - Ton
4958
4959 """
4960 mask = mask or ()
4961 varlist = vars()
4962 on = scantable(self._math._mx_extract(self, 'on'))
4963 preoff = scantable(self._math._mx_extract(self, 'off'))
4964 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4965 from asapmath import quotient
4966 q = quotient(on, off, preserve)
4967 q._add_history("mx_quotient", varlist)
4968 return q
4969
4970 @asaplog_post_dec
4971 def freq_switch(self, insitu=None):
4972 """\
4973 Apply frequency switching to the data.
4974
4975 Parameters:
4976
4977 insitu: if False a new scantable is returned.
4978 Otherwise, the swictching is done in-situ
4979 The default is taken from .asaprc (False)
4980
4981 """
4982 if insitu is None: insitu = rcParams['insitu']
4983 self._math._setinsitu(insitu)
4984 varlist = vars()
4985 s = scantable(self._math._freqswitch(self))
4986 s._add_history("freq_switch", varlist)
4987 if insitu:
4988 self._assign(s)
4989 else:
4990 return s
4991
4992 @asaplog_post_dec
4993 def recalc_azel(self):
4994 """Recalculate the azimuth and elevation for each position."""
4995 varlist = vars()
4996 self._recalcazel()
4997 self._add_history("recalc_azel", varlist)
4998 return
4999
5000 @asaplog_post_dec
5001 def __add__(self, other):
5002 """
5003 implicit on all axes and on Tsys
5004 """
5005 varlist = vars()
5006 s = self.__op( other, "ADD" )
5007 s._add_history("operator +", varlist)
5008 return s
5009
5010 @asaplog_post_dec
5011 def __sub__(self, other):
5012 """
5013 implicit on all axes and on Tsys
5014 """
5015 varlist = vars()
5016 s = self.__op( other, "SUB" )
5017 s._add_history("operator -", varlist)
5018 return s
5019
5020 @asaplog_post_dec
5021 def __mul__(self, other):
5022 """
5023 implicit on all axes and on Tsys
5024 """
5025 varlist = vars()
5026 s = self.__op( other, "MUL" ) ;
5027 s._add_history("operator *", varlist)
5028 return s
5029
5030
5031 @asaplog_post_dec
5032 def __div__(self, other):
5033 """
5034 implicit on all axes and on Tsys
5035 """
5036 varlist = vars()
5037 s = self.__op( other, "DIV" )
5038 s._add_history("operator /", varlist)
5039 return s
5040
5041 @asaplog_post_dec
5042 def __op( self, other, mode ):
5043 s = None
5044 if isinstance(other, scantable):
5045 s = scantable(self._math._binaryop(self, other, mode))
5046 elif isinstance(other, float):
5047 if other == 0.0:
5048 raise ZeroDivisionError("Dividing by zero is not recommended")
5049 s = scantable(self._math._unaryop(self, other, mode, False, False))
5050 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
5051 if isinstance(other[0], list) \
5052 or isinstance(other[0], numpy.ndarray):
5053 from asapmath import _array2dOp
5054 s = _array2dOp( self, other, mode, False )
5055 else:
5056 s = scantable( self._math._arrayop( self, other,
5057 mode, False, False ) )
5058 else:
5059 raise TypeError("Other input is not a scantable or float value")
5060 return s
5061
5062 @asaplog_post_dec
5063 def get_fit(self, row=0):
5064 """\
5065 Print or return the stored fits for a row in the scantable
5066
5067 Parameters:
5068
5069 row: the row which the fit has been applied to.
5070
5071 """
5072 if row > self.nrow():
5073 return
5074 from asap.asapfit import asapfit
5075 fit = asapfit(self._getfit(row))
5076 asaplog.push( '%s' %(fit) )
5077 return fit.as_dict()
5078
5079 @preserve_selection
5080 def flag_nans(self):
5081 """\
5082 Utility function to flag NaN values in the scantable.
5083 """
5084 import numpy
5085 basesel = self.get_selection()
5086 for i in range(self.nrow()):
5087 sel = self.get_row_selector(i)
5088 self.set_selection(basesel+sel)
5089 nans = numpy.isnan(self._getspectrum(0))
5090 if numpy.any(nans):
5091 bnans = [ bool(v) for v in nans]
5092 self.flag(bnans)
5093
5094 self.set_selection(basesel)
5095
5096 def get_row_selector(self, rowno):
5097 return selector(rows=[rowno])
5098
5099 def _add_history(self, funcname, parameters):
5100 if not rcParams['scantable.history']:
5101 return
5102 # create date
5103 sep = "##"
5104 from datetime import datetime
5105 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5106 hist = dstr+sep
5107 hist += funcname+sep#cdate+sep
5108 if parameters.has_key('self'):
5109 del parameters['self']
5110 for k, v in parameters.iteritems():
5111 if type(v) is dict:
5112 for k2, v2 in v.iteritems():
5113 hist += k2
5114 hist += "="
5115 if isinstance(v2, scantable):
5116 hist += 'scantable'
5117 elif k2 == 'mask':
5118 if isinstance(v2, list) or isinstance(v2, tuple):
5119 hist += str(self._zip_mask(v2))
5120 else:
5121 hist += str(v2)
5122 else:
5123 hist += str(v2)
5124 else:
5125 hist += k
5126 hist += "="
5127 if isinstance(v, scantable):
5128 hist += 'scantable'
5129 elif k == 'mask':
5130 if isinstance(v, list) or isinstance(v, tuple):
5131 hist += str(self._zip_mask(v))
5132 else:
5133 hist += str(v)
5134 else:
5135 hist += str(v)
5136 hist += sep
5137 hist = hist[:-2] # remove trailing '##'
5138 self._addhistory(hist)
5139
5140
5141 def _zip_mask(self, mask):
5142 mask = list(mask)
5143 i = 0
5144 segments = []
5145 while mask[i:].count(1):
5146 i += mask[i:].index(1)
5147 if mask[i:].count(0):
5148 j = i + mask[i:].index(0)
5149 else:
5150 j = len(mask)
5151 segments.append([i, j])
5152 i = j
5153 return segments
5154
5155 def _get_ordinate_label(self):
5156 fu = "("+self.get_fluxunit()+")"
5157 import re
5158 lbl = "Intensity"
5159 if re.match(".K.", fu):
5160 lbl = "Brightness Temperature "+ fu
5161 elif re.match(".Jy.", fu):
5162 lbl = "Flux density "+ fu
5163 return lbl
5164
5165 def _check_ifs(self):
5166# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5167 nchans = [self.nchan(i) for i in self.getifnos()]
5168 nchans = filter(lambda t: t > 0, nchans)
5169 return (sum(nchans)/len(nchans) == nchans[0])
5170
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable (in place).

        Parameters:

            names:     list of file names; '~' and environment variables
                       are expanded, missing files raise IOError

            unit:      flux unit to set after import, or None to leave as-is

            average:   if True, time-average integrations scan-wise after
                       each file is filled

            opts:      extra options passed to the filler's open() call
                       (NOTE(review): mutable default; only passed through
                       here, but confirm the fillers do not mutate it)
        """
        first = True
        fullnames = []
        # validate and expand all names up front so we fail before
        # any data has been read
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 0 = memory table, 1 = disk table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the filler matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge every file after the first into the accumulated table
            if not first:
                tbl = self._math._merge([self, tbl])
            # re-seat this scantable instance onto the freshly filled table
            Scantable.__init__(self, tbl)
            # NOTE(review): r.close() is skipped if fill() raises above --
            # consider try/finally
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5211
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve an interactive verification choice to one of
        'Y', 'N', 'A' or 'R'.

        Parameters:

            msg:     prompt text shown to the user

            action:  pre-supplied answer; if empty/non-string the user is
                     prompted via raw_input

        'H' or '?' prints a help text and re-prompts; any other
        unrecognised input silently defaults to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare Enter accepts the default
            return "Y"
        elif (action.upper()[0] in valid_act):
            # only the first character matters, case-insensitively
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt without the (consumed) pre-supplied action
            return self._get_verify_action(msg)
        else:
            return 'Y'
5230
5231 def __getitem__(self, key):
5232 if key < 0:
5233 key += self.nrow()
5234 if key >= self.nrow():
5235 raise IndexError("Row index out of range.")
5236 return self._getspectrum(key)
5237
5238 def __setitem__(self, key, value):
5239 if key < 0:
5240 key += self.nrow()
5241 if key >= self.nrow():
5242 raise IndexError("Row index out of range.")
5243 if not hasattr(value, "__len__") or \
5244 len(value) > self.nchan(self.getif(key)):
5245 raise ValueError("Spectrum length doesn't match.")
5246 return self._setspectrum(value, key)
5247
5248 def __len__(self):
5249 return self.nrow()
5250
5251 def __iter__(self):
5252 for i in range(len(self)):
5253 yield self[i]
Note: See TracBrowser for help on using the repository browser.