source: trunk/python/scantable.py@ 2936

Last change on this file since 2936 was 2936, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: Yes CAS-6485

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: modified sd.scantable.parse_spw_selection() so that spectral windows will be selected if center values of their frequency/velocity ranges fall in the range specified.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 207.9 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    if not (os.path.isdir(filename)
            and os.path.exists(info)
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(info)
    first_line = f.readline()
    f.close()
    # a scantable's table.info starts with 'Type = Scantable' (the type
    # token may be absent in older tables, hence the optional group)
    return bool(re.match('^Type = (Scantable)? *$', first_line))
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    if not (os.path.isdir(filename)
            and os.path.exists(info)
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(info)
    first_line = f.readline()
    f.close()
    # an MS announces itself in the first line of table.info
    return first_line.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                      or [[5,5],10], but can NOT be simplified to [5,10].
                      when [5,10] given, it is interpreted as
                      [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation: every element must be an int or a pair of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            # a nested sequence must be exactly a pair
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."


    # --- canonicalise the short scalar forms into a 2-element list ---
    if isinstance(edge, int):
        edge = [ edge, edge ] # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0] # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ] # e.g. [1] => [1,1]

    # commonedge is True when one and the same pair applies to every IF
    # (i.e. edge has at most 2 elements and none of them is a sequence)
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: flatten, expanding bare integers into pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: in verbose mode push it to the log,
    otherwise re-raise it as a RuntimeError with an explanatory note."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress-bar options as '<showprogress>,<minnrow>' with the
    boolean lower-cased (e.g. 'true,1000')."""
    return "%s,%s" % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        res = []
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.
    """
    def to_csv(val):
        # stringify a scalar, or join a (possibly nested) sequence as 'a,b,c'
        if isinstance(val, (list, tuple)):
            parts = []
            for item in val:
                if isinstance(item, (list, tuple)):
                    parts.extend([str(sub) for sub in item])
                else:
                    parts.append(str(item))
            return ",".join(parts)
        return str(val)

    dinfo = {}
    # mandatory keys
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        if key == 'row':
            irow = blinfo[key]
        dinfo[key] = to_csv(blinfo[key])

    # rows beyond the table are silently skipped
    if irow >= maxirow: return ''

    # function-specific parameter (one of these, depending on blfunc)
    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = to_csv(blinfo[key])

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # clipping: both or neither of the two parameters must be present
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three or none of its parameters must be present
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = to_csv(blinfo[key])
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    fields = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
              dinfo['clipthresh'], dinfo['clipniter'],
              dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
              dinfo['chan_avg_limit']]
    return ":".join(fields)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    results = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        results.append({'row': int(srow),
                        'coeff': [float(c) for c in scoeff.split(",")],
                        'rms': float(srms)})
    return results
302
def is_number(s):
    """Return True if *s* (after stripping surrounding whitespace) parses
    as a float, else False.

    Any conversion failure is treated as "not a number", matching the
    original permissive behaviour for malformed or non-string input.
    """
    # fixes: bare 'except:' (caught SystemExit/KeyboardInterrupt too),
    # unused local 'a', and 'return' inside 'finally' which silently
    # swallowed any pending exception.
    try:
        float(s.strip())
        return True
    except Exception:
        return False
313
def is_frequency(s):
    """True if the stripped string ends in 'hz' (case-insensitive),
    i.e. it looks like a frequency with unit."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a (value, value+unit) pair of frequency strings into two
    floats in Hz, applying the SI prefix found on *s2* to both values."""
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
                   1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before the trailing 'Hz' is the SI prefix,
    # unless it is part of the number itself (plain 'Hz')
    prefix = s2[-3:-2]
    if is_number(prefix):
        return (float(s1), float(s2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
338
def is_velocity(s):
    """True if the stripped string ends in 'm/s' (case-insensitive),
    i.e. it looks like a velocity with unit."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a (value, value+unit) pair of velocity strings into two
    floats in km/s, applying the unit prefix found on *s2* to both."""
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefixes = [".", "k"]
    factors = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    prefix = s2[-4:-3]
    if is_number(prefix):
        # bare 'm/s': scale down to km/s
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factors[prefixes.index(prefix)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity (km/s) into a sky frequency for *restfreq*,
    using the 'radio', 'optical' or (default) relativistic convention."""
    import math

    # speed of light in km/s
    c_kms = 299792.458
    beta = vel / c_kms

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - beta)
    if convention == 'optical':
        return restfreq / (1.0 + beta)
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
380
def get_restfreq_in_Hz(s_restfreq):
    """Parse a rest-frequency string such as '1.4GHz' or '115271.202MHz'
    and return the value as a float in Hz.

    A missing unit, or a bare 'Hz', means the value is already in Hz.
    Embedded blanks are ignored.  Raises Exception for any unit that is
    not '[SI-prefix]Hz'.
    """
    # fixes: removed a large block of commented-out dead code duplicating
    # the prefix table, and converted the py2 'raise E, msg' statement to
    # the call form (behaviour-identical, also valid in py3).
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ","")

    # walk backwards splitting the trailing alphabetic unit from the
    # leading numeric value
    for i in range(len(s))[::-1]:
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        prefix_list = ["a", "f", "p", "n", "u", "m", ".",
                       "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
                       1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value*factor
    else:
        raise Exception("wrong unit of restfreq.")
432
def normalise_restfreq(in_restfreq):
    """Normalise a rest-frequency specification into float Hz.

    Accepts a number, a unit string (e.g. '1.4GHz'), or a 1-D list/array
    whose elements are numbers, unit strings, or {'name','value'} dicts;
    for sequences a list of normalised elements is returned.
    """
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):
        if isinstance(in_restfreq, numpy.ndarray):
            if len(in_restfreq.shape) > 1:
                raise Exception("given in numpy.ndarray, in_restfreq must be 1-D.")

        res = []
        for elem in in_restfreq:
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, int) or isinstance(elem, long):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                # normalise only the 'value' entry; dicts whose value is
                # of any other type are silently skipped (as before)
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    res.append({"name": elem["name"],
                                "value": float(elem["value"])})
                elif isinstance(elem["value"], str):
                    res.append({"name": elem["name"],
                                "value": get_restfreq_in_Hz(elem["value"])})
            else:
                raise Exception("restfreq elements must be float, int, or string.")
        return res
    else:
        raise Exception("wrong type of restfreq given.")
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*; an empty string or empty list
    means "leave the rest frequencies untouched" (no-op)."""
    if restfreq != '' and restfreq != []:
        s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
486 @asaplog_post_dec
487 def __init__(self, filename, average=None, unit=None, parallactify=None,
488 **args):
489 """\
490 Create a scantable from a saved one or make a reference
491
492 Parameters:
493
494 filename: the name of an asap table on disk
495 or
496 the name of a rpfits/sdfits/ms file
497 (integrations within scans are auto averaged
498 and the whole file is read) or
499 [advanced] a reference to an existing scantable
500
501 average: average all integrations withinb a scan on read.
502 The default (True) is taken from .asaprc.
503
504 unit: brightness unit; must be consistent with K or Jy.
505 Over-rides the default selected by the filler
506 (input rpfits/sdfits/ms) or replaces the value
507 in existing scantables
508
509 antenna: for MeasurementSet input data only:
510 Antenna selection. integer (id) or string
511 (name or id).
512
513 parallactify: Indicate that the data had been parallactified.
514 Default (false) is taken from rc file.
515
516 getpt: Whether to import direction from MS/POINTING
517 table properly or not.
518 This is effective only when filename is MS.
519 The default (True) is to import direction
520 from MS/POINTING.
521 """
522 if average is None:
523 average = rcParams['scantable.autoaverage']
524 parallactify = parallactify or rcParams['scantable.parallactify']
525 varlist = vars()
526 from asap._asap import stmath
527 self._math = stmath( rcParams['insitu'] )
528 if isinstance(filename, Scantable):
529 Scantable.__init__(self, filename)
530 else:
531 if isinstance(filename, str):
532 filename = os.path.expandvars(filename)
533 filename = os.path.expanduser(filename)
534 if not os.path.exists(filename):
535 s = "File '%s' not found." % (filename)
536 raise IOError(s)
537 if is_scantable(filename):
538 ondisk = rcParams['scantable.storage'] == 'disk'
539 Scantable.__init__(self, filename, ondisk)
540 if unit is not None:
541 self.set_fluxunit(unit)
542 if average:
543 self._assign( self.average_time( scanav=True ) )
544 # do not reset to the default freqframe
545 #self.set_freqframe(rcParams['scantable.freqframe'])
546 elif is_ms(filename):
547 # Measurement Set
548 opts={'ms': {}}
549 mskeys=['getpt','antenna']
550 for key in mskeys:
551 if key in args.keys():
552 opts['ms'][key] = args[key]
553 self._fill([filename], unit, average, opts)
554 elif os.path.isfile(filename):
555 opts={'nro': {}}
556 nrokeys=['freqref']
557 for key in nrokeys:
558 if key in args.keys():
559 opts['nro'][key] = args[key]
560 self._fill([filename], unit, average, opts)
561 # only apply to new data not "copy constructor"
562 self.parallactify(parallactify)
563 else:
564 msg = "The given file '%s'is not a valid " \
565 "asap table." % (filename)
566 raise IOError(msg)
567 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
568 and isinstance(filename[-1], str):
569 self._fill(filename, unit, average)
570 self.parallactify(parallactify)
571 self._add_history("scantable", varlist)
572
573 @asaplog_post_dec
574 def save(self, name=None, format=None, overwrite=False):
575 """\
576 Store the scantable on disk. This can be an asap (aips++) Table,
577 SDFITS or MS2 format.
578
579 Parameters:
580
581 name: the name of the outputfile. For format 'ASCII'
582 this is the root file name (data in 'name'.txt
583 and header in 'name'_header.txt)
584
585 format: an optional file format. Default is ASAP.
586 Allowed are:
587
588 * 'ASAP' (save as ASAP [aips++] Table),
589 * 'SDFITS' (save as SDFITS file)
590 * 'ASCII' (saves as ascii text file)
591 * 'MS2' (saves as an casacore MeasurementSet V2)
592 * 'FITS' (save as image FITS - not readable by
593 class)
594 * 'CLASS' (save as FITS readable by CLASS)
595
596 overwrite: If the file should be overwritten if it exists.
597 The default False is to return with warning
598 without writing the output. USE WITH CARE.
599
600 Example::
601
602 scan.save('myscan.asap')
603 scan.save('myscan.sdfits', 'SDFITS')
604
605 """
606 from os import path
607 format = format or rcParams['scantable.save']
608 suffix = '.'+format.lower()
609 if name is None or name == "":
610 name = 'scantable'+suffix
611 msg = "No filename given. Using default name %s..." % name
612 asaplog.push(msg)
613 name = path.expandvars(name)
614 if path.isfile(name) or path.isdir(name):
615 if not overwrite:
616 msg = "File %s exists." % name
617 raise IOError(msg)
618 format2 = format.upper()
619 if format2 == 'ASAP':
620 self._save(name)
621 elif format2 == 'MS2':
622 msopt = {'ms': {'overwrite': overwrite } }
623 from asap._asap import mswriter
624 writer = mswriter( self )
625 writer.write( name, msopt )
626 else:
627 from asap._asap import stwriter as stw
628 writer = stw(format2)
629 writer.write(self, name)
630 return
631
632 def copy(self):
633 """Return a copy of this scantable.
634
635 *Note*:
636
637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
638
639 Example::
640
641 copiedscan = scan.copy()
642
643 """
644 sd = scantable(Scantable._copy(self))
645 return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
678 def get_scan(self, scanid=None):
679 """\
680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
682
683 *Note*:
684
685 See scantable.drop_scan() for the inverse operation.
686
687 Parameters:
688
689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
692
693 Example::
694
695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
700 newscan = scan.get_scan([0, 2, 7, 10])
701
702 """
703 if scanid is None:
704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
706 try:
707 bsel = self.get_selection()
708 sel = selector()
709 if type(scanid) is str:
710 sel.set_name(scanid)
711 return self._select_copy(sel)
712 elif type(scanid) is int:
713 sel.set_scans([scanid])
714 return self._select_copy(sel)
715 elif type(scanid) is list:
716 sel.set_scans(scanid)
717 return self._select_copy(sel)
718 else:
719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
720 raise TypeError(msg)
721 except RuntimeError:
722 raise
723
724 def __str__(self):
725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
754 def get_spectrum(self, rowno):
755 """Return the spectrum for the current row in the scantable as a list.
756
757 Parameters:
758
759 rowno: the row number to retrieve the spectrum from
760
761 """
762 return self._getspectrum(rowno)
763
764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
766
767 Parameters:
768
769 rowno: the row number to retrieve the mask from
770
771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
789
790 *Note*:
791
792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
798 i.e.::
799
800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
803 Parameters:
804
805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
810 def get_selection(self):
811 """\
812 Get the selection object currently set on this scantable.
813
814 Example::
815
816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
819
820 """
821 return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        # None/empty mask means "use all channels"
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' variants report the abscissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # for min/max remember the channel of the extremum for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): if 'row' is neither None nor an int, 'rows' stays
        # undefined and the loop below raises NameError — confirm callers
        # only pass None or int.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa variant: the x-position is the result itself
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # NOTE(review): fixed, predictable /tmp path — could collide between
        # concurrent sessions; also 'statunit' below is only defined if the
        # loop above ran at least once. Confirm both are acceptable.
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
1002 """\
1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
1005
1006 Parameters:
1007
1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
1010
1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
1013
1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
1022 def stddev(self, mask=None):
1023 """\
1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
1027
1028 Parameters:
1029
1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
1033 Example::
1034
1035 scan.set_unit('channel')
1036 msk = scan.create_mask([100, 200], [500, 600])
1037 scan.stddev(mask=m)
1038
1039 """
1040 return self.stats(stat='stddev', mask=mask);
1041
1042
1043 def get_column_names(self):
1044 """\
1045 Return a list of column names, which can be used for selection.
1046 """
1047 return list(Scantable.get_column_names(self))
1048
1049 def get_tsys(self, row=-1):
1050 """\
1051 Return the System temperatures.
1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
1057 Returns:
1058
1059 a list of Tsys values for the current selection
1060
1061 """
1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
1064 return self._row_callback(self._gettsys, "Tsys")
1065
1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
1069
1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
1098 def get_weather(self, row=-1):
1099 """\
1100 Return the weather information.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111 if row >= len(self):
1112 raise IndexError("row out of range")
1113 values = self._get_column(self._get_weather, row)
1114 if row > -1:
1115 return {'temperature': values[0],
1116 'pressure': values[1], 'humidity' : values[2],
1117 'windspeed' : values[3], 'windaz' : values[4]
1118 }
1119 else:
1120 out = []
1121 for r in values:
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
1128 def _row_callback(self, callback, label):
1129 out = ""
1130 outvec = []
1131 sep = '-'*50
1132 for i in range(self.nrow()):
1133 tm = self._gettime(i)
1134 src = self._getsourcename(i)
1135 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1136 out += 'Time[%s]:\n' % (tm)
1137 if self.nbeam(-1) > 1:
1138 out += ' Beam[%d] ' % (self.getbeam(i))
1139 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1140 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1141 outvec.append(callback(i))
1142 out += '= %3.3f\n' % (outvec[i])
1143 out += sep+'\n'
1144
1145 asaplog.push(sep)
1146 asaplog.push(" %s" % (label))
1147 asaplog.push(sep)
1148 asaplog.push(out)
1149 asaplog.post()
1150 return outvec
1151
1152 def _get_column(self, callback, row=-1, *args):
1153 """
1154 """
1155 if row == -1:
1156 return [callback(i, *args) for i in range(self.nrow())]
1157 else:
1158 if 0 <= row < self.nrow():
1159 return callback(row, *args)
1160
1161
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derive the precision from the shortest integration time)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # Strip the unreported trailing fields (.%f, %S, %M) from the parse
        # format so strptime matches the truncated strings.
        # NOTE(review): (6-prec)/2 relies on Python 2 integer (floor)
        # division; under Python 3 this would be a float and break range().
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1207
1208
1209 def get_inttime(self, row=-1):
1210 """\
1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
1213
1214 Parameters:
1215
1216 row: row no of integration. Default -1 return all rows.
1217
1218 """
1219 return self._get_column(self._getinttime, row)
1220
1221
1222 def get_sourcename(self, row=-1):
1223 """\
1224 Get a list source names for the observations.
1225 Return a string for each integration in the scantable.
1226 Parameters:
1227
1228 row: row no of integration. Default -1 return all rows.
1229
1230 """
1231 return self._get_column(self._getsourcename, row)
1232
1233 def get_elevation(self, row=-1):
1234 """\
1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
1237
1238 Parameters:
1239
1240 row: row no of integration. Default -1 return all rows.
1241
1242 """
1243 return self._get_column(self._getelevation, row)
1244
1245 def get_azimuth(self, row=-1):
1246 """\
1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
1249
1250 Parameters:
1251 row: row no of integration. Default -1 return all rows.
1252
1253 """
1254 return self._get_column(self._getazimuth, row)
1255
1256 def get_parangle(self, row=-1):
1257 """\
1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
1260
1261 Parameters:
1262
1263 row: row no of integration. Default -1 return all rows.
1264
1265 """
1266 return self._get_column(self._getparangle, row)
1267
1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
1271 Return a string for each integration in the scantable.
1272
1273 Parameters:
1274
1275 row: row no of integration. Default -1 return all rows
1276
1277 """
1278 return self._get_column(self._getdirection, row)
1279
1280 def get_directionval(self, row=-1):
1281 """\
1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
1284
1285 Parameters:
1286
1287 row: row no of integration. Default -1 return all rows
1288
1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
1292 @asaplog_post_dec
1293 def set_unit(self, unit='channel'):
1294 """\
1295 Set the unit for all following operations on this scantable
1296
1297 Parameters:
1298
1299 unit: optional unit, default is 'channel'. Use one of '*Hz',
1300 'km/s', 'channel' or equivalent ''
1301
1302 """
1303 varlist = vars()
1304 if unit in ['', 'pixel', 'channel']:
1305 unit = ''
1306 inf = list(self._getcoordinfo())
1307 inf[0] = unit
1308 self._setcoordinfo(inf)
1309 self._add_history("set_unit", varlist)
1310
1311 @asaplog_post_dec
1312 def set_instrument(self, instr):
1313 """\
1314 Set the instrument for subsequent processing.
1315
1316 Parameters:
1317
1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1320
1321 """
1322 self._setInstrument(instr)
1323 self._add_history("set_instument", vars())
1324
1325 @asaplog_post_dec
1326 def set_feedtype(self, feedtype):
1327 """\
1328 Overwrite the feed type, which might not be set correctly.
1329
1330 Parameters:
1331
1332 feedtype: 'linear' or 'circular'
1333
1334 """
1335 self._setfeedtype(feedtype)
1336 self._add_history("set_feedtype", vars())
1337
1338 @asaplog_post_dec
1339 def get_doppler(self):
1340 """\
1341 Get the doppler.
1342 """
1343 return self._getcoordinfo()[2]
1344
1345 @asaplog_post_dec
1346 def set_doppler(self, doppler='RADIO'):
1347 """\
1348 Set the doppler for all following operations on this scantable.
1349
1350 Parameters:
1351
1352 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1353
1354 """
1355 varlist = vars()
1356 inf = list(self._getcoordinfo())
1357 inf[2] = doppler
1358 self._setcoordinfo(inf)
1359 self._add_history("set_doppler", vars())
1360
1361 @asaplog_post_dec
1362 def set_freqframe(self, frame=None):
1363 """\
1364 Set the frame type of the Spectral Axis.
1365
1366 Parameters:
1367
1368 frame: an optional frame type, default 'LSRK'. Valid frames are:
1369 'TOPO', 'LSRD', 'LSRK', 'BARY',
1370 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1371
1372 Example::
1373
1374 scan.set_freqframe('BARY')
1375
1376 """
1377 frame = frame or rcParams['scantable.freqframe']
1378 varlist = vars()
1379 # "REST" is not implemented in casacore
1380 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1381 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1382 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1383 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1384
1385 if frame in valid:
1386 inf = list(self._getcoordinfo())
1387 inf[1] = frame
1388 self._setcoordinfo(inf)
1389 self._add_history("set_freqframe", varlist)
1390 else:
1391 msg = "Please specify a valid freq type. Valid types are:\n", valid
1392 raise TypeError(msg)
1393
1394 @asaplog_post_dec
1395 def set_dirframe(self, frame=""):
1396 """\
1397 Set the frame type of the Direction on the sky.
1398
1399 Parameters:
1400
1401 frame: an optional frame type, default ''. Valid frames are:
1402 'J2000', 'B1950', 'GALACTIC'
1403
1404 Example:
1405
1406 scan.set_dirframe('GALACTIC')
1407
1408 """
1409 varlist = vars()
1410 Scantable.set_dirframe(self, frame)
1411 self._add_history("set_dirframe", varlist)
1412
1413 def get_unit(self):
1414 """\
1415 Get the default unit set in this scantable
1416
1417 Returns:
1418
1419 A unit string
1420
1421 """
1422 inf = self._getcoordinfo()
1423 unit = inf[0]
1424 if unit == '': unit = 'channel'
1425 return unit
1426
1427 @asaplog_post_dec
1428 def get_abcissa(self, rowno=0):
1429 """\
1430 Get the abcissa in the current coordinate setup for the currently
1431 selected Beam/IF/Pol
1432
1433 Parameters:
1434
1435 rowno: an optional row number in the scantable. Default is the
1436 first row, i.e. rowno=0
1437
1438 Returns:
1439
1440 The abcissa values and the format string (as a dictionary)
1441
1442 """
1443 abc = self._getabcissa(rowno)
1444 lbl = self._getabcissalabel(rowno)
1445 return abc, lbl
1446
1447 @asaplog_post_dec
1448 def flag(self, mask=None, unflag=False, row=-1):
1449 """\
1450 Flag the selected data using an optional channel mask.
1451
1452 Parameters:
1453
1454 mask: an optional channel mask, created with create_mask. Default
1455 (no mask) is all channels.
1456
1457 unflag: if True, unflag the data
1458
1459 row: an optional row number in the scantable.
1460 Default -1 flags all rows
1461
1462 """
1463 varlist = vars()
1464 mask = mask or []
1465 self._flag(row, mask, unflag)
1466 self._add_history("flag", varlist)
1467
1468 @asaplog_post_dec
1469 def flag_row(self, rows=None, unflag=False):
1470 """\
1471 Flag the selected data in row-based manner.
1472
1473 Parameters:
1474
1475 rows: list of row numbers to be flagged. Default is no row
1476 (must be explicitly specified to execute row-based
1477 flagging).
1478
1479 unflag: if True, unflag the data.
1480
1481 """
1482 varlist = vars()
1483 if rows is None:
1484 rows = []
1485 self._flag_row(rows, unflag)
1486 self._add_history("flag_row", varlist)
1487
1488 @asaplog_post_dec
1489 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1490 """\
1491 Flag the selected data outside a specified range (in channel-base)
1492
1493 Parameters:
1494
1495 uthres: upper threshold.
1496
1497 dthres: lower threshold
1498
1499 clipoutside: True for flagging data outside the range
1500 [dthres:uthres].
1501 False for flagging data inside the range.
1502
1503 unflag: if True, unflag the data.
1504
1505 """
1506 varlist = vars()
1507 self._clip(uthres, dthres, clipoutside, unflag)
1508 self._add_history("clip", varlist)
1509
1510 @asaplog_post_dec
1511 def lag_flag(self, start, end, unit="MHz", insitu=None):
1512 """\
1513 Flag the data in 'lag' space by providing a frequency to remove.
1514 Flagged data in the scantable get interpolated over the region.
1515 No taper is applied.
1516
1517 Parameters:
1518
1519 start: the start frequency (really a period within the
1520 bandwidth) or period to remove
1521
1522 end: the end frequency or period to remove
1523
1524 unit: the frequency unit (default 'MHz') or '' for
1525 explicit lag channels
1526
1527 *Notes*:
1528
1529 It is recommended to flag edges of the band or strong
1530 signals beforehand.
1531
1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
1536 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1537 if not (unit == "" or base.has_key(unit)):
1538 raise ValueError("%s is not a valid unit." % unit)
1539 if unit == "":
1540 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1541 else:
1542 s = scantable(self._math._lag_flag(self, start*base[unit],
1543 end*base[unit], "frequency"))
1544 s._add_history("lag_flag", varlist)
1545 if insitu:
1546 self._assign(s)
1547 else:
1548 return s
1549
1550 @asaplog_post_dec
1551 def fft(self, rowno=None, mask=None, getrealimag=False):
1552 """\
1553 Apply FFT to the spectra.
1554 Flagged data in the scantable get interpolated over the region.
1555
1556 Parameters:
1557
1558 rowno: The row number(s) to be processed. int, list
1559 and tuple are accepted. By default (None), FFT
1560 is applied to the whole data.
1561
1562 mask: Auxiliary channel mask(s). Given as a boolean
1563 list, it is applied to all specified rows.
1564 A list of boolean lists can also be used to
1565 apply different masks. In the latter case, the
1566 length of 'mask' must be the same as that of
1567 'rowno'. The default is None.
1568
1569 getrealimag: If True, returns the real and imaginary part
1570 values of the complex results.
1571 If False (the default), returns the amplitude
1572 (absolute value) normalised with Ndata/2 and
1573 phase (argument, in unit of radian).
1574
1575 Returns:
1576
1577 A list of dictionaries containing the results for each spectrum.
1578 Each dictionary contains two values, the real and the imaginary
1579 parts when getrealimag = True, or the amplitude(absolute value)
1580 and the phase(argument) when getrealimag = False. The key for
1581 these values are 'real' and 'imag', or 'ampl' and 'phase',
1582 respectively.
1583 """
1584 if rowno is None:
1585 rowno = []
1586 if isinstance(rowno, int):
1587 rowno = [rowno]
1588 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1589 raise TypeError("The row number(s) must be int, list or tuple.")
1590 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1591
1592 usecommonmask = True
1593
1594 if mask is None:
1595 mask = []
1596 if isinstance(mask, list) or isinstance(mask, tuple):
1597 if len(mask) == 0:
1598 mask = [[]]
1599 else:
1600 if isinstance(mask[0], bool):
1601 if len(mask) != self.nchan(self.getif(rowno[0])):
1602 raise ValueError("The spectra and the mask have "
1603 "different length.")
1604 mask = [mask]
1605 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1606 usecommonmask = False
1607 if len(mask) != len(rowno):
1608 raise ValueError("When specifying masks for each "
1609 "spectrum, the numbers of them "
1610 "must be identical.")
1611 for i in xrange(mask):
1612 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1613 raise ValueError("The spectra and the mask have "
1614 "different length.")
1615 else:
1616 raise TypeError("The mask must be a boolean list or "
1617 "a list of boolean list.")
1618 else:
1619 raise TypeError("The mask must be a boolean list or a list of "
1620 "boolean list.")
1621
1622 res = []
1623
1624 imask = 0
1625 for whichrow in rowno:
1626 fspec = self._fft(whichrow, mask[imask], getrealimag)
1627 nspec = len(fspec)
1628
1629 i = 0
1630 v1 = []
1631 v2 = []
1632 reselem = {"real":[],"imag":[]} if getrealimag \
1633 else {"ampl":[],"phase":[]}
1634
1635 while (i < nspec):
1636 v1.append(fspec[i])
1637 v2.append(fspec[i+1])
1638 i += 2
1639
1640 if getrealimag:
1641 reselem["real"] += v1
1642 reselem["imag"] += v2
1643 else:
1644 reselem["ampl"] += v1
1645 reselem["phase"] += v2
1646
1647 res.append(reselem)
1648
1649 if not usecommonmask:
1650 imask += 1
1651
1652 return res
1653
1654 @asaplog_post_dec
1655 def create_mask(self, *args, **kwargs):
1656 """\
1657 Compute and return a mask based on [min, max] windows.
1658 The specified windows are to be INCLUDED, when the mask is
1659 applied.
1660
1661 Parameters:
1662
1663 [min, max], [min2, max2], ...
1664 Pairs of start/end points (inclusive)specifying the regions
1665 to be masked
1666
1667 invert: optional argument. If specified as True,
1668 return an inverted mask, i.e. the regions
1669 specified are EXCLUDED
1670
1671 row: create the mask using the specified row for
1672 unit conversions, default is row=0
1673 only necessary if frequency varies over rows.
1674
1675 Examples::
1676
1677 scan.set_unit('channel')
1678 # a)
1679 msk = scan.create_mask([400, 500], [800, 900])
1680 # masks everything outside 400 and 500
1681 # and 800 and 900 in the unit 'channel'
1682
1683 # b)
1684 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1685 # masks the regions between 400 and 500
1686 # and 800 and 900 in the unit 'channel'
1687
1688 # c)
1689 #mask only channel 400
1690 msk = scan.create_mask([400])
1691
1692 """
1693 row = kwargs.get("row", 0)
1694 data = self._getabcissa(row)
1695 u = self._getcoordinfo()[0]
1696 if u == "":
1697 u = "channel"
1698 msg = "The current mask window unit is %s" % u
1699 i = self._check_ifs()
1700 if not i:
1701 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1702 asaplog.push(msg)
1703 n = len(data)
1704 msk = _n_bools(n, False)
1705 # test if args is a 'list' or a 'normal *args - UGLY!!!
1706
1707 ws = (isinstance(args[-1][-1], int)
1708 or isinstance(args[-1][-1], float)) and args or args[0]
1709 for window in ws:
1710 if len(window) == 1:
1711 window = [window[0], window[0]]
1712 if len(window) == 0 or len(window) > 2:
1713 raise ValueError("A window needs to be defined as "
1714 "[start(, end)]")
1715 if window[0] > window[1]:
1716 tmp = window[0]
1717 window[0] = window[1]
1718 window[1] = tmp
1719 for i in range(n):
1720 if data[i] >= window[0] and data[i] <= window[1]:
1721 msk[i] = True
1722 if kwargs.has_key('invert'):
1723 if kwargs.get('invert'):
1724 msk = mask_not(msk)
1725 return msk
1726
1727 def get_masklist(self, mask=None, row=0, silent=False):
1728 """\
1729 Compute and return a list of mask windows, [min, max].
1730
1731 Parameters:
1732
1733 mask: channel mask, created with create_mask.
1734
1735 row: calcutate the masklist using the specified row
1736 for unit conversions, default is row=0
1737 only necessary if frequency varies over rows.
1738
1739 Returns:
1740
1741 [min, max], [min2, max2], ...
1742 Pairs of start/end points (inclusive)specifying
1743 the masked regions
1744
1745 """
1746 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1747 raise TypeError("The mask should be list or tuple.")
1748 if len(mask) <= 0:
1749 raise TypeError("The mask elements should be > 0")
1750 data = self._getabcissa(row)
1751 if len(data) != len(mask):
1752 msg = "Number of channels in scantable != number of mask elements"
1753 raise TypeError(msg)
1754 u = self._getcoordinfo()[0]
1755 if u == "":
1756 u = "channel"
1757 msg = "The current mask window unit is %s" % u
1758 i = self._check_ifs()
1759 if not i:
1760 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1761 if not silent:
1762 asaplog.push(msg)
1763 masklist = []
1764 ist, ien = None, None
1765 ist, ien=self.get_mask_indices(mask)
1766 if ist is not None and ien is not None:
1767 for i in xrange(len(ist)):
1768 range=[data[ist[i]],data[ien[i]]]
1769 range.sort()
1770 masklist.append([range[0],range[1]])
1771 return masklist
1772
1773 def get_mask_indices(self, mask=None):
1774 """\
1775 Compute and Return lists of mask start indices and mask end indices.
1776
1777 Parameters:
1778
1779 mask: channel mask, created with create_mask.
1780
1781 Returns:
1782
1783 List of mask start indices and that of mask end indices,
1784 i.e., [istart1,istart2,....], [iend1,iend2,....].
1785
1786 """
1787 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1788 raise TypeError("The mask should be list or tuple.")
1789 if len(mask) <= 0:
1790 raise TypeError("The mask elements should be > 0")
1791 istart = []
1792 iend = []
1793 if mask[0]:
1794 istart.append(0)
1795 for i in range(len(mask)-1):
1796 if not mask[i] and mask[i+1]:
1797 istart.append(i+1)
1798 elif mask[i] and not mask[i+1]:
1799 iend.append(i)
1800 if mask[len(mask)-1]:
1801 iend.append(len(mask)-1)
1802 if len(istart) != len(iend):
1803 raise RuntimeError("Numbers of mask start != mask end.")
1804 for i in range(len(istart)):
1805 if istart[i] > iend[i]:
1806 raise RuntimeError("Mask start index > mask end index")
1807 break
1808 return istart,iend
1809
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws whose central frequency
                                          falls in frequency range between
                                          1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels which fall
                                                     between the specified
                                                     frequency range in spws
                                                     whose central frequency
                                                     falls in the specified
                                                     frequency range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
            restfreq: the rest frequency.
                     examples: '115.2712GHz', 115271201800.0
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
            doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): a non-string argument is only reported via the log
        # ("ERROR") and execution continues; it will fail later at split().
        # Confirm whether an exception should be raised here instead.
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # Work in channel units while parsing; all original settings
        # (unit, rest frequency, frame, doppler) are restored at the end.
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # each split length is 1 (separator absent) or 2 (present
                # once); a product of 2 means exactly one separator appears.
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            expr_fmin = min(expr_f0, expr_f1)
                            expr_fmax = max(expr_f0, expr_f1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                """
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False
                                """

                                # select this spw when the CENTRE of its
                                # frequency range lies in [fmin, fmax]
                                # (overlap test above kept for reference;
                                # changed in CAS-6485)
                                fhead = coord['coord'].to_frequency(0)
                                ftail = coord['coord'].to_frequency(self.nchan(spw) - 1)
                                fcen  = (fhead + ftail) / 2.0

                                if ((expr_fmin <= fcen) and (fcen <= expr_fmax)):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                """
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False
                                """

                                # as for frequencies: select when the centre
                                # of the spw's velocity range is in range
                                vhead = coord['coord'].to_velocity(0)
                                vtail = coord['coord'].to_velocity(self.nchan(spw) - 1)
                                vcen  = (vhead + vtail) / 2.0

                                if ((expr_vmin <= vcen) and (vcen <= expr_vmax)):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s': velocity -> frequency ->
                                    # pixel, via the spw's rest frequency
                                    restf = self.get_restfreqs()[molid][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    dppl = self.get_doppler()
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if (len(crange_list[0]) > 0):
                    if res.has_key(spw):
                        res[spw].extend(crange_list)
                    else:
                        res[spw] = crange_list

        # NOTE: deleting while iterating res.keys() is safe only under
        # Python 2, where keys() returns a fresh list.
        for spw in res.keys():
            if spw in valid_ifs:
                # remove duplicated channel ranges
                for i in reversed(xrange(len(res[spw]))):
                    for j in xrange(i):
                        # merge ranges that overlap or are adjacent (gap of 1)
                        if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \
                            (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):
                            asaplog.post()
                            merge_warn_mesg = "Spw " + str(spw) + ": overwrapping channel ranges are merged."
                            asaplog.push(merge_warn_mesg)
                            asaplog.post('WARN')
                            res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])
                            res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])
                            res[spw].pop(i)
                            break
            else:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        # NOTE(review): there is no try/finally, so any exception raised
        # above leaves the scantable with the temporary unit/frame/doppler
        # settings — confirm whether that is acceptable.
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)

        return res
2199
2200 @asaplog_post_dec
2201 def get_first_rowno_by_if(self, ifno):
2202 found = False
2203 for irow in xrange(self.nrow()):
2204 if (self.getif(irow) == ifno):
2205 res = irow
2206 found = True
2207 break
2208
2209 if not found: raise RuntimeError("No valid spw.")
2210
2211 return res
2212
2213 @asaplog_post_dec
2214 def _get_coordinate_list(self):
2215 res = []
2216 spws = self.getifnos()
2217 for spw in spws:
2218 elem = {}
2219 elem['if'] = spw
2220 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2221 res.append(elem)
2222
2223 return res
2224
2225 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        if maskstring == "":
            # empty expression selects every valid IF (all channels)
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row of this IF; 'ifrow' is used
                        # after the loop, relying on the loop variable
                        # surviving the 'break'
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half of one channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half of one channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # 'freq is None' means the whole spectrum
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2362
2363 @asaplog_post_dec
2364 def parse_idx_selection(self, mode, selexpr):
2365 """
2366 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2367 BEAMNO, and row number
2368
2369 Parameters:
2370 mode : which column to select.
2371 ['scan',|'if'|'pol'|'beam'|'row']
2372 selexpr : A comma separated selection expression.
2373 examples:
2374 '' = all (returns [])
2375 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2376 (returns [0,1,4,5,6,9])
2377 Returns:
2378 A List of selected indices
2379 """
2380 if selexpr == "":
2381 return []
2382 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2383 'b': 'beam', 'r': 'row'}
2384 smode = mode.lower()[0]
2385 if not (smode in valid_modes.keys()):
2386 msg = "Invalid mode '%s'. Valid modes are %s" %\
2387 (mode, str(valid_modes.values()))
2388 asaplog.post()
2389 asaplog.push(msg)
2390 asaplog.post("ERROR")
2391 mode = valid_modes[smode]
2392 minidx = None
2393 maxidx = None
2394 if smode == 'r':
2395 minidx = 0
2396 maxidx = self.nrow()
2397 else:
2398 idx = getattr(self,"get"+mode+"nos")()
2399 minidx = min(idx)
2400 maxidx = max(idx)
2401 del idx
2402 # split selexpr by "<spaces>,<spaces>"
2403 comma_sep = re.compile('\s*,\s*')
2404 sellist = comma_sep.split(selexpr)
2405 idxlist = []
2406 for currselstr in sellist:
2407 # single range (may include ~, < or >)
2408 currlist = self._parse_selection(currselstr, typestr='integer',
2409 minval=minidx,maxval=maxidx)
2410 for thelist in currlist:
2411 idxlist += range(thelist[0],thelist[1]+1)
2412 # remove duplicated elements after first ones
2413 for i in reversed(xrange(len(idxlist))):
2414 if idxlist.index(idxlist[i]) < i:
2415 idxlist.pop(i)
2416 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2417 asaplog.push(msg)
2418 return idxlist
2419
2420 def _parse_selection(self, selstr, typestr='float', offset=0.,
2421 minval=None, maxval=None):
2422 """
2423 Parameters:
2424 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2425 typestr : The type of the values in returned list
2426 ('integer' or 'float')
2427 offset : The offset value to subtract from or add to
2428 the boundary value if the selection string
2429 includes '<' or '>' [Valid only for typestr='float']
2430 minval, maxval : The minimum/maximum values to set if the
2431 selection string includes '<' or '>'.
2432 The list element is filled with None by default.
2433 Returns:
2434 A list of min/max pair of selections.
2435 Example:
2436 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2437 --> returns [[0,2],[5,7],[9,9]]
2438 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2439 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2440 """
2441 # split selstr by '<spaces>;<spaces>'
2442 semi_sep = re.compile('\s*;\s*')
2443 selgroups = semi_sep.split(selstr)
2444 sellists = []
2445 if typestr.lower().startswith('int'):
2446 formatfunc = int
2447 offset = 1
2448 else:
2449 formatfunc = float
2450
2451 for currsel in selgroups:
2452 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2453 minsel = minval
2454 maxsel = maxval
2455 if currsel.find('~') > 0:
2456 # val0 <= x <= val1
2457 minsel = formatfunc(currsel.split('~')[0].strip())
2458 maxsel = formatfunc(currsel.split('~')[1].strip())
2459 elif currsel.strip().find('<=') > -1:
2460 bound = currsel.split('<=')
2461 try: # try "x <= val"
2462 minsel = minval
2463 maxsel = formatfunc(bound[1].strip())
2464 except ValueError: # now "val <= x"
2465 minsel = formatfunc(bound[0].strip())
2466 maxsel = maxval
2467 elif currsel.strip().find('>=') > -1:
2468 bound = currsel.split('>=')
2469 try: # try "x >= val"
2470 minsel = formatfunc(bound[1].strip())
2471 maxsel = maxval
2472 except ValueError: # now "val >= x"
2473 minsel = minval
2474 maxsel = formatfunc(bound[0].strip())
2475 elif currsel.strip().find('<') > -1:
2476 bound = currsel.split('<')
2477 try: # try "x < val"
2478 minsel = minval
2479 maxsel = formatfunc(bound[1].strip()) \
2480 - formatfunc(offset)
2481 except ValueError: # now "val < x"
2482 minsel = formatfunc(bound[0].strip()) \
2483 + formatfunc(offset)
2484 maxsel = maxval
2485 elif currsel.strip().find('>') > -1:
2486 bound = currsel.split('>')
2487 try: # try "x > val"
2488 minsel = formatfunc(bound[1].strip()) \
2489 + formatfunc(offset)
2490 maxsel = maxval
2491 except ValueError: # now "val > x"
2492 minsel = minval
2493 maxsel = formatfunc(bound[0].strip()) \
2494 - formatfunc(offset)
2495 else:
2496 minsel = formatfunc(currsel)
2497 maxsel = formatfunc(currsel)
2498 sellists.append([minsel,maxsel])
2499 return sellists
2500
2501# def get_restfreqs(self):
2502# """
2503# Get the restfrequency(s) stored in this scantable.
2504# The return value(s) are always of unit 'Hz'
2505# Parameters:
2506# none
2507# Returns:
2508# a list of doubles
2509# """
2510# return list(self._getrestfreqs())
2511
2512 def get_restfreqs(self, ids=None):
2513 """\
2514 Get the restfrequency(s) stored in this scantable.
2515 The return value(s) are always of unit 'Hz'
2516
2517 Parameters:
2518
2519 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2520 be retrieved
2521
2522 Returns:
2523
2524 dictionary containing ids and a list of doubles for each id
2525
2526 """
2527 if ids is None:
2528 rfreqs = {}
2529 idlist = self.getmolnos()
2530 for i in idlist:
2531 rfreqs[i] = list(self._getrestfreqs(i))
2532 return rfreqs
2533 else:
2534 if type(ids) == list or type(ids) == tuple:
2535 rfreqs = {}
2536 for i in ids:
2537 rfreqs[i] = list(self._getrestfreqs(i))
2538 return rfreqs
2539 else:
2540 return list(self._getrestfreqs(ids))
2541
2542 @asaplog_post_dec
2543 def set_restfreqs(self, freqs=None, unit='Hz'):
2544 """\
2545 Set or replace the restfrequency specified and
2546 if the 'freqs' argument holds a scalar,
2547 then that rest frequency will be applied to all the selected
2548 data. If the 'freqs' argument holds
2549 a vector, then it MUST be of equal or smaller length than
2550 the number of IFs (and the available restfrequencies will be
2551 replaced by this vector). In this case, *all* data have
2552 the restfrequency set per IF according
2553 to the corresponding value you give in the 'freqs' vector.
2554 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2555 IF 1 gets restfreq 2e9.
2556
2557 You can also specify the frequencies via a linecatalog.
2558
2559 Parameters:
2560
2561 freqs: list of rest frequency values or string idenitfiers
2562
2563 unit: unit for rest frequency (default 'Hz')
2564
2565
2566 Example::
2567
2568 # set the given restfrequency for the all currently selected IFs
2569 scan.set_restfreqs(freqs=1.4e9)
2570 # set restfrequencies for the n IFs (n > 1) in the order of the
2571 # list, i.e
2572 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2573 # len(list_of_restfreqs) == nIF
2574 # for nIF == 1 the following will set multiple restfrequency for
2575 # that IF
2576 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2577 # set multiple restfrequencies per IF. as a list of lists where
2578 # the outer list has nIF elements, the inner s arbitrary
2579 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2580
2581 *Note*:
2582
2583 To do more sophisticate Restfrequency setting, e.g. on a
2584 source and IF basis, use scantable.set_selection() before using
2585 this function::
2586
2587 # provided your scantable is called scan
2588 selection = selector()
2589 selection.set_name('ORION*')
2590 selection.set_ifs([1])
2591 scan.set_selection(selection)
2592 scan.set_restfreqs(freqs=86.6e9)
2593
2594 """
2595 varlist = vars()
2596 from asap import linecatalog
2597 # simple value
2598 if isinstance(freqs, int) or isinstance(freqs, float):
2599 self._setrestfreqs([freqs], [""], unit)
2600 # list of values
2601 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2602 # list values are scalars
2603 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2604 if len(freqs) == 1:
2605 self._setrestfreqs(freqs, [""], unit)
2606 else:
2607 # allow the 'old' mode of setting mulitple IFs
2608 savesel = self._getselection()
2609 sel = self.get_selection()
2610 iflist = self.getifnos()
2611 if len(freqs)>len(iflist):
2612 raise ValueError("number of elements in list of list "
2613 "exeeds the current IF selections")
2614 iflist = self.getifnos()
2615 for i, fval in enumerate(freqs):
2616 sel.set_ifs(iflist[i])
2617 self._setselection(sel)
2618 self._setrestfreqs([fval], [""], unit)
2619 self._setselection(savesel)
2620
2621 # list values are dict, {'value'=, 'name'=)
2622 elif isinstance(freqs[-1], dict):
2623 values = []
2624 names = []
2625 for d in freqs:
2626 values.append(d["value"])
2627 names.append(d["name"])
2628 self._setrestfreqs(values, names, unit)
2629 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2630 savesel = self._getselection()
2631 sel = self.get_selection()
2632 iflist = self.getifnos()
2633 if len(freqs)>len(iflist):
2634 raise ValueError("number of elements in list of list exeeds"
2635 " the current IF selections")
2636 for i, fval in enumerate(freqs):
2637 sel.set_ifs(iflist[i])
2638 self._setselection(sel)
2639 self._setrestfreqs(fval, [""], unit)
2640 self._setselection(savesel)
2641 # freqs are to be taken from a linecatalog
2642 elif isinstance(freqs, linecatalog):
2643 savesel = self._getselection()
2644 sel = self.get_selection()
2645 for i in xrange(freqs.nrow()):
2646 sel.set_ifs(iflist[i])
2647 self._setselection(sel)
2648 self._setrestfreqs([freqs.get_frequency(i)],
2649 [freqs.get_name(i)], "MHz")
2650 # ensure that we are not iterating past nIF
2651 if i == self.nif()-1: break
2652 self._setselection(savesel)
2653 else:
2654 return
2655 self._add_history("set_restfreqs", varlist)
2656
2657 @asaplog_post_dec
2658 def shift_refpix(self, delta):
2659 """\
2660 Shift the reference pixel of the Spectra Coordinate by an
2661 integer amount.
2662
2663 Parameters:
2664
2665 delta: the amount to shift by
2666
2667 *Note*:
2668
2669 Be careful using this with broadband data.
2670
2671 """
2672 varlist = vars()
2673 Scantable.shift_refpix(self, delta)
2674 s._add_history("shift_refpix", varlist)
2675
2676 @asaplog_post_dec
2677 def history(self, filename=None, nrows=-1, start=0):
2678 """\
2679 Print the history. Optionally to a file.
2680
2681 Parameters:
2682
2683 filename: The name of the file to save the history to.
2684
2685 """
2686 n = self._historylength()
2687 if nrows == -1:
2688 nrows = n
2689 if start+nrows > n:
2690 nrows = nrows-start
2691 if n > 1000 and nrows == n:
2692 nrows = 1000
2693 start = n-1000
2694 asaplog.push("Warning: History has {0} entries. Displaying last "
2695 "1000".format(n))
2696 hist = list(self._gethistory(nrows, start))
2697 out = "-"*80
2698 for h in hist:
2699 if not h.strip():
2700 continue
2701 if h.find("---") >-1:
2702 continue
2703 else:
2704 items = h.split("##")
2705 date = items[0]
2706 func = items[1]
2707 items = items[2:]
2708 out += "\n"+date+"\n"
2709 out += "Function: %s\n Parameters:" % (func)
2710 for i in items:
2711 if i == '':
2712 continue
2713 s = i.split("=")
2714 out += "\n %s = %s" % (s[0], s[1])
2715 out = "\n".join([out, "*"*80])
2716 if filename is not None:
2717 if filename is "":
2718 filename = 'scantable_history.txt'
2719 filename = os.path.expandvars(os.path.expanduser(filename))
2720 if not os.path.isdir(filename):
2721 data = open(filename, 'w')
2722 data.write(out)
2723 data.close()
2724 else:
2725 msg = "Illegal file name '%s'." % (filename)
2726 raise IOError(msg)
2727 return page(out)
2728
2729 #
2730 # Maths business
2731 #
2732 @asaplog_post_dec
2733 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2734 avmode="NONE"):
2735 """\
2736 Return the (time) weighted average of a scan. Scans will be averaged
2737 only if the source direction (RA/DEC) is within 1' otherwise
2738
2739 *Note*:
2740
2741 in channels only - align if necessary
2742
2743 Parameters:
2744
2745 mask: an optional mask (only used for 'var' and 'tsys'
2746 weighting)
2747
2748 scanav: True averages each scan separately
2749 False (default) averages all scans together,
2750
2751 weight: Weighting scheme.
2752 'none' (mean no weight)
2753 'var' (1/var(spec) weighted)
2754 'tsys' (1/Tsys**2 weighted)
2755 'tint' (integration time weighted)
2756 'tintsys' (Tint/Tsys**2)
2757 'median' ( median averaging)
2758 The default is 'tint'
2759
2760 align: align the spectra in velocity before averaging. It takes
2761 the time of the first spectrum as reference time.
2762 avmode: 'SOURCE' - also select by source name - or
2763 'NONE' (default). Not applicable for scanav=True or
2764 weight=median
2765
2766 Example::
2767
2768 # time average the scantable without using a mask
2769 newscan = scan.average_time()
2770
2771 """
2772 varlist = vars()
2773 weight = weight or 'TINT'
2774 mask = mask or ()
2775 scanav = (scanav and 'SCAN') or avmode.upper()
2776 scan = (self, )
2777
2778 if align:
2779 scan = (self.freq_align(insitu=False), )
2780 asaplog.push("Note: Alignment is don on a source-by-source basis")
2781 asaplog.push("Note: Averaging (by default) is not")
2782 # we need to set it to SOURCE averaging here
2783 s = None
2784 if weight.upper() == 'MEDIAN':
2785 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2786 scanav))
2787 else:
2788 s = scantable(self._math._average(scan, mask, weight.upper(),
2789 scanav))
2790 s._add_history("average_time", varlist)
2791 return s
2792
2793 @asaplog_post_dec
2794 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2795 """\
2796 Return a scan where all spectra are converted to either
2797 Jansky or Kelvin depending upon the flux units of the scan table.
2798 By default the function tries to look the values up internally.
2799 If it can't find them (or if you want to over-ride), you must
2800 specify EITHER jyperk OR eta (and D which it will try to look up
2801 also if you don't set it). jyperk takes precedence if you set both.
2802
2803 Parameters:
2804
2805 jyperk: the Jy / K conversion factor
2806
2807 eta: the aperture efficiency
2808
2809 d: the geometric diameter (metres)
2810
2811 insitu: if False a new scantable is returned.
2812 Otherwise, the scaling is done in-situ
2813 The default is taken from .asaprc (False)
2814
2815 """
2816 if insitu is None: insitu = rcParams['insitu']
2817 self._math._setinsitu(insitu)
2818 varlist = vars()
2819 jyperk = jyperk or -1.0
2820 d = d or -1.0
2821 eta = eta or -1.0
2822 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2823 s._add_history("convert_flux", varlist)
2824 if insitu: self._assign(s)
2825 else: return s
2826
2827 @asaplog_post_dec
2828 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2829 """\
2830 Return a scan after applying a gain-elevation correction.
2831 The correction can be made via either a polynomial or a
2832 table-based interpolation (and extrapolation if necessary).
2833 You specify polynomial coefficients, an ascii table or neither.
2834 If you specify neither, then a polynomial correction will be made
2835 with built in coefficients known for certain telescopes (an error
2836 will occur if the instrument is not known).
2837 The data and Tsys are *divided* by the scaling factors.
2838
2839 Parameters:
2840
2841 poly: Polynomial coefficients (default None) to compute a
2842 gain-elevation correction as a function of
2843 elevation (in degrees).
2844
2845 filename: The name of an ascii file holding correction factors.
2846 The first row of the ascii file must give the column
2847 names and these MUST include columns
2848 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2849 by this) somewhere.
2850 The second row must give the data type of the
2851 column. Use 'R' for Real and 'I' for Integer.
2852 An example file would be
2853 (actual factors are arbitrary) :
2854
2855 TIME ELEVATION FACTOR
2856 R R R
2857 0.1 0 0.8
2858 0.2 20 0.85
2859 0.3 40 0.9
2860 0.4 60 0.85
2861 0.5 80 0.8
2862 0.6 90 0.75
2863
2864 method: Interpolation method when correcting from a table.
2865 Values are 'nearest', 'linear' (default), 'cubic'
2866 and 'spline'
2867
2868 insitu: if False a new scantable is returned.
2869 Otherwise, the scaling is done in-situ
2870 The default is taken from .asaprc (False)
2871
2872 """
2873
2874 if insitu is None: insitu = rcParams['insitu']
2875 self._math._setinsitu(insitu)
2876 varlist = vars()
2877 poly = poly or ()
2878 from os.path import expandvars
2879 filename = expandvars(filename)
2880 s = scantable(self._math._gainel(self, poly, filename, method))
2881 s._add_history("gain_el", varlist)
2882 if insitu:
2883 self._assign(s)
2884 else:
2885 return s
2886
2887 @asaplog_post_dec
2888 def freq_align(self, reftime=None, method='cubic', insitu=None):
2889 """\
2890 Return a scan where all rows have been aligned in frequency/velocity.
2891 The alignment frequency frame (e.g. LSRK) is that set by function
2892 set_freqframe.
2893
2894 Parameters:
2895
2896 reftime: reference time to align at. By default, the time of
2897 the first row of data is used.
2898
2899 method: Interpolation method for regridding the spectra.
2900 Choose from 'nearest', 'linear', 'cubic' (default)
2901 and 'spline'
2902
2903 insitu: if False a new scantable is returned.
2904 Otherwise, the scaling is done in-situ
2905 The default is taken from .asaprc (False)
2906
2907 """
2908 if insitu is None: insitu = rcParams["insitu"]
2909 oldInsitu = self._math._insitu()
2910 self._math._setinsitu(insitu)
2911 varlist = vars()
2912 reftime = reftime or ""
2913 s = scantable(self._math._freq_align(self, reftime, method))
2914 s._add_history("freq_align", varlist)
2915 self._math._setinsitu(oldInsitu)
2916 if insitu:
2917 self._assign(s)
2918 else:
2919 return s
2920
2921 @asaplog_post_dec
2922 def opacity(self, tau=None, insitu=None):
2923 """\
2924 Apply an opacity correction. The data
2925 and Tsys are multiplied by the correction factor.
2926
2927 Parameters:
2928
2929 tau: (list of) opacity from which the correction factor is
2930 exp(tau*ZD)
2931 where ZD is the zenith-distance.
2932 If a list is provided, it has to be of length nIF,
2933 nIF*nPol or 1 and in order of IF/POL, e.g.
2934 [opif0pol0, opif0pol1, opif1pol0 ...]
2935 if tau is `None` the opacities are determined from a
2936 model.
2937
2938 insitu: if False a new scantable is returned.
2939 Otherwise, the scaling is done in-situ
2940 The default is taken from .asaprc (False)
2941
2942 """
2943 if insitu is None:
2944 insitu = rcParams['insitu']
2945 self._math._setinsitu(insitu)
2946 varlist = vars()
2947 if not hasattr(tau, "__len__"):
2948 tau = [tau]
2949 s = scantable(self._math._opacity(self, tau))
2950 s._add_history("opacity", varlist)
2951 if insitu:
2952 self._assign(s)
2953 else:
2954 return s
2955
2956 @asaplog_post_dec
2957 def bin(self, width=5, insitu=None):
2958 """\
2959 Return a scan where all spectra have been binned up.
2960
2961 Parameters:
2962
2963 width: The bin width (default=5) in pixels
2964
2965 insitu: if False a new scantable is returned.
2966 Otherwise, the scaling is done in-situ
2967 The default is taken from .asaprc (False)
2968
2969 """
2970 if insitu is None:
2971 insitu = rcParams['insitu']
2972 self._math._setinsitu(insitu)
2973 varlist = vars()
2974 s = scantable(self._math._bin(self, width))
2975 s._add_history("bin", varlist)
2976 if insitu:
2977 self._assign(s)
2978 else:
2979 return s
2980
2981 @asaplog_post_dec
2982 def reshape(self, first, last, insitu=None):
2983 """Resize the band by providing first and last channel.
2984 This will cut off all channels outside [first, last].
2985 """
2986 if insitu is None:
2987 insitu = rcParams['insitu']
2988 varlist = vars()
2989 if last < 0:
2990 last = self.nchan()-1 + last
2991 s = None
2992 if insitu:
2993 s = self
2994 else:
2995 s = self.copy()
2996 s._reshape(first,last)
2997 s._add_history("reshape", varlist)
2998 if not insitu:
2999 return s
3000
3001 @asaplog_post_dec
3002 def resample(self, width=5, method='cubic', insitu=None):
3003 """\
3004 Return a scan where all spectra have been binned up.
3005
3006 Parameters:
3007
3008 width: The bin width (default=5) in pixels
3009
3010 method: Interpolation method when correcting from a table.
3011 Values are 'nearest', 'linear', 'cubic' (default)
3012 and 'spline'
3013
3014 insitu: if False a new scantable is returned.
3015 Otherwise, the scaling is done in-situ
3016 The default is taken from .asaprc (False)
3017
3018 """
3019 if insitu is None:
3020 insitu = rcParams['insitu']
3021 self._math._setinsitu(insitu)
3022 varlist = vars()
3023 s = scantable(self._math._resample(self, method, width))
3024 s._add_history("resample", varlist)
3025 if insitu:
3026 self._assign(s)
3027 else:
3028 return s
3029
3030 @asaplog_post_dec
3031 def average_pol(self, mask=None, weight='none'):
3032 """\
3033 Average the Polarisations together.
3034
3035 Parameters:
3036
3037 mask: An optional mask defining the region, where the
3038 averaging will be applied. The output will have all
3039 specified points masked.
3040
3041 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3042 weighted), or 'tsys' (1/Tsys**2 weighted)
3043
3044 """
3045 varlist = vars()
3046 mask = mask or ()
3047 s = scantable(self._math._averagepol(self, mask, weight.upper()))
3048 s._add_history("average_pol", varlist)
3049 return s
3050
3051 @asaplog_post_dec
3052 def average_beam(self, mask=None, weight='none'):
3053 """\
3054 Average the Beams together.
3055
3056 Parameters:
3057 mask: An optional mask defining the region, where the
3058 averaging will be applied. The output will have all
3059 specified points masked.
3060
3061 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3062 weighted), or 'tsys' (1/Tsys**2 weighted)
3063
3064 """
3065 varlist = vars()
3066 mask = mask or ()
3067 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3068 s._add_history("average_beam", varlist)
3069 return s
3070
3071 def parallactify(self, pflag):
3072 """\
3073 Set a flag to indicate whether this data should be treated as having
3074 been 'parallactified' (total phase == 0.0)
3075
3076 Parameters:
3077
3078 pflag: Bool indicating whether to turn this on (True) or
3079 off (False)
3080
3081 """
3082 varlist = vars()
3083 self._parallactify(pflag)
3084 self._add_history("parallactify", varlist)
3085
3086 @asaplog_post_dec
3087 def convert_pol(self, poltype=None):
3088 """\
3089 Convert the data to a different polarisation type.
3090 Note that you will need cross-polarisation terms for most conversions.
3091
3092 Parameters:
3093
3094 poltype: The new polarisation type. Valid types are:
3095 'linear', 'circular', 'stokes' and 'linpol'
3096
3097 """
3098 varlist = vars()
3099 s = scantable(self._math._convertpol(self, poltype))
3100 s._add_history("convert_pol", varlist)
3101 return s
3102
3103 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep the unsmoothed data around so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # verification state passed to _get_verify_action;
        # 'N' means the remaining rows are rejected without plotting
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row, fall back to the default prompt mode
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3195
3196 @asaplog_post_dec
3197 def regrid_channel(self, width=5, plot=False, insitu=None):
3198 """\
3199 Regrid the spectra by the specified channel width
3200
3201 Parameters:
3202
3203 width: The channel width (float) of regridded spectra
3204 in the current spectral unit.
3205
3206 plot: [NOT IMPLEMENTED YET]
3207 plot the original and the regridded spectra.
3208 In this each indivual fit has to be approved, by
3209 typing 'y' or 'n'
3210
3211 insitu: if False a new scantable is returned.
3212 Otherwise, the scaling is done in-situ
3213 The default is taken from .asaprc (False)
3214
3215 """
3216 if insitu is None: insitu = rcParams['insitu']
3217 varlist = vars()
3218
3219 if plot:
3220 asaplog.post()
3221 asaplog.push("Verification plot is not implemtnetd yet.")
3222 asaplog.post("WARN")
3223
3224 s = self.copy()
3225 s._regrid_specchan(width)
3226
3227 s._add_history("regrid_channel", varlist)
3228
3229# if plot:
3230# from asap.asapplotter import new_asaplot
3231# theplot = new_asaplot(rcParams['plotter.gui'])
3232# from matplotlib import rc as rcp
3233# rcp('lines', linewidth=1)
3234# theplot.set_panels()
3235# ylab=s._get_ordinate_label()
3236# #theplot.palette(0,["#777777","red"])
3237# for r in xrange(s.nrow()):
3238# xsm=s._getabcissa(r)
3239# ysm=s._getspectrum(r)
3240# xorg=orgscan._getabcissa(r)
3241# yorg=orgscan._getspectrum(r)
3242# theplot.clear()
3243# theplot.hold()
3244# theplot.set_axes('ylabel',ylab)
3245# theplot.set_axes('xlabel',s._getabcissalabel(r))
3246# theplot.set_axes('title',s._getsourcename(r))
3247# theplot.set_line(label='Original',color="#777777")
3248# theplot.plot(xorg,yorg)
3249# theplot.set_line(label='Smoothed',color="red")
3250# theplot.plot(xsm,ysm)
3251# ### Ugly part for legend
3252# for i in [0,1]:
3253# theplot.subplots[0]['lines'].append(
3254# [theplot.subplots[0]['axes'].lines[i]]
3255# )
3256# theplot.release()
3257# ### Ugly part for legend
3258# theplot.subplots[0]['lines']=[]
3259# res = raw_input("Accept smoothing ([y]/n): ")
3260# if res.upper() == 'N':
3261# s._setspectrum(yorg, r)
3262# theplot.quit()
3263# del theplot
3264# del orgscan
3265
3266 if insitu: self._assign(s)
3267 else: return s
3268
3269 @asaplog_post_dec
3270 def _parse_wn(self, wn):
3271 if isinstance(wn, list) or isinstance(wn, tuple):
3272 return wn
3273 elif isinstance(wn, int):
3274 return [ wn ]
3275 elif isinstance(wn, str):
3276 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3277 val = wn.split('-')
3278 val = [int(val[0]), int(val[1])]
3279 val.sort()
3280 res = [i for i in xrange(val[0], val[1]+1)]
3281 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3282 val = int(wn[2:])+1
3283 res = [i for i in xrange(val)]
3284 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3285 val = int(wn[:-2])+1
3286 res = [i for i in xrange(val)]
3287 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3288 val = int(wn[1:])
3289 res = [i for i in xrange(val)]
3290 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3291 val = int(wn[:-1])
3292 res = [i for i in xrange(val)]
3293 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3294 # then interpreted in C++
3295 # side as [a,a+1,...,a_nyq]
3296 # (CAS-3759)
3297 val = int(wn[2:])
3298 res = [val, -999]
3299 #res = [i for i in xrange(val, self.nchan()/2+1)]
3300 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3301 # then interpreted in C++
3302 # side as [a,a+1,...,a_nyq]
3303 # (CAS-3759)
3304 val = int(wn[:-2])
3305 res = [val, -999]
3306 #res = [i for i in xrange(val, self.nchan()/2+1)]
3307 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3308 # then interpreted in C++
3309 # side as [a+1,a+2,...,a_nyq]
3310 # (CAS-3759)
3311 val = int(wn[1:])+1
3312 res = [val, -999]
3313 #res = [i for i in xrange(val, self.nchan()/2+1)]
3314 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3315 # then interpreted in C++
3316 # side as [a+1,a+2,...,a_nyq]
3317 # (CAS-3759)
3318 val = int(wn[:-1])+1
3319 res = [val, -999]
3320 #res = [i for i in xrange(val, self.nchan()/2+1)]
3321
3322 return res
3323 else:
3324 msg = 'wrong value given for addwn/rejwn'
3325 raise RuntimeError(msg)
3326
3327 @asaplog_post_dec
3328 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3329 """\
3330 Subtract baseline based on parameters written in Baseline Table.
3331
3332 Parameters:
3333 insitu: if True, baseline fitting/subtraction is done
3334 in-situ. If False, a new scantable with
3335 baseline subtracted is returned. Actually,
3336 format of the returned value depends on both
3337 insitu and retfitres (see below).
3338 The default is taken from .asaprc (False)
3339 retfitres: if True, the results of baseline fitting (i.e.,
3340 coefficients and rms) are returned.
3341 default is False.
3342 The format of the returned value of this
3343 function varies as follows:
3344 (1) in case insitu=True and retfitres=True:
3345 fitting result.
3346 (2) in case insitu=True and retfitres=False:
3347 None.
3348 (3) in case insitu=False and retfitres=True:
3349 a dictionary containing a new scantable
3350 (with baseline subtracted) and the fitting
3351 results.
3352 (4) in case insitu=False and retfitres=False:
3353 a new scantable (with baseline subtracted).
3354 inbltable: name of input baseline table. The row number of
3355 scantable and that of inbltable must be
3356 identical.
3357 outbltable: name of output baseline table where baseline
3358 parameters and fitting results recorded.
3359 default is ''(no output).
3360 overwrite: if True when an existing baseline table is
3361 specified for outbltable, overwrites it.
3362 Otherwise there is no harm.
3363 default is False.
3364 """
3365
3366 try:
3367 varlist = vars()
3368 if retfitres is None: retfitres = False
3369 if inbltable is None: raise ValueError("bltable missing.")
3370 if outbltable is None: outbltable = ''
3371 if overwrite is None: overwrite = False
3372
3373 if insitu is None: insitu = rcParams['insitu']
3374 if insitu:
3375 workscan = self
3376 else:
3377 workscan = self.copy()
3378
3379 sres = workscan._apply_bltable(inbltable,
3380 retfitres,
3381 outbltable,
3382 os.path.exists(outbltable),
3383 overwrite)
3384 if retfitres: res = parse_fitresult(sres)
3385
3386 workscan._add_history('apply_bltable', varlist)
3387
3388 if insitu:
3389 self._assign(workscan)
3390 if retfitres:
3391 return res
3392 else:
3393 return None
3394 else:
3395 if retfitres:
3396 return {'scantable': workscan, 'fitresults': res}
3397 else:
3398 return workscan
3399
3400 except RuntimeError, e:
3401 raise_fitting_failure_exception(e)
3402
3403 @asaplog_post_dec
3404 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3405 """\
3406 Subtract baseline based on parameters written in the input list.
3407
3408 Parameters:
3409 insitu: if True, baseline fitting/subtraction is done
3410 in-situ. If False, a new scantable with
3411 baseline subtracted is returned. Actually,
3412 format of the returned value depends on both
3413 insitu and retfitres (see below).
3414 The default is taken from .asaprc (False)
3415 retfitres: if True, the results of baseline fitting (i.e.,
3416 coefficients and rms) are returned.
3417 default is False.
3418 The format of the returned value of this
3419 function varies as follows:
3420 (1) in case insitu=True and retfitres=True:
3421 fitting result.
3422 (2) in case insitu=True and retfitres=False:
3423 None.
3424 (3) in case insitu=False and retfitres=True:
3425 a dictionary containing a new scantable
3426 (with baseline subtracted) and the fitting
3427 results.
3428 (4) in case insitu=False and retfitres=False:
3429 a new scantable (with baseline subtracted).
3430 blinfo: baseline parameter set stored in a dictionary
3431 or a list of dictionary. Each dictionary
3432 corresponds to each spectrum and must contain
3433 the following keys and values:
3434 'row': row number,
3435 'blfunc': function name. available ones include
3436 'poly', 'chebyshev', 'cspline' and
3437 'sinusoid',
3438 'order': maximum order of polynomial. needed
3439 if blfunc='poly' or 'chebyshev',
3440 'npiece': number or piecewise polynomial.
3441 needed if blfunc='cspline',
3442 'nwave': a list of sinusoidal wave numbers.
3443 needed if blfunc='sinusoid', and
3444 'masklist': min-max windows for channel mask.
3445 the specified ranges will be used
3446 for fitting.
3447 bltable: name of output baseline table where baseline
3448 parameters and fitting results recorded.
3449 default is ''(no output).
3450 overwrite: if True when an existing baseline table is
3451 specified for bltable, overwrites it.
3452 Otherwise there is no harm.
3453 default is False.
3454
3455 Example:
3456 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3457 'masklist':[[10,350],[352,510]]},
3458 {'row':1, 'blfunc':'cspline', 'npiece':3,
3459 'masklist':[[3,16],[19,404],[407,511]]}
3460 ])
3461
3462 the first spectrum (row=0) will be fitted with polynomial
3463 of order=5 and the next one (row=1) will be fitted with cubic
3464 spline consisting of 3 pieces.
3465 """
3466
3467 try:
3468 varlist = vars()
3469 if retfitres is None: retfitres = False
3470 if blinfo is None: blinfo = []
3471 if bltable is None: bltable = ''
3472 if overwrite is None: overwrite = False
3473
3474 if insitu is None: insitu = rcParams['insitu']
3475 if insitu:
3476 workscan = self
3477 else:
3478 workscan = self.copy()
3479
3480 nrow = workscan.nrow()
3481
3482 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3483
3484 print "in_blinfo=< "+ str(in_blinfo)+" >"
3485
3486 sres = workscan._sub_baseline(in_blinfo,
3487 retfitres,
3488 bltable,
3489 os.path.exists(bltable),
3490 overwrite)
3491 if retfitres: res = parse_fitresult(sres)
3492
3493 workscan._add_history('sub_baseline', varlist)
3494
3495 if insitu:
3496 self._assign(workscan)
3497 if retfitres:
3498 return res
3499 else:
3500 return None
3501 else:
3502 if retfitres:
3503 return {'scantable': workscan, 'fitresults': res}
3504 else:
3505 return workscan
3506
3507 except RuntimeError, e:
3508 raise_fitting_failure_exception(e)
3509
3510 @asaplog_post_dec
3511 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3512 whichrow=None, uselinefinder=None, edge=None,
3513 threshold=None, chan_avg_limit=None):
3514 """\
3515 Calculates and returns model selection criteria for a specified
3516 baseline model and a given spectrum data.
3517 Available values include Akaike Information Criterion (AIC), the
3518 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3519 Bayesian Information Criterion (BIC) and the Generalised Cross
3520 Validation (GCV).
3521
3522 Parameters:
3523 value: name of model selection criteria to calculate.
3524 available ones include 'aic', 'aicc', 'bic' and
3525 'gcv'. default is 'aicc'.
3526 blfunc: baseline function name. available ones include
3527 'chebyshev', 'cspline' and 'sinusoid'.
3528 default is 'chebyshev'.
3529 order: parameter for basline function. actually stands for
3530 order of polynomial (order) for 'chebyshev',
3531 number of spline pieces (npiece) for 'cspline' and
3532 maximum wave number for 'sinusoid', respectively.
3533 default is 5 (which is also the default order value
3534 for [auto_]chebyshev_baseline()).
3535 mask: an optional mask. default is [].
3536 whichrow: row number. default is 0 (the first row)
3537 uselinefinder: use sd.linefinder() to flag out line regions
3538 default is True.
3539 edge: an optional number of channel to drop at
3540 the edge of spectrum. If only one value is
3541 specified, the same number will be dropped
3542 from both sides of the spectrum. Default
3543 is to keep all channels. Nested tuples
3544 represent individual edge selection for
3545 different IFs (a number of spectral channels
3546 can be different)
3547 default is (0, 0).
3548 threshold: the threshold used by line finder. It is
3549 better to keep it large as only strong lines
3550 affect the baseline solution.
3551 default is 3.
3552 chan_avg_limit: a maximum number of consequtive spectral
3553 channels to average during the search of
3554 weak and broad lines. The default is no
3555 averaging (and no search for weak lines).
3556 If such lines can affect the fitted baseline
3557 (e.g. a high order polynomial is fitted),
3558 increase this parameter (usually values up
3559 to 8 are reasonable). Most users of this
3560 method should find the default value sufficient.
3561 default is 1.
3562
3563 Example:
3564 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3565 """
3566
3567 try:
3568 varlist = vars()
3569
3570 if value is None: value = 'aicc'
3571 if blfunc is None: blfunc = 'chebyshev'
3572 if order is None: order = 5
3573 if mask is None: mask = []
3574 if whichrow is None: whichrow = 0
3575 if uselinefinder is None: uselinefinder = True
3576 if edge is None: edge = (0, 0)
3577 if threshold is None: threshold = 3
3578 if chan_avg_limit is None: chan_avg_limit = 1
3579
3580 return self._calc_aic(value, blfunc, order, mask,
3581 whichrow, uselinefinder, edge,
3582 threshold, chan_avg_limit)
3583
3584 except RuntimeError, e:
3585 raise_fitting_failure_exception(e)
3586
3587 @asaplog_post_dec
3588 def sinusoid_baseline(self, mask=None, applyfft=None,
3589 fftmethod=None, fftthresh=None,
3590 addwn=None, rejwn=None,
3591 insitu=None,
3592 clipthresh=None, clipniter=None,
3593 plot=None,
3594 getresidual=None,
3595 showprogress=None, minnrow=None,
3596 outlog=None,
3597 blfile=None, csvformat=None,
3598 bltable=None):
3599 """\
3600 Return a scan which has been baselined (all rows) with sinusoidal
3601 functions.
3602
3603 Parameters:
3604 mask: an optional mask
3605 applyfft: if True use some method, such as FFT, to find
3606 strongest sinusoidal components in the wavenumber
3607 domain to be used for baseline fitting.
3608 default is True.
3609 fftmethod: method to find the strong sinusoidal components.
3610 now only 'fft' is available and it is the default.
3611 fftthresh: the threshold to select wave numbers to be used for
3612 fitting from the distribution of amplitudes in the
3613 wavenumber domain.
3614 both float and string values accepted.
3615 given a float value, the unit is set to sigma.
3616 for string values, allowed formats include:
3617 'xsigma' or 'x' (= x-sigma level. e.g.,
3618 '3sigma'), or
3619 'topx' (= the x strongest ones, e.g. 'top5').
3620 default is 3.0 (unit: sigma).
3621 addwn: the additional wave numbers to be used for fitting.
3622 list or integer value is accepted to specify every
3623 wave numbers. also string value can be used in case
3624 you need to specify wave numbers in a certain range,
3625 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3626 '<a' (= 0,1,...,a-2,a-1),
3627 '>=a' (= a, a+1, ... up to the maximum wave
3628 number corresponding to the Nyquist
3629 frequency for the case of FFT).
3630 default is [0].
3631 rejwn: the wave numbers NOT to be used for fitting.
3632 can be set just as addwn but has higher priority:
3633 wave numbers which are specified both in addwn
3634 and rejwn will NOT be used. default is [].
3635 insitu: if False a new scantable is returned.
3636 Otherwise, the scaling is done in-situ
3637 The default is taken from .asaprc (False)
3638 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3639 clipniter: maximum number of iteration of 'clipthresh'-sigma
3640 clipping (default is 0)
3641 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3642 plot the fit and the residual. In this each
3643 indivual fit has to be approved, by typing 'y'
3644 or 'n'
3645 getresidual: if False, returns best-fit values instead of
3646 residual. (default is True)
3647 showprogress: show progress status for large data.
3648 default is True.
3649 minnrow: minimum number of input spectra to show.
3650 default is 1000.
3651 outlog: Output the coefficients of the best-fit
3652 function to logger (default is False)
3653 blfile: Name of a text file in which the best-fit
3654 parameter values to be written
3655 (default is '': no file/logger output)
3656 csvformat: if True blfile is csv-formatted, default is False.
3657 bltable: name of a baseline table where fitting results
3658 (coefficients, rms, etc.) are to be written.
3659 if given, fitting results will NOT be output to
3660 scantable (insitu=True) or None will be
3661 returned (insitu=False).
3662 (default is "": no table output)
3663
3664 Example:
3665 # return a scan baselined by a combination of sinusoidal curves
3666 # having wave numbers in spectral window up to 10,
3667 # also with 3-sigma clipping, iteration up to 4 times
3668 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3669
3670 Note:
3671 The best-fit parameter values output in logger and/or blfile are now
3672 based on specunit of 'channel'.
3673 """
3674
3675 try:
3676 varlist = vars()
3677
3678 if insitu is None: insitu = rcParams['insitu']
3679 if insitu:
3680 workscan = self
3681 else:
3682 workscan = self.copy()
3683
3684 if mask is None: mask = []
3685 if applyfft is None: applyfft = True
3686 if fftmethod is None: fftmethod = 'fft'
3687 if fftthresh is None: fftthresh = 3.0
3688 if addwn is None: addwn = [0]
3689 if rejwn is None: rejwn = []
3690 if clipthresh is None: clipthresh = 3.0
3691 if clipniter is None: clipniter = 0
3692 if plot is None: plot = False
3693 if getresidual is None: getresidual = True
3694 if showprogress is None: showprogress = True
3695 if minnrow is None: minnrow = 1000
3696 if outlog is None: outlog = False
3697 if blfile is None: blfile = ''
3698 if csvformat is None: csvformat = False
3699 if bltable is None: bltable = ''
3700
3701 sapplyfft = 'true' if applyfft else 'false'
3702 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3703
3704 scsvformat = 'T' if csvformat else 'F'
3705
3706 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3707 workscan._sinusoid_baseline(mask,
3708 fftinfo,
3709 #applyfft, fftmethod.lower(),
3710 #str(fftthresh).lower(),
3711 workscan._parse_wn(addwn),
3712 workscan._parse_wn(rejwn),
3713 clipthresh, clipniter,
3714 getresidual,
3715 pack_progress_params(showprogress,
3716 minnrow),
3717 outlog, scsvformat+blfile,
3718 bltable)
3719 workscan._add_history('sinusoid_baseline', varlist)
3720
3721 if bltable == '':
3722 if insitu:
3723 self._assign(workscan)
3724 else:
3725 return workscan
3726 else:
3727 if not insitu:
3728 return None
3729
3730 except RuntimeError, e:
3731 raise_fitting_failure_exception(e)
3732
3733
3734 @asaplog_post_dec
3735 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3736 fftmethod=None, fftthresh=None,
3737 addwn=None, rejwn=None,
3738 insitu=None,
3739 clipthresh=None, clipniter=None,
3740 edge=None, threshold=None, chan_avg_limit=None,
3741 plot=None,
3742 getresidual=None,
3743 showprogress=None, minnrow=None,
3744 outlog=None,
3745 blfile=None, csvformat=None,
3746 bltable=None):
3747 """\
3748 Return a scan which has been baselined (all rows) with sinusoidal
3749 functions.
3750 Spectral lines are detected first using linefinder and masked out
3751 to avoid them affecting the baseline solution.
3752
3753 Parameters:
3754 mask: an optional mask retreived from scantable
3755 applyfft: if True use some method, such as FFT, to find
3756 strongest sinusoidal components in the wavenumber
3757 domain to be used for baseline fitting.
3758 default is True.
3759 fftmethod: method to find the strong sinusoidal components.
3760 now only 'fft' is available and it is the default.
3761 fftthresh: the threshold to select wave numbers to be used for
3762 fitting from the distribution of amplitudes in the
3763 wavenumber domain.
3764 both float and string values accepted.
3765 given a float value, the unit is set to sigma.
3766 for string values, allowed formats include:
3767 'xsigma' or 'x' (= x-sigma level. e.g.,
3768 '3sigma'), or
3769 'topx' (= the x strongest ones, e.g. 'top5').
3770 default is 3.0 (unit: sigma).
3771 addwn: the additional wave numbers to be used for fitting.
3772 list or integer value is accepted to specify every
3773 wave numbers. also string value can be used in case
3774 you need to specify wave numbers in a certain range,
3775 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3776 '<a' (= 0,1,...,a-2,a-1),
3777 '>=a' (= a, a+1, ... up to the maximum wave
3778 number corresponding to the Nyquist
3779 frequency for the case of FFT).
3780 default is [0].
3781 rejwn: the wave numbers NOT to be used for fitting.
3782 can be set just as addwn but has higher priority:
3783 wave numbers which are specified both in addwn
3784 and rejwn will NOT be used. default is [].
3785 insitu: if False a new scantable is returned.
3786 Otherwise, the scaling is done in-situ
3787 The default is taken from .asaprc (False)
3788 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3789 clipniter: maximum number of iteration of 'clipthresh'-sigma
3790 clipping (default is 0)
3791 edge: an optional number of channel to drop at
3792 the edge of spectrum. If only one value is
3793 specified, the same number will be dropped
3794 from both sides of the spectrum. Default
3795 is to keep all channels. Nested tuples
3796 represent individual edge selection for
3797 different IFs (a number of spectral channels
3798 can be different)
3799 threshold: the threshold used by line finder. It is
3800 better to keep it large as only strong lines
3801 affect the baseline solution.
3802 chan_avg_limit: a maximum number of consequtive spectral
3803 channels to average during the search of
3804 weak and broad lines. The default is no
3805 averaging (and no search for weak lines).
3806 If such lines can affect the fitted baseline
3807 (e.g. a high order polynomial is fitted),
3808 increase this parameter (usually values up
3809 to 8 are reasonable). Most users of this
3810 method should find the default value sufficient.
3811 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3812 plot the fit and the residual. In this each
3813 indivual fit has to be approved, by typing 'y'
3814 or 'n'
3815 getresidual: if False, returns best-fit values instead of
3816 residual. (default is True)
3817 showprogress: show progress status for large data.
3818 default is True.
3819 minnrow: minimum number of input spectra to show.
3820 default is 1000.
3821 outlog: Output the coefficients of the best-fit
3822 function to logger (default is False)
3823 blfile: Name of a text file in which the best-fit
3824 parameter values to be written
3825 (default is "": no file/logger output)
3826 csvformat: if True blfile is csv-formatted, default is False.
3827 bltable: name of a baseline table where fitting results
3828 (coefficients, rms, etc.) are to be written.
3829 if given, fitting results will NOT be output to
3830 scantable (insitu=True) or None will be
3831 returned (insitu=False).
3832 (default is "": no table output)
3833
3834 Example:
3835 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3836
3837 Note:
3838 The best-fit parameter values output in logger and/or blfile are now
3839 based on specunit of 'channel'.
3840 """
3841
3842 try:
3843 varlist = vars()
3844
3845 if insitu is None: insitu = rcParams['insitu']
3846 if insitu:
3847 workscan = self
3848 else:
3849 workscan = self.copy()
3850
3851 if mask is None: mask = []
3852 if applyfft is None: applyfft = True
3853 if fftmethod is None: fftmethod = 'fft'
3854 if fftthresh is None: fftthresh = 3.0
3855 if addwn is None: addwn = [0]
3856 if rejwn is None: rejwn = []
3857 if clipthresh is None: clipthresh = 3.0
3858 if clipniter is None: clipniter = 0
3859 if edge is None: edge = (0,0)
3860 if threshold is None: threshold = 3
3861 if chan_avg_limit is None: chan_avg_limit = 1
3862 if plot is None: plot = False
3863 if getresidual is None: getresidual = True
3864 if showprogress is None: showprogress = True
3865 if minnrow is None: minnrow = 1000
3866 if outlog is None: outlog = False
3867 if blfile is None: blfile = ''
3868 if csvformat is None: csvformat = False
3869 if bltable is None: bltable = ''
3870
3871 sapplyfft = 'true' if applyfft else 'false'
3872 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3873
3874 scsvformat = 'T' if csvformat else 'F'
3875
3876 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3877 workscan._auto_sinusoid_baseline(mask,
3878 fftinfo,
3879 workscan._parse_wn(addwn),
3880 workscan._parse_wn(rejwn),
3881 clipthresh, clipniter,
3882 normalise_edge_param(edge),
3883 threshold, chan_avg_limit,
3884 getresidual,
3885 pack_progress_params(showprogress,
3886 minnrow),
3887 outlog, scsvformat+blfile, bltable)
3888 workscan._add_history("auto_sinusoid_baseline", varlist)
3889
3890 if bltable == '':
3891 if insitu:
3892 self._assign(workscan)
3893 else:
3894 return workscan
3895 else:
3896 if not insitu:
3897 return None
3898
3899 except RuntimeError, e:
3900 raise_fitting_failure_exception(e)
3901
3902 @asaplog_post_dec
3903 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3904 clipthresh=None, clipniter=None, plot=None,
3905 getresidual=None, showprogress=None, minnrow=None,
3906 outlog=None, blfile=None, csvformat=None,
3907 bltable=None):
3908 """\
3909 Return a scan which has been baselined (all rows) by cubic spline
3910 function (piecewise cubic polynomial).
3911
3912 Parameters:
3913 mask: An optional mask
3914 npiece: Number of pieces. (default is 2)
3915 insitu: If False a new scantable is returned.
3916 Otherwise, the scaling is done in-situ
3917 The default is taken from .asaprc (False)
3918 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3919 clipniter: maximum number of iteration of 'clipthresh'-sigma
3920 clipping (default is 0)
3921 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3922 plot the fit and the residual. In this each
3923 indivual fit has to be approved, by typing 'y'
3924 or 'n'
3925 getresidual: if False, returns best-fit values instead of
3926 residual. (default is True)
3927 showprogress: show progress status for large data.
3928 default is True.
3929 minnrow: minimum number of input spectra to show.
3930 default is 1000.
3931 outlog: Output the coefficients of the best-fit
3932 function to logger (default is False)
3933 blfile: Name of a text file in which the best-fit
3934 parameter values to be written
3935 (default is "": no file/logger output)
3936 csvformat: if True blfile is csv-formatted, default is False.
3937 bltable: name of a baseline table where fitting results
3938 (coefficients, rms, etc.) are to be written.
3939 if given, fitting results will NOT be output to
3940 scantable (insitu=True) or None will be
3941 returned (insitu=False).
3942 (default is "": no table output)
3943
3944 Example:
3945 # return a scan baselined by a cubic spline consisting of 2 pieces
3946 # (i.e., 1 internal knot),
3947 # also with 3-sigma clipping, iteration up to 4 times
3948 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3949
3950 Note:
3951 The best-fit parameter values output in logger and/or blfile are now
3952 based on specunit of 'channel'.
3953 """
3954
3955 try:
3956 varlist = vars()
3957
3958 if insitu is None: insitu = rcParams['insitu']
3959 if insitu:
3960 workscan = self
3961 else:
3962 workscan = self.copy()
3963
3964 if mask is None: mask = []
3965 if npiece is None: npiece = 2
3966 if clipthresh is None: clipthresh = 3.0
3967 if clipniter is None: clipniter = 0
3968 if plot is None: plot = False
3969 if getresidual is None: getresidual = True
3970 if showprogress is None: showprogress = True
3971 if minnrow is None: minnrow = 1000
3972 if outlog is None: outlog = False
3973 if blfile is None: blfile = ''
3974 if csvformat is None: csvformat = False
3975 if bltable is None: bltable = ''
3976
3977 scsvformat = 'T' if csvformat else 'F'
3978
3979 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3980 workscan._cspline_baseline(mask, npiece,
3981 clipthresh, clipniter,
3982 getresidual,
3983 pack_progress_params(showprogress,
3984 minnrow),
3985 outlog, scsvformat+blfile,
3986 bltable)
3987 workscan._add_history("cspline_baseline", varlist)
3988
3989 if bltable == '':
3990 if insitu:
3991 self._assign(workscan)
3992 else:
3993 return workscan
3994 else:
3995 if not insitu:
3996 return None
3997
3998 except RuntimeError, e:
3999 raise_fitting_failure_exception(e)
4000
4001 @asaplog_post_dec
4002 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
4003 clipthresh=None, clipniter=None,
4004 edge=None, threshold=None, chan_avg_limit=None,
4005 getresidual=None, plot=None,
4006 showprogress=None, minnrow=None, outlog=None,
4007 blfile=None, csvformat=None, bltable=None):
4008 """\
4009 Return a scan which has been baselined (all rows) by cubic spline
4010 function (piecewise cubic polynomial).
4011 Spectral lines are detected first using linefinder and masked out
4012 to avoid them affecting the baseline solution.
4013
4014 Parameters:
4015 mask: an optional mask retreived from scantable
4016 npiece: Number of pieces. (default is 2)
4017 insitu: if False a new scantable is returned.
4018 Otherwise, the scaling is done in-situ
4019 The default is taken from .asaprc (False)
4020 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4021 clipniter: maximum number of iteration of 'clipthresh'-sigma
4022 clipping (default is 0)
4023 edge: an optional number of channel to drop at
4024 the edge of spectrum. If only one value is
4025 specified, the same number will be dropped
4026 from both sides of the spectrum. Default
4027 is to keep all channels. Nested tuples
4028 represent individual edge selection for
4029 different IFs (a number of spectral channels
4030 can be different)
4031 threshold: the threshold used by line finder. It is
4032 better to keep it large as only strong lines
4033 affect the baseline solution.
4034 chan_avg_limit: a maximum number of consequtive spectral
4035 channels to average during the search of
4036 weak and broad lines. The default is no
4037 averaging (and no search for weak lines).
4038 If such lines can affect the fitted baseline
4039 (e.g. a high order polynomial is fitted),
4040 increase this parameter (usually values up
4041 to 8 are reasonable). Most users of this
4042 method should find the default value sufficient.
4043 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4044 plot the fit and the residual. In this each
4045 indivual fit has to be approved, by typing 'y'
4046 or 'n'
4047 getresidual: if False, returns best-fit values instead of
4048 residual. (default is True)
4049 showprogress: show progress status for large data.
4050 default is True.
4051 minnrow: minimum number of input spectra to show.
4052 default is 1000.
4053 outlog: Output the coefficients of the best-fit
4054 function to logger (default is False)
4055 blfile: Name of a text file in which the best-fit
4056 parameter values to be written
4057 (default is "": no file/logger output)
4058 csvformat: if True blfile is csv-formatted, default is False.
4059 bltable: name of a baseline table where fitting results
4060 (coefficients, rms, etc.) are to be written.
4061 if given, fitting results will NOT be output to
4062 scantable (insitu=True) or None will be
4063 returned (insitu=False).
4064 (default is "": no table output)
4065
4066 Example:
4067 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4068
4069 Note:
4070 The best-fit parameter values output in logger and/or blfile are now
4071 based on specunit of 'channel'.
4072 """
4073
4074 try:
4075 varlist = vars()
4076
4077 if insitu is None: insitu = rcParams['insitu']
4078 if insitu:
4079 workscan = self
4080 else:
4081 workscan = self.copy()
4082
4083 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4084 if mask is None: mask = []
4085 if npiece is None: npiece = 2
4086 if clipthresh is None: clipthresh = 3.0
4087 if clipniter is None: clipniter = 0
4088 if edge is None: edge = (0, 0)
4089 if threshold is None: threshold = 3
4090 if chan_avg_limit is None: chan_avg_limit = 1
4091 if plot is None: plot = False
4092 if getresidual is None: getresidual = True
4093 if showprogress is None: showprogress = True
4094 if minnrow is None: minnrow = 1000
4095 if outlog is None: outlog = False
4096 if blfile is None: blfile = ''
4097 if csvformat is None: csvformat = False
4098 if bltable is None: bltable = ''
4099
4100 scsvformat = 'T' if csvformat else 'F'
4101
4102 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4103 workscan._auto_cspline_baseline(mask, npiece,
4104 clipthresh, clipniter,
4105 normalise_edge_param(edge),
4106 threshold,
4107 chan_avg_limit, getresidual,
4108 pack_progress_params(showprogress,
4109 minnrow),
4110 outlog,
4111 scsvformat+blfile,
4112 bltable)
4113 workscan._add_history("auto_cspline_baseline", varlist)
4114
4115 if bltable == '':
4116 if insitu:
4117 self._assign(workscan)
4118 else:
4119 return workscan
4120 else:
4121 if not insitu:
4122 return None
4123
4124 except RuntimeError, e:
4125 raise_fitting_failure_exception(e)
4126
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev
        polynomials.

        Parameters:
            mask:        An optional mask
            order:       the maximum order of Chebyshev polynomial
                         (default is 5)
            insitu:      If False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
            clipthresh:  Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:   maximum number of iteration of 'clipthresh'-sigma
                         clipping (default is 0)
            plot:        *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                         plot the fit and the residual. In this each
                         individual fit has to be approved, by typing 'y'
                         or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                         default is True.
            minnrow:     minimum number of input spectra to show.
                         default is 1000.
            outlog:      Output the coefficients of the best-fit
                         function to logger (default is False)
            blfile:      Name of a text file in which the best-fit
                         parameter values to be written
                         (default is "": no file/logger output)
            csvformat:   if True blfile is csv-formatted, default is False.
            bltable:     name of a baseline table where fitting results
                         (coefficients, rms, etc.) are to be written.
                         if given, fitting results will NOT be output to
                         scantable (insitu=True) or None will be
                         returned (insitu=False).
                         (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of
            # maximum order 3, with 3-sigma clipping, iterated up to 4 times
            bscan = scan.chebyshev_baseline(order=3, clipthresh=3.0,
                                            clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # None means "use the documented default" for every parameter
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer expects the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is
            # implemented as a fitter method; 'plot' is therefore ignored.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # when a baseline table was requested the results live there,
            # not in the scantable, so nothing is assigned/returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4224
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev
        polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:        an optional mask retrieved from scantable
            order:       the maximum order of Chebyshev polynomial
                         (default is 5)
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
            clipthresh:  Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:   maximum number of iteration of 'clipthresh'-sigma
                         clipping (default is 0)
            edge:        an optional number of channel to drop at
                         the edge of spectrum. If only one value is
                         specified, the same number will be dropped
                         from both sides of the spectrum. Default
                         is to keep all channels. Nested tuples
                         represent individual edge selection for
                         different IFs (a number of spectral channels
                         can be different)
            threshold:   the threshold used by line finder. It is
                         better to keep it large as only strong lines
                         affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                         channels to average during the search of
                         weak and broad lines. The default is no
                         averaging (and no search for weak lines).
                         If such lines can affect the fitted baseline
                         (e.g. a high order polynomial is fitted),
                         increase this parameter (usually values up
                         to 8 are reasonable). Most users of this
                         method should find the default value sufficient.
            plot:        *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                         plot the fit and the residual. In this each
                         individual fit has to be approved, by typing 'y'
                         or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                         default is True.
            minnrow:     minimum number of input spectra to show.
                         default is 1000.
            outlog:      Output the coefficients of the best-fit
                         function to logger (default is False)
            blfile:      Name of a text file in which the best-fit
                         parameter values to be written
                         (default is "": no file/logger output)
            csvformat:   if True blfile is csv-formatted, default is False.
            bltable:     name of a baseline table where fitting results
                         (coefficients, rms, etc.) are to be written.
                         if given, fitting results will NOT be output to
                         scantable (insitu=True) or None will be
                         returned (insitu=False).
                         (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # None means "use the documented default" for every parameter
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer expects the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is
            # implemented as a fitter method; 'plot' is therefore ignored.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table was requested the results live there,
            # not in the scantable, so nothing is assigned/returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4347
4348 @asaplog_post_dec
4349 def poly_baseline(self, mask=None, order=None, insitu=None,
4350 clipthresh=None, clipniter=None, plot=None,
4351 getresidual=None, showprogress=None, minnrow=None,
4352 outlog=None, blfile=None, csvformat=None,
4353 bltable=None):
4354 """\
4355 Return a scan which has been baselined (all rows) by a polynomial.
4356 Parameters:
4357 mask: an optional mask
4358 order: the order of the polynomial (default is 0)
4359 insitu: if False a new scantable is returned.
4360 Otherwise, the scaling is done in-situ
4361 The default is taken from .asaprc (False)
4362 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4363 clipniter: maximum number of iteration of 'clipthresh'-sigma
4364 clipping (default is 0)
4365 plot: plot the fit and the residual. In this each
4366 indivual fit has to be approved, by typing 'y'
4367 or 'n'
4368 getresidual: if False, returns best-fit values instead of
4369 residual. (default is True)
4370 showprogress: show progress status for large data.
4371 default is True.
4372 minnrow: minimum number of input spectra to show.
4373 default is 1000.
4374 outlog: Output the coefficients of the best-fit
4375 function to logger (default is False)
4376 blfile: Name of a text file in which the best-fit
4377 parameter values to be written
4378 (default is "": no file/logger output)
4379 csvformat: if True blfile is csv-formatted, default is False.
4380 bltable: name of a baseline table where fitting results
4381 (coefficients, rms, etc.) are to be written.
4382 if given, fitting results will NOT be output to
4383 scantable (insitu=True) or None will be
4384 returned (insitu=False).
4385 (default is "": no table output)
4386
4387 Example:
4388 # return a scan baselined by a third order polynomial,
4389 # not using a mask
4390 bscan = scan.poly_baseline(order=3)
4391 """
4392
4393 try:
4394 varlist = vars()
4395
4396 if insitu is None:
4397 insitu = rcParams["insitu"]
4398 if insitu:
4399 workscan = self
4400 else:
4401 workscan = self.copy()
4402
4403 if mask is None: mask = []
4404 if order is None: order = 0
4405 if clipthresh is None: clipthresh = 3.0
4406 if clipniter is None: clipniter = 0
4407 if plot is None: plot = False
4408 if getresidual is None: getresidual = True
4409 if showprogress is None: showprogress = True
4410 if minnrow is None: minnrow = 1000
4411 if outlog is None: outlog = False
4412 if blfile is None: blfile = ''
4413 if csvformat is None: csvformat = False
4414 if bltable is None: bltable = ''
4415
4416 scsvformat = 'T' if csvformat else 'F'
4417
4418 if plot:
4419 outblfile = (blfile != "") and \
4420 os.path.exists(os.path.expanduser(
4421 os.path.expandvars(blfile))
4422 )
4423 if outblfile:
4424 blf = open(blfile, "a")
4425
4426 f = fitter()
4427 f.set_function(lpoly=order)
4428
4429 rows = xrange(workscan.nrow())
4430 #if len(rows) > 0: workscan._init_blinfo()
4431
4432 action = "H"
4433 for r in rows:
4434 f.x = workscan._getabcissa(r)
4435 f.y = workscan._getspectrum(r)
4436 if mask:
4437 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4438 else: # mask=None
4439 f.mask = workscan._getmask(r)
4440
4441 f.data = None
4442 f.fit()
4443
4444 if action != "Y": # skip plotting when accepting all
4445 f.plot(residual=True)
4446 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4447 #if accept_fit.upper() == "N":
4448 # #workscan._append_blinfo(None, None, None)
4449 # continue
4450 accept_fit = self._get_verify_action("Accept fit?",action)
4451 if r == 0: action = None
4452 if accept_fit.upper() == "N":
4453 continue
4454 elif accept_fit.upper() == "R":
4455 break
4456 elif accept_fit.upper() == "A":
4457 action = "Y"
4458
4459 blpars = f.get_parameters()
4460 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4461 #workscan._append_blinfo(blpars, masklist, f.mask)
4462 workscan._setspectrum((f.fitter.getresidual()
4463 if getresidual else f.fitter.getfit()), r)
4464
4465 if outblfile:
4466 rms = workscan.get_rms(f.mask, r)
4467 dataout = \
4468 workscan.format_blparams_row(blpars["params"],
4469 blpars["fixed"],
4470 rms, str(masklist),
4471 r, True, csvformat)
4472 blf.write(dataout)
4473
4474 f._p.unmap()
4475 f._p = None
4476
4477 if outblfile:
4478 blf.close()
4479 else:
4480 workscan._poly_baseline(mask, order,
4481 clipthresh, clipniter, #
4482 getresidual,
4483 pack_progress_params(showprogress,
4484 minnrow),
4485 outlog, scsvformat+blfile,
4486 bltable) #
4487
4488 workscan._add_history("poly_baseline", varlist)
4489
4490 if insitu:
4491 self._assign(workscan)
4492 else:
4493 return workscan
4494
4495 except RuntimeError, e:
4496 raise_fitting_failure_exception(e)
4497
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:        an optional mask retrieved from scantable
            order:       the order of the polynomial (default is 0)
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
            clipthresh:  Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:   maximum number of iteration of 'clipthresh'-sigma
                         clipping (default is 0)
            edge:        an optional number of channel to drop at
                         the edge of spectrum. If only one value is
                         specified, the same number will be dropped
                         from both sides of the spectrum. Default
                         is to keep all channels. Nested tuples
                         represent individual edge selection for
                         different IFs (a number of spectral channels
                         can be different)
            threshold:   the threshold used by line finder. It is
                         better to keep it large as only strong lines
                         affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                         channels to average during the search of
                         weak and broad lines. The default is no
                         averaging (and no search for weak lines).
                         If such lines can affect the fitted baseline
                         (e.g. a high order polynomial is fitted),
                         increase this parameter (usually values up
                         to 8 are reasonable). Most users of this
                         method should find the default value sufficient.
            plot:        plot the fit and the residual. In this each
                         individual fit has to be approved, by typing 'y'
                         or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                         default is True.
            minnrow:     minimum number of input spectra to show.
                         default is 1000.
            outlog:      Output the coefficients of the best-fit
                         function to logger (default is False)
            blfile:      Name of a text file in which the best-fit
                         parameter values to be written
                         (default is "": no file/logger output)
            csvformat:   if True blfile is csv-formatted, default is False.
            bltable:     name of a baseline table where fitting results
                         (coefficients, rms, etc.) are to be written.
                         if given, fitting results will NOT be output to
                         scantable (insitu=True) or None will be
                         returned (insitu=False).
                         (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # None means "use the documented default" for every parameter
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer expects the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # expand edge into a flat per-IF (start, end) list
            edge = normalise_edge_param(edge)

            if plot:
                # Interactive fitting in Python using linefinder + fitter.
                # NOTE(review): output to blfile happens only when the file
                # already exists -- confirm this is the intended behaviour.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                action = "H"
                for r in rows:
                    # edge holds (start, end) pairs per IF, hence the *2
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine the user mask with the row's flag mask
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only channels the linefinder did not flag as lines
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # only the first row offers the help prompt
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table was requested the results live there,
            # not in the scantable, so nothing is assigned/returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4682
4683 def _init_blinfo(self):
4684 """\
4685 Initialise the following three auxiliary members:
4686 blpars : parameters of the best-fit baseline,
4687 masklists : mask data (edge positions of masked channels) and
4688 actualmask : mask data (in boolean list),
4689 to keep for use later (including output to logger/text files).
4690 Used by poly_baseline() and auto_poly_baseline() in case of
4691 'plot=True'.
4692 """
4693 self.blpars = []
4694 self.masklists = []
4695 self.actualmask = []
4696 return
4697
4698 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4699 """\
4700 Append baseline-fitting related info to blpars, masklist and
4701 actualmask.
4702 """
4703 self.blpars.append(data_blpars)
4704 self.masklists.append(data_masklists)
4705 self.actualmask.append(data_actualmask)
4706 return
4707
4708 @asaplog_post_dec
4709 def rotate_linpolphase(self, angle):
4710 """\
4711 Rotate the phase of the complex polarization O=Q+iU correlation.
4712 This is always done in situ in the raw data. So if you call this
4713 function more than once then each call rotates the phase further.
4714
4715 Parameters:
4716
4717 angle: The angle (degrees) to rotate (add) by.
4718
4719 Example::
4720
4721 scan.rotate_linpolphase(2.3)
4722
4723 """
4724 varlist = vars()
4725 self._math._rotate_linpolphase(self, angle)
4726 self._add_history("rotate_linpolphase", varlist)
4727 return
4728
4729 @asaplog_post_dec
4730 def rotate_xyphase(self, angle):
4731 """\
4732 Rotate the phase of the XY correlation. This is always done in situ
4733 in the data. So if you call this function more than once
4734 then each call rotates the phase further.
4735
4736 Parameters:
4737
4738 angle: The angle (degrees) to rotate (add) by.
4739
4740 Example::
4741
4742 scan.rotate_xyphase(2.3)
4743
4744 """
4745 varlist = vars()
4746 self._math._rotate_xyphase(self, angle)
4747 self._add_history("rotate_xyphase", varlist)
4748 return
4749
4750 @asaplog_post_dec
4751 def swap_linears(self):
4752 """\
4753 Swap the linear polarisations XX and YY, or better the first two
4754 polarisations as this also works for ciculars.
4755 """
4756 varlist = vars()
4757 self._math._swap_linears(self)
4758 self._add_history("swap_linears", varlist)
4759 return
4760
4761 @asaplog_post_dec
4762 def invert_phase(self):
4763 """\
4764 Invert the phase of the complex polarisation
4765 """
4766 varlist = vars()
4767 self._math._invert_phase(self)
4768 self._add_history("invert_phase", varlist)
4769 return
4770
4771 @asaplog_post_dec
4772 def add(self, offset, insitu=None):
4773 """\
4774 Return a scan where all spectra have the offset added
4775
4776 Parameters:
4777
4778 offset: the offset
4779
4780 insitu: if False a new scantable is returned.
4781 Otherwise, the scaling is done in-situ
4782 The default is taken from .asaprc (False)
4783
4784 """
4785 if insitu is None: insitu = rcParams['insitu']
4786 self._math._setinsitu(insitu)
4787 varlist = vars()
4788 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4789 s._add_history("add", varlist)
4790 if insitu:
4791 self._assign(s)
4792 else:
4793 return s
4794
4795 @asaplog_post_dec
4796 def scale(self, factor, tsys=True, insitu=None):
4797 """\
4798
4799 Return a scan where all spectra are scaled by the given 'factor'
4800
4801 Parameters:
4802
4803 factor: the scaling factor (float or 1D float list)
4804
4805 insitu: if False a new scantable is returned.
4806 Otherwise, the scaling is done in-situ
4807 The default is taken from .asaprc (False)
4808
4809 tsys: if True (default) then apply the operation to Tsys
4810 as well as the data
4811
4812 """
4813 if insitu is None: insitu = rcParams['insitu']
4814 self._math._setinsitu(insitu)
4815 varlist = vars()
4816 s = None
4817 import numpy
4818 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4819 if isinstance(factor[0], list) or isinstance(factor[0],
4820 numpy.ndarray):
4821 from asapmath import _array2dOp
4822 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4823 else:
4824 s = scantable( self._math._arrayop( self, factor,
4825 "MUL", tsys ) )
4826 else:
4827 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4828 s._add_history("scale", varlist)
4829 if insitu:
4830 self._assign(s)
4831 else:
4832 return s
4833
4834 @preserve_selection
4835 def set_sourcetype(self, match, matchtype="pattern",
4836 sourcetype="reference"):
4837 """\
4838 Set the type of the source to be an source or reference scan
4839 using the provided pattern.
4840
4841 Parameters:
4842
4843 match: a Unix style pattern, regular expression or selector
4844
4845 matchtype: 'pattern' (default) UNIX style pattern or
4846 'regex' regular expression
4847
4848 sourcetype: the type of the source to use (source/reference)
4849
4850 """
4851 varlist = vars()
4852 stype = -1
4853 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4854 stype = 1
4855 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4856 stype = 0
4857 else:
4858 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4859 if matchtype.lower().startswith("p"):
4860 matchtype = "pattern"
4861 elif matchtype.lower().startswith("r"):
4862 matchtype = "regex"
4863 else:
4864 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4865 sel = selector()
4866 if isinstance(match, selector):
4867 sel = match
4868 else:
4869 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4870 self.set_selection(sel)
4871 self._setsourcetype(stype)
4872 self._add_history("set_sourcetype", varlist)
4873
4874
4875 def set_sourcename(self, name):
4876 varlist = vars()
4877 self._setsourcename(name)
4878 self._add_history("set_sourcename", varlist)
4879
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # temporarily restrict the selection to the reference ('off')
            # scans, snapshot them, then do the same for the 'on' scans;
            # @preserve_selection restores the original selection afterwards
            from asap._asap import srctype
            sel = self.get_selection()
            #sel.set_query("SRCTYPE==psoff")
            sel.set_types(srctype.psoff)
            self.set_selection(sel)
            offs = self.copy()
            #sel.set_query("SRCTYPE==pson")
            sel.set_types(srctype.pson)
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # the C++ layer pairs each 'on' with the closest 'off' in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
4931
4932 @asaplog_post_dec
4933 def mx_quotient(self, mask = None, weight='median', preserve=True):
4934 """\
4935 Form a quotient using "off" beams when observing in "MX" mode.
4936
4937 Parameters:
4938
4939 mask: an optional mask to be used when weight == 'stddev'
4940
4941 weight: How to average the off beams. Default is 'median'.
4942
4943 preserve: you can preserve (default) the continuum or
4944 remove it. The equations used are:
4945
4946 preserve: Output = Toff * (on/off) - Toff
4947
4948 remove: Output = Toff * (on/off) - Ton
4949
4950 """
4951 mask = mask or ()
4952 varlist = vars()
4953 on = scantable(self._math._mx_extract(self, 'on'))
4954 preoff = scantable(self._math._mx_extract(self, 'off'))
4955 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4956 from asapmath import quotient
4957 q = quotient(on, off, preserve)
4958 q._add_history("mx_quotient", varlist)
4959 return q
4960
4961 @asaplog_post_dec
4962 def freq_switch(self, insitu=None):
4963 """\
4964 Apply frequency switching to the data.
4965
4966 Parameters:
4967
4968 insitu: if False a new scantable is returned.
4969 Otherwise, the swictching is done in-situ
4970 The default is taken from .asaprc (False)
4971
4972 """
4973 if insitu is None: insitu = rcParams['insitu']
4974 self._math._setinsitu(insitu)
4975 varlist = vars()
4976 s = scantable(self._math._freqswitch(self))
4977 s._add_history("freq_switch", varlist)
4978 if insitu:
4979 self._assign(s)
4980 else:
4981 return s
4982
4983 @asaplog_post_dec
4984 def recalc_azel(self):
4985 """Recalculate the azimuth and elevation for each position."""
4986 varlist = vars()
4987 self._recalcazel()
4988 self._add_history("recalc_azel", varlist)
4989 return
4990
4991 @asaplog_post_dec
4992 def __add__(self, other):
4993 """
4994 implicit on all axes and on Tsys
4995 """
4996 varlist = vars()
4997 s = self.__op( other, "ADD" )
4998 s._add_history("operator +", varlist)
4999 return s
5000
5001 @asaplog_post_dec
5002 def __sub__(self, other):
5003 """
5004 implicit on all axes and on Tsys
5005 """
5006 varlist = vars()
5007 s = self.__op( other, "SUB" )
5008 s._add_history("operator -", varlist)
5009 return s
5010
5011 @asaplog_post_dec
5012 def __mul__(self, other):
5013 """
5014 implicit on all axes and on Tsys
5015 """
5016 varlist = vars()
5017 s = self.__op( other, "MUL" ) ;
5018 s._add_history("operator *", varlist)
5019 return s
5020
5021
5022 @asaplog_post_dec
5023 def __div__(self, other):
5024 """
5025 implicit on all axes and on Tsys
5026 """
5027 varlist = vars()
5028 s = self.__op( other, "DIV" )
5029 s._add_history("operator /", varlist)
5030 return s
5031
5032 @asaplog_post_dec
5033 def __op( self, other, mode ):
5034 s = None
5035 if isinstance(other, scantable):
5036 s = scantable(self._math._binaryop(self, other, mode))
5037 elif isinstance(other, float):
5038 if other == 0.0:
5039 raise ZeroDivisionError("Dividing by zero is not recommended")
5040 s = scantable(self._math._unaryop(self, other, mode, False))
5041 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
5042 if isinstance(other[0], list) \
5043 or isinstance(other[0], numpy.ndarray):
5044 from asapmath import _array2dOp
5045 s = _array2dOp( self, other, mode, False )
5046 else:
5047 s = scantable( self._math._arrayop( self, other,
5048 mode, False ) )
5049 else:
5050 raise TypeError("Other input is not a scantable or float value")
5051 return s
5052
5053 @asaplog_post_dec
5054 def get_fit(self, row=0):
5055 """\
5056 Print or return the stored fits for a row in the scantable
5057
5058 Parameters:
5059
5060 row: the row which the fit has been applied to.
5061
5062 """
5063 if row > self.nrow():
5064 return
5065 from asap.asapfit import asapfit
5066 fit = asapfit(self._getfit(row))
5067 asaplog.push( '%s' %(fit) )
5068 return fit.as_dict()
5069
5070 @preserve_selection
5071 def flag_nans(self):
5072 """\
5073 Utility function to flag NaN values in the scantable.
5074 """
5075 import numpy
5076 basesel = self.get_selection()
5077 for i in range(self.nrow()):
5078 sel = self.get_row_selector(i)
5079 self.set_selection(basesel+sel)
5080 nans = numpy.isnan(self._getspectrum(0))
5081 if numpy.any(nans):
5082 bnans = [ bool(v) for v in nans]
5083 self.flag(bnans)
5084
5085 self.set_selection(basesel)
5086
5087 def get_row_selector(self, rowno):
5088 return selector(rows=[rowno])
5089
5090 def _add_history(self, funcname, parameters):
5091 if not rcParams['scantable.history']:
5092 return
5093 # create date
5094 sep = "##"
5095 from datetime import datetime
5096 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5097 hist = dstr+sep
5098 hist += funcname+sep#cdate+sep
5099 if parameters.has_key('self'):
5100 del parameters['self']
5101 for k, v in parameters.iteritems():
5102 if type(v) is dict:
5103 for k2, v2 in v.iteritems():
5104 hist += k2
5105 hist += "="
5106 if isinstance(v2, scantable):
5107 hist += 'scantable'
5108 elif k2 == 'mask':
5109 if isinstance(v2, list) or isinstance(v2, tuple):
5110 hist += str(self._zip_mask(v2))
5111 else:
5112 hist += str(v2)
5113 else:
5114 hist += str(v2)
5115 else:
5116 hist += k
5117 hist += "="
5118 if isinstance(v, scantable):
5119 hist += 'scantable'
5120 elif k == 'mask':
5121 if isinstance(v, list) or isinstance(v, tuple):
5122 hist += str(self._zip_mask(v))
5123 else:
5124 hist += str(v)
5125 else:
5126 hist += str(v)
5127 hist += sep
5128 hist = hist[:-2] # remove trailing '##'
5129 self._addhistory(hist)
5130
5131
5132 def _zip_mask(self, mask):
5133 mask = list(mask)
5134 i = 0
5135 segments = []
5136 while mask[i:].count(1):
5137 i += mask[i:].index(1)
5138 if mask[i:].count(0):
5139 j = i + mask[i:].index(0)
5140 else:
5141 j = len(mask)
5142 segments.append([i, j])
5143 i = j
5144 return segments
5145
5146 def _get_ordinate_label(self):
5147 fu = "("+self.get_fluxunit()+")"
5148 import re
5149 lbl = "Intensity"
5150 if re.match(".K.", fu):
5151 lbl = "Brightness Temperature "+ fu
5152 elif re.match(".Jy.", fu):
5153 lbl = "Flux density "+ fu
5154 return lbl
5155
5156 def _check_ifs(self):
5157# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5158 nchans = [self.nchan(i) for i in self.getifnos()]
5159 nchans = filter(lambda t: t > 0, nchans)
5160 return (sum(nchans)/len(nchans) == nchans[0])
5161
5162 @asaplog_post_dec
5163 def _fill(self, names, unit, average, opts={}):
5164 first = True
5165 fullnames = []
5166 for name in names:
5167 name = os.path.expandvars(name)
5168 name = os.path.expanduser(name)
5169 if not os.path.exists(name):
5170 msg = "File '%s' does not exists" % (name)
5171 raise IOError(msg)
5172 fullnames.append(name)
5173 if average:
5174 asaplog.push('Auto averaging integrations')
5175 stype = int(rcParams['scantable.storage'].lower() == 'disk')
5176 for name in fullnames:
5177 tbl = Scantable(stype)
5178 if is_ms( name ):
5179 r = msfiller( tbl )
5180 else:
5181 r = filler( tbl )
5182 msg = "Importing %s..." % (name)
5183 asaplog.push(msg, False)
5184 r.open(name, opts)
5185 rx = rcParams['scantable.reference']
5186 r.setreferenceexpr(rx)
5187 r.fill()
5188 if average:
5189 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
5190 if not first:
5191 tbl = self._math._merge([self, tbl])
5192 Scantable.__init__(self, tbl)
5193 r.close()
5194 del r, tbl
5195 first = False
5196 #flush log
5197 asaplog.post()
5198 if unit is not None:
5199 self.set_fluxunit(unit)
5200 if not is_casapy():
5201 self.set_freqframe(rcParams['scantable.freqframe'])
5202
5203 def _get_verify_action( self, msg, action=None ):
5204 valid_act = ['Y', 'N', 'A', 'R']
5205 if not action or not isinstance(action, str):
5206 action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
5207 if action == '':
5208 return "Y"
5209 elif (action.upper()[0] in valid_act):
5210 return action.upper()[0]
5211 elif (action.upper()[0] in ['H','?']):
5212 print "Available actions of verification [Y|n|a|r]"
5213 print " Y : Yes for current data (default)"
5214 print " N : No for current data"
5215 print " A : Accept all in the following and exit from verification"
5216 print " R : Reject all in the following and exit from verification"
5217 print " H or ?: help (show this message)"
5218 return self._get_verify_action(msg)
5219 else:
5220 return 'Y'
5221
5222 def __getitem__(self, key):
5223 if key < 0:
5224 key += self.nrow()
5225 if key >= self.nrow():
5226 raise IndexError("Row index out of range.")
5227 return self._getspectrum(key)
5228
5229 def __setitem__(self, key, value):
5230 if key < 0:
5231 key += self.nrow()
5232 if key >= self.nrow():
5233 raise IndexError("Row index out of range.")
5234 if not hasattr(value, "__len__") or \
5235 len(value) > self.nchan(self.getif(key)):
5236 raise ValueError("Spectrum length doesn't match.")
5237 return self._setspectrum(value, key)
5238
    def __len__(self):
        """Return the number of rows, so len(scan) works."""
        return self.nrow()
5241
5242 def __iter__(self):
5243 for i in range(len(self)):
5244 yield self[i]
Note: See TracBrowser for help on using the repository browser.