source: trunk/python/scantable.py@ 2892

Last change on this file since 2892 was 2892, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: Yes

Module(s): sd

Description: modified parse_spw_selection() so that the original MOLECULE_ID column values are saved and restored finally.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 205.2 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The selection active on entry is saved, *func* is executed, and the
    saved selection is re-applied even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    A scantable is a table directory whose table.info first line is
    'Type = Scantable'.
    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    # only the first line of table.info carries the table type;
    # 'with' guarantees the handle is closed even if readline() raises
    with open(filename+'/table.info') as f:
        l = f.readline()
    # NOTE: the optional group means a bare 'Type = ' line also matches;
    # behaviour preserved from the original implementation.
    match_pattern = '^Type = (Scantable)? *$'
    return re.match(match_pattern, l) is not None
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    A MeasurementSet is a table directory whose table.info first line
    mentions 'Measurement Set'.
    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    # 'with' guarantees the handle is closed even if readline() raises
    with open(filename+'/table.info') as f:
        l = f.readline()
    return l.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

    If an element contains the same integer values, the input 'edge'
    parameter can be given in a simpler shape in the following cases:
        ** when len(edge)!=2
            any elements containing the same values can be replaced
            to single integers.
            e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
            e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
        ** when len(edge)=2
            care is needed for this case: ONLY ONE of the
            elements can be a single integer,
            e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
            or [[5,5],10], but can NOT be simplified to [5,10].
            when [5,10] given, it is interpreted as
            [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid

    # ---- validation -------------------------------------------------
    # NOTE: error messages are single clean strings now; the original
    # embedded line continuations inside the literals, which produced
    # long runs of spaces in the middle of the message text.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # ---- normalisation ----------------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [ edge[0], edge[0] ]      # e.g. [1] => [1,1]

    # a flat pair of integers applies to all IFs ("common" edge)
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # per-IF values: flatten, duplicating bare integers into pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when rcParams['verbose'] is set,
    otherwise re-raise it as a RuntimeError carrying the hint message."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-bar options into a single 'bool,minnrow' string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        # drop entries that pack to the empty string (row out of range)
        packed_all = [do_pack_blinfo(item, maxirow) for item in blinfo]
        return [p for p in packed_all if p != '']
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a single string: the fields listed below joined by ':', each field
    itself being a comma-separated list where it holds multiple values.
    Returns '' when the row number is >= maxirow.
    Raises ValueError when mandatory keys are missing or clipping /
    linefinder parameters are only partially given.
    """
    dinfo = {}
    # mandatory keys; lists/nested lists are flattened to "a,b,c" strings
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        # nested pair, e.g. masklist ranges
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the scantable's row count are silently skipped
    if irow >= maxirow: return ''

    # optional fit-order keys; only the one matching blfunc is used later
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    # map each baseline function to the parameter key it requires
    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        # defaults mean "no clipping"
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # fixed field order expected by the C++ parser downstream
    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    parsed = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        parsed.append({'row': int(srow),
                       'coeff': [float(c) for c in scoeff.split(",")],
                       'rms': float(srms)})
    return parsed
302
def is_number(s):
    """Return True if *s* (a string, after stripping) parses as a float.

    Non-string inputs (e.g. None) yield False rather than raising, as in
    the original implementation.
    """
    try:
        float(s.strip())
        return True
    # narrow except clauses instead of a bare 'except:' which also
    # swallowed KeyboardInterrupt/SystemExit; AttributeError covers
    # non-string inputs lacking .strip()
    except (ValueError, TypeError, AttributeError):
        return False
313
def is_frequency(s):
    """Return True when the stripped string ends in 'hz' (any case)."""
    return s.strip()[-2:].lower() == "hz"
317
def get_freq_by_string(s1, s2):
    """Parse a (number string, unit-suffixed frequency string) pair.

    The SI prefix found in *s2* (if any) is applied to both values;
    returns a tuple of two floats in the unit implied by that prefix.
    Raises RuntimeError when the strings do not validate.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefixes = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factors = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # character just before the trailing 'Hz' is the SI prefix (if any)
    head = s2[-3:-2]
    if is_number(head):
        # bare 'Hz' with no prefix
        return (float(s1), float(s2[:-2]))
    factor = factors[prefixes.index(head)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
338
def is_velocity(s):
    """Return True when the stripped string ends in 'm/s' (any case)."""
    return s.strip()[-3:].lower() == "m/s"
342
def get_velocity_by_string(s1, s2):
    """Parse a (number string, unit-suffixed velocity string) pair.

    Returns a tuple of two floats converted to km/s (the internal
    default velocity unit). Raises RuntimeError on invalid input.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefixes = [".", "k"]
    factors = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # character just before the trailing 'm/s' is the SI prefix (if any)
    head = s2[-4:-3]
    if is_number(head):
        # bare 'm/s' -- scale down to km/s
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factors[prefixes.index(head)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity to a frequency for the given rest frequency.

    Parameters:

        restfreq:  rest frequency (any unit; the result has the same unit)
        vel:       velocity in km/s
        doppler:   'radio', 'optical', or anything else for relativistic
    """
    import math

    vel_c = 299792.458  # speed of light in km/s
    r = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - r)
    elif convention == 'optical':
        return restfreq / (1.0 + r)
    else:
        return restfreq * math.sqrt((1.0 - r) / (1.0 + r))
380
def get_restfreq_in_Hz(s_restfreq):
    """Parse a rest-frequency string such as '115.271GHz' into Hz.

    The string is a number optionally followed by a unit made of an SI
    prefix plus 'Hz' (case-insensitive); spaces are ignored. A missing
    unit is taken to mean the value is already in Hz.

    Raises Exception for an unrecognized unit.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ","")

    # walk backwards collecting the trailing alphabetic unit; everything
    # before it is the numeric value
    for i in reversed(range(len(s))):
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        # single-character SI prefix followed by 'Hz'
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value*factor
    else:
        mesg = "wrong unit of restfreq."
        raise Exception(mesg)
432
def normalise_restfreq(in_restfreq):
    """\
    Normalise a rest-frequency specification into plain Hz value(s).

    Accepts a float/int/long (already in Hz), a unit-suffixed string
    (e.g. '115.27GHz'), or a list / 1-D numpy.ndarray whose elements are
    any of the above or dicts of the form {'name': ..., 'value': ...}.
    Returns a float, or a list mirroring the input structure.
    Raises Exception for unsupported types or multi-dimensional arrays.
    """
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        # string carries its unit, e.g. '1.42GHz'
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):
        if isinstance(in_restfreq, numpy.ndarray):
            if len(in_restfreq.shape) > 1:
                mesg = "given in numpy.ndarray, in_restfreq must be 1-D."
                raise Exception, mesg

        res = []
        for i in xrange(len(in_restfreq)):
            elem = in_restfreq[i]
            # each element may itself be a number, a string, or a dict
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, int) or isinstance(elem, long):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                # dict elements keep their 'name'; only 'value' is normalised
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    dictelem = {}
                    dictelem["name"] = elem["name"]
                    dictelem["value"] = float(elem["value"])
                    res.append(dictelem)
                elif isinstance(elem["value"], str):
                    dictelem = {}
                    dictelem["name"] = elem["name"]
                    dictelem["value"] = get_restfreq_in_Hz(elem["value"])
                    res.append(dictelem)
            else:
                mesg = "restfreq elements must be float, int, or string."
                raise Exception, mesg
        return res
    else:
        mesg = "wrong type of restfreq given."
        raise Exception, mesg
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*, skipping '' and [] (meaning
    'leave the rest frequencies unchanged')."""
    if (restfreq != '') and (restfreq != []):
        s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
486 @asaplog_post_dec
487 def __init__(self, filename, average=None, unit=None, parallactify=None,
488 **args):
489 """\
490 Create a scantable from a saved one or make a reference
491
492 Parameters:
493
494 filename: the name of an asap table on disk
495 or
496 the name of a rpfits/sdfits/ms file
497 (integrations within scans are auto averaged
498 and the whole file is read) or
499 [advanced] a reference to an existing scantable
500
501 average: average all integrations withinb a scan on read.
502 The default (True) is taken from .asaprc.
503
504 unit: brightness unit; must be consistent with K or Jy.
505 Over-rides the default selected by the filler
506 (input rpfits/sdfits/ms) or replaces the value
507 in existing scantables
508
509 antenna: for MeasurementSet input data only:
510 Antenna selection. integer (id) or string
511 (name or id).
512
513 parallactify: Indicate that the data had been parallactified.
514 Default (false) is taken from rc file.
515
516 getpt: Whether to import direction from MS/POINTING
517 table properly or not.
518 This is effective only when filename is MS.
519 The default (True) is to import direction
520 from MS/POINTING.
521 """
522 if average is None:
523 average = rcParams['scantable.autoaverage']
524 parallactify = parallactify or rcParams['scantable.parallactify']
525 varlist = vars()
526 from asap._asap import stmath
527 self._math = stmath( rcParams['insitu'] )
528 if isinstance(filename, Scantable):
529 Scantable.__init__(self, filename)
530 else:
531 if isinstance(filename, str):
532 filename = os.path.expandvars(filename)
533 filename = os.path.expanduser(filename)
534 if not os.path.exists(filename):
535 s = "File '%s' not found." % (filename)
536 raise IOError(s)
537 if is_scantable(filename):
538 ondisk = rcParams['scantable.storage'] == 'disk'
539 Scantable.__init__(self, filename, ondisk)
540 if unit is not None:
541 self.set_fluxunit(unit)
542 if average:
543 self._assign( self.average_time( scanav=True ) )
544 # do not reset to the default freqframe
545 #self.set_freqframe(rcParams['scantable.freqframe'])
546 elif is_ms(filename):
547 # Measurement Set
548 opts={'ms': {}}
549 mskeys=['getpt','antenna']
550 for key in mskeys:
551 if key in args.keys():
552 opts['ms'][key] = args[key]
553 self._fill([filename], unit, average, opts)
554 elif os.path.isfile(filename):
555 opts={'nro': {}}
556 nrokeys=['freqref']
557 for key in nrokeys:
558 if key in args.keys():
559 opts['nro'][key] = args[key]
560 self._fill([filename], unit, average, opts)
561 # only apply to new data not "copy constructor"
562 self.parallactify(parallactify)
563 else:
564 msg = "The given file '%s'is not a valid " \
565 "asap table." % (filename)
566 raise IOError(msg)
567 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
568 and isinstance(filename[-1], str):
569 self._fill(filename, unit, average)
570 self.parallactify(parallactify)
571 self._add_history("scantable", varlist)
572
573 @asaplog_post_dec
574 def save(self, name=None, format=None, overwrite=False):
575 """\
576 Store the scantable on disk. This can be an asap (aips++) Table,
577 SDFITS or MS2 format.
578
579 Parameters:
580
581 name: the name of the outputfile. For format 'ASCII'
582 this is the root file name (data in 'name'.txt
583 and header in 'name'_header.txt)
584
585 format: an optional file format. Default is ASAP.
586 Allowed are:
587
588 * 'ASAP' (save as ASAP [aips++] Table),
589 * 'SDFITS' (save as SDFITS file)
590 * 'ASCII' (saves as ascii text file)
591 * 'MS2' (saves as an casacore MeasurementSet V2)
592 * 'FITS' (save as image FITS - not readable by
593 class)
594 * 'CLASS' (save as FITS readable by CLASS)
595
596 overwrite: If the file should be overwritten if it exists.
597 The default False is to return with warning
598 without writing the output. USE WITH CARE.
599
600 Example::
601
602 scan.save('myscan.asap')
603 scan.save('myscan.sdfits', 'SDFITS')
604
605 """
606 from os import path
607 format = format or rcParams['scantable.save']
608 suffix = '.'+format.lower()
609 if name is None or name == "":
610 name = 'scantable'+suffix
611 msg = "No filename given. Using default name %s..." % name
612 asaplog.push(msg)
613 name = path.expandvars(name)
614 if path.isfile(name) or path.isdir(name):
615 if not overwrite:
616 msg = "File %s exists." % name
617 raise IOError(msg)
618 format2 = format.upper()
619 if format2 == 'ASAP':
620 self._save(name)
621 elif format2 == 'MS2':
622 msopt = {'ms': {'overwrite': overwrite } }
623 from asap._asap import mswriter
624 writer = mswriter( self )
625 writer.write( name, msopt )
626 else:
627 from asap._asap import stwriter as stw
628 writer = stw(format2)
629 writer.write(self, name)
630 return
631
632 def copy(self):
633 """Return a copy of this scantable.
634
635 *Note*:
636
637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
638
639 Example::
640
641 copiedscan = scan.copy()
642
643 """
644 sd = scantable(Scantable._copy(self))
645 return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
678 def get_scan(self, scanid=None):
679 """\
680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
682
683 *Note*:
684
685 See scantable.drop_scan() for the inverse operation.
686
687 Parameters:
688
689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
692
693 Example::
694
695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
700 newscan = scan.get_scan([0, 2, 7, 10])
701
702 """
703 if scanid is None:
704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
706 try:
707 bsel = self.get_selection()
708 sel = selector()
709 if type(scanid) is str:
710 sel.set_name(scanid)
711 return self._select_copy(sel)
712 elif type(scanid) is int:
713 sel.set_scans([scanid])
714 return self._select_copy(sel)
715 elif type(scanid) is list:
716 sel.set_scans(scanid)
717 return self._select_copy(sel)
718 else:
719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
720 raise TypeError(msg)
721 except RuntimeError:
722 raise
723
724 def __str__(self):
725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
754 def get_spectrum(self, rowno):
755 """Return the spectrum for the current row in the scantable as a list.
756
757 Parameters:
758
759 rowno: the row number to retrieve the spectrum from
760
761 """
762 return self._getspectrum(rowno)
763
764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
766
767 Parameters:
768
769 rowno: the row number to retrieve the mask from
770
771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
789
790 *Note*:
791
792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
798 i.e.::
799
800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
803 Parameters:
804
805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
810 def get_selection(self):
811 """\
812 Get the selection object currently set on this scantable.
813
814 Example::
815
816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
819
820 """
821 return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' variants return the abscissa (channel/freq) of the extremum
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max stats also need the channel of the extremum for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        # for '_abc' stats the values are collected per-row in the loop below
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): 'rows' is undefined when row is neither None nor
        # an int -- callers are expected to pass one of those two.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the formatted report through a temp file into asaplog
        # NOTE(review): 'statunit' leaks out of the loop above and is
        # undefined when the scantable has no rows -- confirm callers
        # never hit that case.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
1002 """\
1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
1005
1006 Parameters:
1007
1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
1010
1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
1013
1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
1022 def stddev(self, mask=None):
1023 """\
1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
1027
1028 Parameters:
1029
1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
1033 Example::
1034
1035 scan.set_unit('channel')
1036 msk = scan.create_mask([100, 200], [500, 600])
1037 scan.stddev(mask=m)
1038
1039 """
1040 return self.stats(stat='stddev', mask=mask);
1041
1042
1043 def get_column_names(self):
1044 """\
1045 Return a list of column names, which can be used for selection.
1046 """
1047 return list(Scantable.get_column_names(self))
1048
1049 def get_tsys(self, row=-1):
1050 """\
1051 Return the System temperatures.
1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
1057 Returns:
1058
1059 a list of Tsys values for the current selection
1060
1061 """
1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
1064 return self._row_callback(self._gettsys, "Tsys")
1065
1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
1069
1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
1098 def get_weather(self, row=-1):
1099 """\
1100 Return the weather informations.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111
1112 values = self._get_column(self._get_weather, row)
1113 if row > -1:
1114 return {'temperature': values[0],
1115 'pressure': values[1], 'humidity' : values[2],
1116 'windspeed' : values[3], 'windaz' : values[4]
1117 }
1118 else:
1119 out = []
1120 for r in values:
1121
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
1128 def _row_callback(self, callback, label):
1129 out = ""
1130 outvec = []
1131 sep = '-'*50
1132 for i in range(self.nrow()):
1133 tm = self._gettime(i)
1134 src = self._getsourcename(i)
1135 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1136 out += 'Time[%s]:\n' % (tm)
1137 if self.nbeam(-1) > 1:
1138 out += ' Beam[%d] ' % (self.getbeam(i))
1139 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1140 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1141 outvec.append(callback(i))
1142 out += '= %3.3f\n' % (outvec[i])
1143 out += sep+'\n'
1144
1145 asaplog.push(sep)
1146 asaplog.push(" %s" % (label))
1147 asaplog.push(sep)
1148 asaplog.push(out)
1149 asaplog.post()
1150 return outvec
1151
1152 def _get_column(self, callback, row=-1, *args):
1153 """
1154 """
1155 if row == -1:
1156 return [callback(i, *args) for i in range(self.nrow())]
1157 else:
1158 if 0 <= row < self.nrow():
1159 return callback(row, *args)
1160
1161
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time of the selection)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # Build a strptime format matching the string precision: strip the
        # trailing sub-fields (.%f, then %S, then %M) that the requested
        # precision does not cover.
        # NOTE(review): (6-prec)/2 relies on Python-2 integer division;
        # under Python 3 this would produce a float and range() would fail.
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1207
1208
1209 def get_inttime(self, row=-1):
1210 """\
1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
1213
1214 Parameters:
1215
1216 row: row no of integration. Default -1 return all rows.
1217
1218 """
1219 return self._get_column(self._getinttime, row)
1220
1221
1222 def get_sourcename(self, row=-1):
1223 """\
1224 Get a list source names for the observations.
1225 Return a string for each integration in the scantable.
1226 Parameters:
1227
1228 row: row no of integration. Default -1 return all rows.
1229
1230 """
1231 return self._get_column(self._getsourcename, row)
1232
1233 def get_elevation(self, row=-1):
1234 """\
1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
1237
1238 Parameters:
1239
1240 row: row no of integration. Default -1 return all rows.
1241
1242 """
1243 return self._get_column(self._getelevation, row)
1244
1245 def get_azimuth(self, row=-1):
1246 """\
1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
1249
1250 Parameters:
1251 row: row no of integration. Default -1 return all rows.
1252
1253 """
1254 return self._get_column(self._getazimuth, row)
1255
1256 def get_parangle(self, row=-1):
1257 """\
1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
1260
1261 Parameters:
1262
1263 row: row no of integration. Default -1 return all rows.
1264
1265 """
1266 return self._get_column(self._getparangle, row)
1267
1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
1271 Return a string for each integration in the scantable.
1272
1273 Parameters:
1274
1275 row: row no of integration. Default -1 return all rows
1276
1277 """
1278 return self._get_column(self._getdirection, row)
1279
1280 def get_directionval(self, row=-1):
1281 """\
1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
1284
1285 Parameters:
1286
1287 row: row no of integration. Default -1 return all rows
1288
1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit: optional unit, default is 'channel'. Use one of '*Hz',
                  'km/s', 'channel' or equivalent ''

        """
        varlist = vars()  # argument snapshot recorded in the history entry
        # '', 'pixel' and 'channel' are synonyms for the default unit ''
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        inf[0] = unit  # slot 0 of the coordinate info holds the axis unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
1310
1311 @asaplog_post_dec
1312 def set_instrument(self, instr):
1313 """\
1314 Set the instrument for subsequent processing.
1315
1316 Parameters:
1317
1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1320
1321 """
1322 self._setInstrument(instr)
1323 self._add_history("set_instument", vars())
1324
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:    'linear' or 'circular'

        """
        # delegate to the C++ layer, then record the call in the history
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
1337
1338 @asaplog_post_dec
1339 def set_doppler(self, doppler='RADIO'):
1340 """\
1341 Set the doppler for all following operations on this scantable.
1342
1343 Parameters:
1344
1345 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1346
1347 """
1348 varlist = vars()
1349 inf = list(self._getcoordinfo())
1350 inf[2] = doppler
1351 self._setcoordinfo(inf)
1352 self._add_history("set_doppler", vars())
1353
1354 @asaplog_post_dec
1355 def set_freqframe(self, frame=None):
1356 """\
1357 Set the frame type of the Spectral Axis.
1358
1359 Parameters:
1360
1361 frame: an optional frame type, default 'LSRK'. Valid frames are:
1362 'TOPO', 'LSRD', 'LSRK', 'BARY',
1363 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1364
1365 Example::
1366
1367 scan.set_freqframe('BARY')
1368
1369 """
1370 frame = frame or rcParams['scantable.freqframe']
1371 varlist = vars()
1372 # "REST" is not implemented in casacore
1373 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1374 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1375 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1376 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1377
1378 if frame in valid:
1379 inf = list(self._getcoordinfo())
1380 inf[1] = frame
1381 self._setcoordinfo(inf)
1382 self._add_history("set_freqframe", varlist)
1383 else:
1384 msg = "Please specify a valid freq type. Valid types are:\n", valid
1385 raise TypeError(msg)
1386
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()  # argument snapshot recorded in the history entry
        # delegate to the C++ base class implementation
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
1405
1406 def get_unit(self):
1407 """\
1408 Get the default unit set in this scantable
1409
1410 Returns:
1411
1412 A unit string
1413
1414 """
1415 inf = self._getcoordinfo()
1416 unit = inf[0]
1417 if unit == '': unit = 'channel'
1418 return unit
1419
1420 @asaplog_post_dec
1421 def get_abcissa(self, rowno=0):
1422 """\
1423 Get the abcissa in the current coordinate setup for the currently
1424 selected Beam/IF/Pol
1425
1426 Parameters:
1427
1428 rowno: an optional row number in the scantable. Default is the
1429 first row, i.e. rowno=0
1430
1431 Returns:
1432
1433 The abcissa values and the format string (as a dictionary)
1434
1435 """
1436 abc = self._getabcissa(rowno)
1437 lbl = self._getabcissalabel(rowno)
1438 return abc, lbl
1439
    @asaplog_post_dec
    def flag(self, mask=None, unflag=False, row=-1):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            mask:      an optional channel mask, created with create_mask. Default
                       (no mask) is all channels.

            unflag:    if True, unflag the data

            row:       an optional row number in the scantable.
                       Default -1 flags all rows

        """
        varlist = vars()  # argument snapshot recorded in the history entry
        # any false value (None, empty list) means 'no mask' -> all channels
        mask = mask or []
        self._flag(row, mask, unflag)
        self._add_history("flag", varlist)
1460
    @asaplog_post_dec
    def flag_row(self, rows=None, unflag=False):
        """\
        Flag the selected data in row-based manner.

        Parameters:

            rows:      list of row numbers to be flagged. Default is no row
                       (must be explicitly specified to execute row-based
                       flagging).

            unflag:    if True, unflag the data.

        """
        varlist = vars()  # argument snapshot recorded in the history entry
        # default: flag nothing - row flagging must be requested explicitly
        if rows is None:
            rows = []
        self._flag_row(rows, unflag)
        self._add_history("flag_row", varlist)
1480
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres:      upper threshold.

            dthres:      lower threshold

            clipoutside: True for flagging data outside the range
                         [dthres:uthres].
                         False for flagging data inside the range.

            unflag:      if True, unflag the data.

        """
        varlist = vars()  # argument snapshot recorded in the history entry
        # the actual clipping is performed by the C++ layer
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1502
1503 @asaplog_post_dec
1504 def lag_flag(self, start, end, unit="MHz", insitu=None):
1505 """\
1506 Flag the data in 'lag' space by providing a frequency to remove.
1507 Flagged data in the scantable get interpolated over the region.
1508 No taper is applied.
1509
1510 Parameters:
1511
1512 start: the start frequency (really a period within the
1513 bandwidth) or period to remove
1514
1515 end: the end frequency or period to remove
1516
1517 unit: the frequency unit (default 'MHz') or '' for
1518 explicit lag channels
1519
1520 *Notes*:
1521
1522 It is recommended to flag edges of the band or strong
1523 signals beforehand.
1524
1525 """
1526 if insitu is None: insitu = rcParams['insitu']
1527 self._math._setinsitu(insitu)
1528 varlist = vars()
1529 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1530 if not (unit == "" or base.has_key(unit)):
1531 raise ValueError("%s is not a valid unit." % unit)
1532 if unit == "":
1533 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1534 else:
1535 s = scantable(self._math._lag_flag(self, start*base[unit],
1536 end*base[unit], "frequency"))
1537 s._add_history("lag_flag", varlist)
1538 if insitu:
1539 self._assign(s)
1540 else:
1541 return s
1542
1543 @asaplog_post_dec
1544 def fft(self, rowno=None, mask=None, getrealimag=False):
1545 """\
1546 Apply FFT to the spectra.
1547 Flagged data in the scantable get interpolated over the region.
1548
1549 Parameters:
1550
1551 rowno: The row number(s) to be processed. int, list
1552 and tuple are accepted. By default (None), FFT
1553 is applied to the whole data.
1554
1555 mask: Auxiliary channel mask(s). Given as a boolean
1556 list, it is applied to all specified rows.
1557 A list of boolean lists can also be used to
1558 apply different masks. In the latter case, the
1559 length of 'mask' must be the same as that of
1560 'rowno'. The default is None.
1561
1562 getrealimag: If True, returns the real and imaginary part
1563 values of the complex results.
1564 If False (the default), returns the amplitude
1565 (absolute value) normalised with Ndata/2 and
1566 phase (argument, in unit of radian).
1567
1568 Returns:
1569
1570 A list of dictionaries containing the results for each spectrum.
1571 Each dictionary contains two values, the real and the imaginary
1572 parts when getrealimag = True, or the amplitude(absolute value)
1573 and the phase(argument) when getrealimag = False. The key for
1574 these values are 'real' and 'imag', or 'ampl' and 'phase',
1575 respectively.
1576 """
1577 if rowno is None:
1578 rowno = []
1579 if isinstance(rowno, int):
1580 rowno = [rowno]
1581 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1582 raise TypeError("The row number(s) must be int, list or tuple.")
1583 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1584
1585 usecommonmask = True
1586
1587 if mask is None:
1588 mask = []
1589 if isinstance(mask, list) or isinstance(mask, tuple):
1590 if len(mask) == 0:
1591 mask = [[]]
1592 else:
1593 if isinstance(mask[0], bool):
1594 if len(mask) != self.nchan(self.getif(rowno[0])):
1595 raise ValueError("The spectra and the mask have "
1596 "different length.")
1597 mask = [mask]
1598 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1599 usecommonmask = False
1600 if len(mask) != len(rowno):
1601 raise ValueError("When specifying masks for each "
1602 "spectrum, the numbers of them "
1603 "must be identical.")
1604 for i in xrange(mask):
1605 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1606 raise ValueError("The spectra and the mask have "
1607 "different length.")
1608 else:
1609 raise TypeError("The mask must be a boolean list or "
1610 "a list of boolean list.")
1611 else:
1612 raise TypeError("The mask must be a boolean list or a list of "
1613 "boolean list.")
1614
1615 res = []
1616
1617 imask = 0
1618 for whichrow in rowno:
1619 fspec = self._fft(whichrow, mask[imask], getrealimag)
1620 nspec = len(fspec)
1621
1622 i = 0
1623 v1 = []
1624 v2 = []
1625 reselem = {"real":[],"imag":[]} if getrealimag \
1626 else {"ampl":[],"phase":[]}
1627
1628 while (i < nspec):
1629 v1.append(fspec[i])
1630 v2.append(fspec[i+1])
1631 i += 2
1632
1633 if getrealimag:
1634 reselem["real"] += v1
1635 reselem["imag"] += v2
1636 else:
1637 reselem["ampl"] += v1
1638 reselem["phase"] += v2
1639
1640 res.append(reselem)
1641
1642 if not usecommonmask:
1643 imask += 1
1644
1645 return res
1646
1647 @asaplog_post_dec
1648 def create_mask(self, *args, **kwargs):
1649 """\
1650 Compute and return a mask based on [min, max] windows.
1651 The specified windows are to be INCLUDED, when the mask is
1652 applied.
1653
1654 Parameters:
1655
1656 [min, max], [min2, max2], ...
1657 Pairs of start/end points (inclusive)specifying the regions
1658 to be masked
1659
1660 invert: optional argument. If specified as True,
1661 return an inverted mask, i.e. the regions
1662 specified are EXCLUDED
1663
1664 row: create the mask using the specified row for
1665 unit conversions, default is row=0
1666 only necessary if frequency varies over rows.
1667
1668 Examples::
1669
1670 scan.set_unit('channel')
1671 # a)
1672 msk = scan.create_mask([400, 500], [800, 900])
1673 # masks everything outside 400 and 500
1674 # and 800 and 900 in the unit 'channel'
1675
1676 # b)
1677 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1678 # masks the regions between 400 and 500
1679 # and 800 and 900 in the unit 'channel'
1680
1681 # c)
1682 #mask only channel 400
1683 msk = scan.create_mask([400])
1684
1685 """
1686 row = kwargs.get("row", 0)
1687 data = self._getabcissa(row)
1688 u = self._getcoordinfo()[0]
1689 if u == "":
1690 u = "channel"
1691 msg = "The current mask window unit is %s" % u
1692 i = self._check_ifs()
1693 if not i:
1694 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1695 asaplog.push(msg)
1696 n = len(data)
1697 msk = _n_bools(n, False)
1698 # test if args is a 'list' or a 'normal *args - UGLY!!!
1699
1700 ws = (isinstance(args[-1][-1], int)
1701 or isinstance(args[-1][-1], float)) and args or args[0]
1702 for window in ws:
1703 if len(window) == 1:
1704 window = [window[0], window[0]]
1705 if len(window) == 0 or len(window) > 2:
1706 raise ValueError("A window needs to be defined as "
1707 "[start(, end)]")
1708 if window[0] > window[1]:
1709 tmp = window[0]
1710 window[0] = window[1]
1711 window[1] = tmp
1712 for i in range(n):
1713 if data[i] >= window[0] and data[i] <= window[1]:
1714 msk[i] = True
1715 if kwargs.has_key('invert'):
1716 if kwargs.get('invert'):
1717 msk = mask_not(msk)
1718 return msk
1719
1720 def get_masklist(self, mask=None, row=0, silent=False):
1721 """\
1722 Compute and return a list of mask windows, [min, max].
1723
1724 Parameters:
1725
1726 mask: channel mask, created with create_mask.
1727
1728 row: calcutate the masklist using the specified row
1729 for unit conversions, default is row=0
1730 only necessary if frequency varies over rows.
1731
1732 Returns:
1733
1734 [min, max], [min2, max2], ...
1735 Pairs of start/end points (inclusive)specifying
1736 the masked regions
1737
1738 """
1739 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1740 raise TypeError("The mask should be list or tuple.")
1741 if len(mask) <= 0:
1742 raise TypeError("The mask elements should be > 0")
1743 data = self._getabcissa(row)
1744 if len(data) != len(mask):
1745 msg = "Number of channels in scantable != number of mask elements"
1746 raise TypeError(msg)
1747 u = self._getcoordinfo()[0]
1748 if u == "":
1749 u = "channel"
1750 msg = "The current mask window unit is %s" % u
1751 i = self._check_ifs()
1752 if not i:
1753 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1754 if not silent:
1755 asaplog.push(msg)
1756 masklist = []
1757 ist, ien = None, None
1758 ist, ien=self.get_mask_indices(mask)
1759 if ist is not None and ien is not None:
1760 for i in xrange(len(ist)):
1761 range=[data[ist[i]],data[ien[i]]]
1762 range.sort()
1763 masklist.append([range[0],range[1]])
1764 return masklist
1765
1766 def get_mask_indices(self, mask=None):
1767 """\
1768 Compute and Return lists of mask start indices and mask end indices.
1769
1770 Parameters:
1771
1772 mask: channel mask, created with create_mask.
1773
1774 Returns:
1775
1776 List of mask start indices and that of mask end indices,
1777 i.e., [istart1,istart2,....], [iend1,iend2,....].
1778
1779 """
1780 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1781 raise TypeError("The mask should be list or tuple.")
1782 if len(mask) <= 0:
1783 raise TypeError("The mask elements should be > 0")
1784 istart = []
1785 iend = []
1786 if mask[0]:
1787 istart.append(0)
1788 for i in range(len(mask)-1):
1789 if not mask[i] and mask[i+1]:
1790 istart.append(i+1)
1791 elif mask[i] and not mask[i+1]:
1792 iend.append(i)
1793 if mask[len(mask)-1]:
1794 iend.append(len(mask)-1)
1795 if len(istart) != len(iend):
1796 raise RuntimeError("Numbers of mask start != mask end.")
1797 for i in range(len(istart)):
1798 if istart[i] > iend[i]:
1799 raise RuntimeError("Mask start index > mask end index")
1800 break
1801 return istart,iend
1802
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # Remember the current settings; they are restored before returning.
        # NOTE(review): there is no try/finally here, so an exception raised
        # while parsing leaves the unit/restfreq/frame/doppler modified.
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)
        # (the following triple-quoted block is dead code kept for reference)
        """
        if frame is None: frame = orig_frame
        self.set_freqframe(frame)

        if doppler is None: doppler = orig_doppler
        self.set_doppler(doppler)
        """

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # each length is 1 (separator absent) or 2 (appears once);
                # a product of 2 means exactly one separator was used
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                # non-positive product <=> the intervals overlap
                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                # non-positive product <=> the intervals overlap
                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    restf = self.get_restfreqs().values()[0][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, doppler)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, doppler)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                # python-2 idiom (dict.has_key)
                if res.has_key(spw):
                    res[spw].extend(crange_list)
                else:
                    res[spw] = crange_list

        # NOTE(review): deleting while iterating res.keys() is safe in
        # python 2 only because keys() returns a fresh list there.
        for spw in res.keys():
            if spw not in valid_ifs:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)

        return res
2153
2154 @asaplog_post_dec
2155 def get_first_rowno_by_if(self, ifno):
2156 found = False
2157 for irow in xrange(self.nrow()):
2158 if (self.getif(irow) == ifno):
2159 res = irow
2160 found = True
2161 break
2162
2163 if not found: raise RuntimeError("Invalid IF value.")
2164
2165 return res
2166
2167 @asaplog_post_dec
2168 def _get_coordinate_list(self):
2169 res = []
2170 spws = self.getifnos()
2171 for spw in spws:
2172 elem = {}
2173 elem['if'] = spw
2174 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2175 res.append(elem)
2176
2177 return res
2178
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): after posting "ERROR" execution continues; presumably
        # the asaplog_post_dec decorator turns the posted error into an
        # exception — confirm before relying on it.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # keep only IF numbers that actually exist in the table
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to read its axis
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset = half a channel width in the current unit,
                        # used to make '<'/'>' selections exclusive
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None (whole spectrum) or a [min, max] pair
                        # possibly containing None for open-ended selections
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            # dict.has_key: this file is Python 2 only
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2316
2317 @asaplog_post_dec
2318 def parse_idx_selection(self, mode, selexpr):
2319 """
2320 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2321 BEAMNO, and row number
2322
2323 Parameters:
2324 mode : which column to select.
2325 ['scan',|'if'|'pol'|'beam'|'row']
2326 selexpr : A comma separated selection expression.
2327 examples:
2328 '' = all (returns [])
2329 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2330 (returns [0,1,4,5,6,9])
2331 Returns:
2332 A List of selected indices
2333 """
2334 if selexpr == "":
2335 return []
2336 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2337 'b': 'beam', 'r': 'row'}
2338 smode = mode.lower()[0]
2339 if not (smode in valid_modes.keys()):
2340 msg = "Invalid mode '%s'. Valid modes are %s" %\
2341 (mode, str(valid_modes.values()))
2342 asaplog.post()
2343 asaplog.push(msg)
2344 asaplog.post("ERROR")
2345 mode = valid_modes[smode]
2346 minidx = None
2347 maxidx = None
2348 if smode == 'r':
2349 minidx = 0
2350 maxidx = self.nrow()
2351 else:
2352 idx = getattr(self,"get"+mode+"nos")()
2353 minidx = min(idx)
2354 maxidx = max(idx)
2355 del idx
2356 # split selexpr by "<spaces>,<spaces>"
2357 comma_sep = re.compile('\s*,\s*')
2358 sellist = comma_sep.split(selexpr)
2359 idxlist = []
2360 for currselstr in sellist:
2361 # single range (may include ~, < or >)
2362 currlist = self._parse_selection(currselstr, typestr='integer',
2363 minval=minidx,maxval=maxidx)
2364 for thelist in currlist:
2365 idxlist += range(thelist[0],thelist[1]+1)
2366 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2367 asaplog.push(msg)
2368 return idxlist
2369
2370 def _parse_selection(self, selstr, typestr='float', offset=0.,
2371 minval=None, maxval=None):
2372 """
2373 Parameters:
2374 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2375 typestr : The type of the values in returned list
2376 ('integer' or 'float')
2377 offset : The offset value to subtract from or add to
2378 the boundary value if the selection string
2379 includes '<' or '>' [Valid only for typestr='float']
2380 minval, maxval : The minimum/maximum values to set if the
2381 selection string includes '<' or '>'.
2382 The list element is filled with None by default.
2383 Returns:
2384 A list of min/max pair of selections.
2385 Example:
2386 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2387 --> returns [[0,2],[5,7],[9,9]]
2388 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2389 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2390 """
2391 # split selstr by '<spaces>;<spaces>'
2392 semi_sep = re.compile('\s*;\s*')
2393 selgroups = semi_sep.split(selstr)
2394 sellists = []
2395 if typestr.lower().startswith('int'):
2396 formatfunc = int
2397 offset = 1
2398 else:
2399 formatfunc = float
2400
2401 for currsel in selgroups:
2402 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2403 minsel = minval
2404 maxsel = maxval
2405 if currsel.find('~') > 0:
2406 # val0 <= x <= val1
2407 minsel = formatfunc(currsel.split('~')[0].strip())
2408 maxsel = formatfunc(currsel.split('~')[1].strip())
2409 elif currsel.strip().find('<=') > -1:
2410 bound = currsel.split('<=')
2411 try: # try "x <= val"
2412 minsel = minval
2413 maxsel = formatfunc(bound[1].strip())
2414 except ValueError: # now "val <= x"
2415 minsel = formatfunc(bound[0].strip())
2416 maxsel = maxval
2417 elif currsel.strip().find('>=') > -1:
2418 bound = currsel.split('>=')
2419 try: # try "x >= val"
2420 minsel = formatfunc(bound[1].strip())
2421 maxsel = maxval
2422 except ValueError: # now "val >= x"
2423 minsel = minval
2424 maxsel = formatfunc(bound[0].strip())
2425 elif currsel.strip().find('<') > -1:
2426 bound = currsel.split('<')
2427 try: # try "x < val"
2428 minsel = minval
2429 maxsel = formatfunc(bound[1].strip()) \
2430 - formatfunc(offset)
2431 except ValueError: # now "val < x"
2432 minsel = formatfunc(bound[0].strip()) \
2433 + formatfunc(offset)
2434 maxsel = maxval
2435 elif currsel.strip().find('>') > -1:
2436 bound = currsel.split('>')
2437 try: # try "x > val"
2438 minsel = formatfunc(bound[1].strip()) \
2439 + formatfunc(offset)
2440 maxsel = maxval
2441 except ValueError: # now "val > x"
2442 minsel = minval
2443 maxsel = formatfunc(bound[0].strip()) \
2444 - formatfunc(offset)
2445 else:
2446 minsel = formatfunc(currsel)
2447 maxsel = formatfunc(currsel)
2448 sellists.append([minsel,maxsel])
2449 return sellists
2450
2451# def get_restfreqs(self):
2452# """
2453# Get the restfrequency(s) stored in this scantable.
2454# The return value(s) are always of unit 'Hz'
2455# Parameters:
2456# none
2457# Returns:
2458# a list of doubles
2459# """
2460# return list(self._getrestfreqs())
2461
2462 def get_restfreqs(self, ids=None):
2463 """\
2464 Get the restfrequency(s) stored in this scantable.
2465 The return value(s) are always of unit 'Hz'
2466
2467 Parameters:
2468
2469 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2470 be retrieved
2471
2472 Returns:
2473
2474 dictionary containing ids and a list of doubles for each id
2475
2476 """
2477 if ids is None:
2478 rfreqs = {}
2479 idlist = self.getmolnos()
2480 for i in idlist:
2481 rfreqs[i] = list(self._getrestfreqs(i))
2482 return rfreqs
2483 else:
2484 if type(ids) == list or type(ids) == tuple:
2485 rfreqs = {}
2486 for i in ids:
2487 rfreqs[i] = list(self._getrestfreqs(i))
2488 return rfreqs
2489 else:
2490 return list(self._getrestfreqs(ids))
2491
2492 @asaplog_post_dec
2493 def set_restfreqs(self, freqs=None, unit='Hz'):
2494 """\
2495 Set or replace the restfrequency specified and
2496 if the 'freqs' argument holds a scalar,
2497 then that rest frequency will be applied to all the selected
2498 data. If the 'freqs' argument holds
2499 a vector, then it MUST be of equal or smaller length than
2500 the number of IFs (and the available restfrequencies will be
2501 replaced by this vector). In this case, *all* data have
2502 the restfrequency set per IF according
2503 to the corresponding value you give in the 'freqs' vector.
2504 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2505 IF 1 gets restfreq 2e9.
2506
2507 You can also specify the frequencies via a linecatalog.
2508
2509 Parameters:
2510
2511 freqs: list of rest frequency values or string idenitfiers
2512
2513 unit: unit for rest frequency (default 'Hz')
2514
2515
2516 Example::
2517
2518 # set the given restfrequency for the all currently selected IFs
2519 scan.set_restfreqs(freqs=1.4e9)
2520 # set restfrequencies for the n IFs (n > 1) in the order of the
2521 # list, i.e
2522 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2523 # len(list_of_restfreqs) == nIF
2524 # for nIF == 1 the following will set multiple restfrequency for
2525 # that IF
2526 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2527 # set multiple restfrequencies per IF. as a list of lists where
2528 # the outer list has nIF elements, the inner s arbitrary
2529 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2530
2531 *Note*:
2532
2533 To do more sophisticate Restfrequency setting, e.g. on a
2534 source and IF basis, use scantable.set_selection() before using
2535 this function::
2536
2537 # provided your scantable is called scan
2538 selection = selector()
2539 selection.set_name('ORION*')
2540 selection.set_ifs([1])
2541 scan.set_selection(selection)
2542 scan.set_restfreqs(freqs=86.6e9)
2543
2544 """
2545 varlist = vars()
2546 from asap import linecatalog
2547 # simple value
2548 if isinstance(freqs, int) or isinstance(freqs, float):
2549 self._setrestfreqs([freqs], [""], unit)
2550 # list of values
2551 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2552 # list values are scalars
2553 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2554 if len(freqs) == 1:
2555 self._setrestfreqs(freqs, [""], unit)
2556 else:
2557 # allow the 'old' mode of setting mulitple IFs
2558 savesel = self._getselection()
2559 sel = self.get_selection()
2560 iflist = self.getifnos()
2561 if len(freqs)>len(iflist):
2562 raise ValueError("number of elements in list of list "
2563 "exeeds the current IF selections")
2564 iflist = self.getifnos()
2565 for i, fval in enumerate(freqs):
2566 sel.set_ifs(iflist[i])
2567 self._setselection(sel)
2568 self._setrestfreqs([fval], [""], unit)
2569 self._setselection(savesel)
2570
2571 # list values are dict, {'value'=, 'name'=)
2572 elif isinstance(freqs[-1], dict):
2573 values = []
2574 names = []
2575 for d in freqs:
2576 values.append(d["value"])
2577 names.append(d["name"])
2578 self._setrestfreqs(values, names, unit)
2579 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2580 savesel = self._getselection()
2581 sel = self.get_selection()
2582 iflist = self.getifnos()
2583 if len(freqs)>len(iflist):
2584 raise ValueError("number of elements in list of list exeeds"
2585 " the current IF selections")
2586 for i, fval in enumerate(freqs):
2587 sel.set_ifs(iflist[i])
2588 self._setselection(sel)
2589 self._setrestfreqs(fval, [""], unit)
2590 self._setselection(savesel)
2591 # freqs are to be taken from a linecatalog
2592 elif isinstance(freqs, linecatalog):
2593 savesel = self._getselection()
2594 sel = self.get_selection()
2595 for i in xrange(freqs.nrow()):
2596 sel.set_ifs(iflist[i])
2597 self._setselection(sel)
2598 self._setrestfreqs([freqs.get_frequency(i)],
2599 [freqs.get_name(i)], "MHz")
2600 # ensure that we are not iterating past nIF
2601 if i == self.nif()-1: break
2602 self._setselection(savesel)
2603 else:
2604 return
2605 self._add_history("set_restfreqs", varlist)
2606
2607 @asaplog_post_dec
2608 def shift_refpix(self, delta):
2609 """\
2610 Shift the reference pixel of the Spectra Coordinate by an
2611 integer amount.
2612
2613 Parameters:
2614
2615 delta: the amount to shift by
2616
2617 *Note*:
2618
2619 Be careful using this with broadband data.
2620
2621 """
2622 varlist = vars()
2623 Scantable.shift_refpix(self, delta)
2624 s._add_history("shift_refpix", varlist)
2625
2626 @asaplog_post_dec
2627 def history(self, filename=None, nrows=-1, start=0):
2628 """\
2629 Print the history. Optionally to a file.
2630
2631 Parameters:
2632
2633 filename: The name of the file to save the history to.
2634
2635 """
2636 n = self._historylength()
2637 if nrows == -1:
2638 nrows = n
2639 if start+nrows > n:
2640 nrows = nrows-start
2641 if n > 1000 and nrows == n:
2642 nrows = 1000
2643 start = n-1000
2644 asaplog.push("Warning: History has {0} entries. Displaying last "
2645 "1000".format(n))
2646 hist = list(self._gethistory(nrows, start))
2647 out = "-"*80
2648 for h in hist:
2649 if not h.strip():
2650 continue
2651 if h.find("---") >-1:
2652 continue
2653 else:
2654 items = h.split("##")
2655 date = items[0]
2656 func = items[1]
2657 items = items[2:]
2658 out += "\n"+date+"\n"
2659 out += "Function: %s\n Parameters:" % (func)
2660 for i in items:
2661 if i == '':
2662 continue
2663 s = i.split("=")
2664 out += "\n %s = %s" % (s[0], s[1])
2665 out = "\n".join([out, "*"*80])
2666 if filename is not None:
2667 if filename is "":
2668 filename = 'scantable_history.txt'
2669 filename = os.path.expandvars(os.path.expanduser(filename))
2670 if not os.path.isdir(filename):
2671 data = open(filename, 'w')
2672 data.write(out)
2673 data.close()
2674 else:
2675 msg = "Illegal file name '%s'." % (filename)
2676 raise IOError(msg)
2677 return page(out)
2678
2679 #
2680 # Maths business
2681 #
2682 @asaplog_post_dec
2683 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2684 avmode="NONE"):
2685 """\
2686 Return the (time) weighted average of a scan. Scans will be averaged
2687 only if the source direction (RA/DEC) is within 1' otherwise
2688
2689 *Note*:
2690
2691 in channels only - align if necessary
2692
2693 Parameters:
2694
2695 mask: an optional mask (only used for 'var' and 'tsys'
2696 weighting)
2697
2698 scanav: True averages each scan separately
2699 False (default) averages all scans together,
2700
2701 weight: Weighting scheme.
2702 'none' (mean no weight)
2703 'var' (1/var(spec) weighted)
2704 'tsys' (1/Tsys**2 weighted)
2705 'tint' (integration time weighted)
2706 'tintsys' (Tint/Tsys**2)
2707 'median' ( median averaging)
2708 The default is 'tint'
2709
2710 align: align the spectra in velocity before averaging. It takes
2711 the time of the first spectrum as reference time.
2712 avmode: 'SOURCE' - also select by source name - or
2713 'NONE' (default). Not applicable for scanav=True or
2714 weight=median
2715
2716 Example::
2717
2718 # time average the scantable without using a mask
2719 newscan = scan.average_time()
2720
2721 """
2722 varlist = vars()
2723 weight = weight or 'TINT'
2724 mask = mask or ()
2725 scanav = (scanav and 'SCAN') or avmode.upper()
2726 scan = (self, )
2727
2728 if align:
2729 scan = (self.freq_align(insitu=False), )
2730 asaplog.push("Note: Alignment is don on a source-by-source basis")
2731 asaplog.push("Note: Averaging (by default) is not")
2732 # we need to set it to SOURCE averaging here
2733 s = None
2734 if weight.upper() == 'MEDIAN':
2735 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2736 scanav))
2737 else:
2738 s = scantable(self._math._average(scan, mask, weight.upper(),
2739 scanav))
2740 s._add_history("average_time", varlist)
2741 return s
2742
2743 @asaplog_post_dec
2744 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2745 """\
2746 Return a scan where all spectra are converted to either
2747 Jansky or Kelvin depending upon the flux units of the scan table.
2748 By default the function tries to look the values up internally.
2749 If it can't find them (or if you want to over-ride), you must
2750 specify EITHER jyperk OR eta (and D which it will try to look up
2751 also if you don't set it). jyperk takes precedence if you set both.
2752
2753 Parameters:
2754
2755 jyperk: the Jy / K conversion factor
2756
2757 eta: the aperture efficiency
2758
2759 d: the geometric diameter (metres)
2760
2761 insitu: if False a new scantable is returned.
2762 Otherwise, the scaling is done in-situ
2763 The default is taken from .asaprc (False)
2764
2765 """
2766 if insitu is None: insitu = rcParams['insitu']
2767 self._math._setinsitu(insitu)
2768 varlist = vars()
2769 jyperk = jyperk or -1.0
2770 d = d or -1.0
2771 eta = eta or -1.0
2772 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2773 s._add_history("convert_flux", varlist)
2774 if insitu: self._assign(s)
2775 else: return s
2776
2777 @asaplog_post_dec
2778 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2779 """\
2780 Return a scan after applying a gain-elevation correction.
2781 The correction can be made via either a polynomial or a
2782 table-based interpolation (and extrapolation if necessary).
2783 You specify polynomial coefficients, an ascii table or neither.
2784 If you specify neither, then a polynomial correction will be made
2785 with built in coefficients known for certain telescopes (an error
2786 will occur if the instrument is not known).
2787 The data and Tsys are *divided* by the scaling factors.
2788
2789 Parameters:
2790
2791 poly: Polynomial coefficients (default None) to compute a
2792 gain-elevation correction as a function of
2793 elevation (in degrees).
2794
2795 filename: The name of an ascii file holding correction factors.
2796 The first row of the ascii file must give the column
2797 names and these MUST include columns
2798 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2799 by this) somewhere.
2800 The second row must give the data type of the
2801 column. Use 'R' for Real and 'I' for Integer.
2802 An example file would be
2803 (actual factors are arbitrary) :
2804
2805 TIME ELEVATION FACTOR
2806 R R R
2807 0.1 0 0.8
2808 0.2 20 0.85
2809 0.3 40 0.9
2810 0.4 60 0.85
2811 0.5 80 0.8
2812 0.6 90 0.75
2813
2814 method: Interpolation method when correcting from a table.
2815 Values are 'nearest', 'linear' (default), 'cubic'
2816 and 'spline'
2817
2818 insitu: if False a new scantable is returned.
2819 Otherwise, the scaling is done in-situ
2820 The default is taken from .asaprc (False)
2821
2822 """
2823
2824 if insitu is None: insitu = rcParams['insitu']
2825 self._math._setinsitu(insitu)
2826 varlist = vars()
2827 poly = poly or ()
2828 from os.path import expandvars
2829 filename = expandvars(filename)
2830 s = scantable(self._math._gainel(self, poly, filename, method))
2831 s._add_history("gain_el", varlist)
2832 if insitu:
2833 self._assign(s)
2834 else:
2835 return s
2836
2837 @asaplog_post_dec
2838 def freq_align(self, reftime=None, method='cubic', insitu=None):
2839 """\
2840 Return a scan where all rows have been aligned in frequency/velocity.
2841 The alignment frequency frame (e.g. LSRK) is that set by function
2842 set_freqframe.
2843
2844 Parameters:
2845
2846 reftime: reference time to align at. By default, the time of
2847 the first row of data is used.
2848
2849 method: Interpolation method for regridding the spectra.
2850 Choose from 'nearest', 'linear', 'cubic' (default)
2851 and 'spline'
2852
2853 insitu: if False a new scantable is returned.
2854 Otherwise, the scaling is done in-situ
2855 The default is taken from .asaprc (False)
2856
2857 """
2858 if insitu is None: insitu = rcParams["insitu"]
2859 oldInsitu = self._math._insitu()
2860 self._math._setinsitu(insitu)
2861 varlist = vars()
2862 reftime = reftime or ""
2863 s = scantable(self._math._freq_align(self, reftime, method))
2864 s._add_history("freq_align", varlist)
2865 self._math._setinsitu(oldInsitu)
2866 if insitu:
2867 self._assign(s)
2868 else:
2869 return s
2870
2871 @asaplog_post_dec
2872 def opacity(self, tau=None, insitu=None):
2873 """\
2874 Apply an opacity correction. The data
2875 and Tsys are multiplied by the correction factor.
2876
2877 Parameters:
2878
2879 tau: (list of) opacity from which the correction factor is
2880 exp(tau*ZD)
2881 where ZD is the zenith-distance.
2882 If a list is provided, it has to be of length nIF,
2883 nIF*nPol or 1 and in order of IF/POL, e.g.
2884 [opif0pol0, opif0pol1, opif1pol0 ...]
2885 if tau is `None` the opacities are determined from a
2886 model.
2887
2888 insitu: if False a new scantable is returned.
2889 Otherwise, the scaling is done in-situ
2890 The default is taken from .asaprc (False)
2891
2892 """
2893 if insitu is None:
2894 insitu = rcParams['insitu']
2895 self._math._setinsitu(insitu)
2896 varlist = vars()
2897 if not hasattr(tau, "__len__"):
2898 tau = [tau]
2899 s = scantable(self._math._opacity(self, tau))
2900 s._add_history("opacity", varlist)
2901 if insitu:
2902 self._assign(s)
2903 else:
2904 return s
2905
2906 @asaplog_post_dec
2907 def bin(self, width=5, insitu=None):
2908 """\
2909 Return a scan where all spectra have been binned up.
2910
2911 Parameters:
2912
2913 width: The bin width (default=5) in pixels
2914
2915 insitu: if False a new scantable is returned.
2916 Otherwise, the scaling is done in-situ
2917 The default is taken from .asaprc (False)
2918
2919 """
2920 if insitu is None:
2921 insitu = rcParams['insitu']
2922 self._math._setinsitu(insitu)
2923 varlist = vars()
2924 s = scantable(self._math._bin(self, width))
2925 s._add_history("bin", varlist)
2926 if insitu:
2927 self._assign(s)
2928 else:
2929 return s
2930
2931 @asaplog_post_dec
2932 def reshape(self, first, last, insitu=None):
2933 """Resize the band by providing first and last channel.
2934 This will cut off all channels outside [first, last].
2935 """
2936 if insitu is None:
2937 insitu = rcParams['insitu']
2938 varlist = vars()
2939 if last < 0:
2940 last = self.nchan()-1 + last
2941 s = None
2942 if insitu:
2943 s = self
2944 else:
2945 s = self.copy()
2946 s._reshape(first,last)
2947 s._add_history("reshape", varlist)
2948 if not insitu:
2949 return s
2950
2951 @asaplog_post_dec
2952 def resample(self, width=5, method='cubic', insitu=None):
2953 """\
2954 Return a scan where all spectra have been binned up.
2955
2956 Parameters:
2957
2958 width: The bin width (default=5) in pixels
2959
2960 method: Interpolation method when correcting from a table.
2961 Values are 'nearest', 'linear', 'cubic' (default)
2962 and 'spline'
2963
2964 insitu: if False a new scantable is returned.
2965 Otherwise, the scaling is done in-situ
2966 The default is taken from .asaprc (False)
2967
2968 """
2969 if insitu is None:
2970 insitu = rcParams['insitu']
2971 self._math._setinsitu(insitu)
2972 varlist = vars()
2973 s = scantable(self._math._resample(self, method, width))
2974 s._add_history("resample", varlist)
2975 if insitu:
2976 self._assign(s)
2977 else:
2978 return s
2979
2980 @asaplog_post_dec
2981 def average_pol(self, mask=None, weight='none'):
2982 """\
2983 Average the Polarisations together.
2984
2985 Parameters:
2986
2987 mask: An optional mask defining the region, where the
2988 averaging will be applied. The output will have all
2989 specified points masked.
2990
2991 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2992 weighted), or 'tsys' (1/Tsys**2 weighted)
2993
2994 """
2995 varlist = vars()
2996 mask = mask or ()
2997 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2998 s._add_history("average_pol", varlist)
2999 return s
3000
3001 @asaplog_post_dec
3002 def average_beam(self, mask=None, weight='none'):
3003 """\
3004 Average the Beams together.
3005
3006 Parameters:
3007 mask: An optional mask defining the region, where the
3008 averaging will be applied. The output will have all
3009 specified points masked.
3010
3011 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3012 weighted), or 'tsys' (1/Tsys**2 weighted)
3013
3014 """
3015 varlist = vars()
3016 mask = mask or ()
3017 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3018 s._add_history("average_beam", varlist)
3019 return s
3020
3021 def parallactify(self, pflag):
3022 """\
3023 Set a flag to indicate whether this data should be treated as having
3024 been 'parallactified' (total phase == 0.0)
3025
3026 Parameters:
3027
3028 pflag: Bool indicating whether to turn this on (True) or
3029 off (False)
3030
3031 """
3032 varlist = vars()
3033 self._parallactify(pflag)
3034 self._add_history("parallactify", varlist)
3035
3036 @asaplog_post_dec
3037 def convert_pol(self, poltype=None):
3038 """\
3039 Convert the data to a different polarisation type.
3040 Note that you will need cross-polarisation terms for most conversions.
3041
3042 Parameters:
3043
3044 poltype: The new polarisation type. Valid types are:
3045 'linear', 'circular', 'stokes' and 'linpol'
3046
3047 """
3048 varlist = vars()
3049 s = scantable(self._math._convertpol(self, poltype))
3050 s._add_history("convert_pol", varlist)
3051 return s
3052
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive verification loop below; presumably
        # 'H' means "ask for each row" — confirm against _get_verify_action.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3145
    @asaplog_post_dec
    def regrid_channel(self, width=5, plot=False, insitu=None):
        """\
        Regrid the spectra by the specified channel width

        Parameters:

            width:      The channel width (float) of regridded spectra
                        in the current spectral unit.

            plot:       [NOT IMPLEMENTED YET]
                        plot the original and the regridded spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()

        # verification plotting is not implemented; warn and continue
        if plot:
           asaplog.post()
           asaplog.push("Verification plot is not implemtnetd yet.")
           asaplog.post("WARN")

        s = self.copy()
        s._regrid_specchan(width)

        s._add_history("regrid_channel", varlist)

#        if plot:
#            from asap.asapplotter import new_asaplot
#            theplot = new_asaplot(rcParams['plotter.gui'])
#            from matplotlib import rc as rcp
#            rcp('lines', linewidth=1)
#            theplot.set_panels()
#            ylab=s._get_ordinate_label()
#            #theplot.palette(0,["#777777","red"])
#            for r in xrange(s.nrow()):
#                xsm=s._getabcissa(r)
#                ysm=s._getspectrum(r)
#                xorg=orgscan._getabcissa(r)
#                yorg=orgscan._getspectrum(r)
#                theplot.clear()
#                theplot.hold()
#                theplot.set_axes('ylabel',ylab)
#                theplot.set_axes('xlabel',s._getabcissalabel(r))
#                theplot.set_axes('title',s._getsourcename(r))
#                theplot.set_line(label='Original',color="#777777")
#                theplot.plot(xorg,yorg)
#                theplot.set_line(label='Smoothed',color="red")
#                theplot.plot(xsm,ysm)
#                ### Ugly part for legend
#                for i in [0,1]:
#                    theplot.subplots[0]['lines'].append(
#                        [theplot.subplots[0]['axes'].lines[i]]
#                        )
#                theplot.release()
#                ### Ugly part for legend
#                theplot.subplots[0]['lines']=[]
#                res = raw_input("Accept smoothing ([y]/n): ")
#                if res.upper() == 'N':
#                    s._setspectrum(yorg, r)
#            theplot.quit()
#            del theplot
#            del orgscan

        if insitu: self._assign(s)
        else: return s
3218
3219 @asaplog_post_dec
3220 def _parse_wn(self, wn):
3221 if isinstance(wn, list) or isinstance(wn, tuple):
3222 return wn
3223 elif isinstance(wn, int):
3224 return [ wn ]
3225 elif isinstance(wn, str):
3226 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3227 val = wn.split('-')
3228 val = [int(val[0]), int(val[1])]
3229 val.sort()
3230 res = [i for i in xrange(val[0], val[1]+1)]
3231 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3232 val = int(wn[2:])+1
3233 res = [i for i in xrange(val)]
3234 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3235 val = int(wn[:-2])+1
3236 res = [i for i in xrange(val)]
3237 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3238 val = int(wn[1:])
3239 res = [i for i in xrange(val)]
3240 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3241 val = int(wn[:-1])
3242 res = [i for i in xrange(val)]
3243 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3244 # then interpreted in C++
3245 # side as [a,a+1,...,a_nyq]
3246 # (CAS-3759)
3247 val = int(wn[2:])
3248 res = [val, -999]
3249 #res = [i for i in xrange(val, self.nchan()/2+1)]
3250 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3251 # then interpreted in C++
3252 # side as [a,a+1,...,a_nyq]
3253 # (CAS-3759)
3254 val = int(wn[:-2])
3255 res = [val, -999]
3256 #res = [i for i in xrange(val, self.nchan()/2+1)]
3257 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3258 # then interpreted in C++
3259 # side as [a+1,a+2,...,a_nyq]
3260 # (CAS-3759)
3261 val = int(wn[1:])+1
3262 res = [val, -999]
3263 #res = [i for i in xrange(val, self.nchan()/2+1)]
3264 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3265 # then interpreted in C++
3266 # side as [a+1,a+2,...,a_nyq]
3267 # (CAS-3759)
3268 val = int(wn[:-1])+1
3269 res = [val, -999]
3270 #res = [i for i in xrange(val, self.nchan()/2+1)]
3271
3272 return res
3273 else:
3274 msg = 'wrong value given for addwn/rejwn'
3275 raise RuntimeError(msg)
3276
3277 @asaplog_post_dec
3278 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3279 """\
3280 Subtract baseline based on parameters written in Baseline Table.
3281
3282 Parameters:
3283 insitu: if True, baseline fitting/subtraction is done
3284 in-situ. If False, a new scantable with
3285 baseline subtracted is returned. Actually,
3286 format of the returned value depends on both
3287 insitu and retfitres (see below).
3288 The default is taken from .asaprc (False)
3289 retfitres: if True, the results of baseline fitting (i.e.,
3290 coefficients and rms) are returned.
3291 default is False.
3292 The format of the returned value of this
3293 function varies as follows:
3294 (1) in case insitu=True and retfitres=True:
3295 fitting result.
3296 (2) in case insitu=True and retfitres=False:
3297 None.
3298 (3) in case insitu=False and retfitres=True:
3299 a dictionary containing a new scantable
3300 (with baseline subtracted) and the fitting
3301 results.
3302 (4) in case insitu=False and retfitres=False:
3303 a new scantable (with baseline subtracted).
3304 inbltable: name of input baseline table. The row number of
3305 scantable and that of inbltable must be
3306 identical.
3307 outbltable: name of output baseline table where baseline
3308 parameters and fitting results recorded.
3309 default is ''(no output).
3310 overwrite: if True when an existing baseline table is
3311 specified for outbltable, overwrites it.
3312 Otherwise there is no harm.
3313 default is False.
3314 """
3315
3316 try:
3317 varlist = vars()
3318 if retfitres is None: retfitres = False
3319 if inbltable is None: raise ValueError("bltable missing.")
3320 if outbltable is None: outbltable = ''
3321 if overwrite is None: overwrite = False
3322
3323 if insitu is None: insitu = rcParams['insitu']
3324 if insitu:
3325 workscan = self
3326 else:
3327 workscan = self.copy()
3328
3329 sres = workscan._apply_bltable(inbltable,
3330 retfitres,
3331 outbltable,
3332 os.path.exists(outbltable),
3333 overwrite)
3334 if retfitres: res = parse_fitresult(sres)
3335
3336 workscan._add_history('apply_bltable', varlist)
3337
3338 if insitu:
3339 self._assign(workscan)
3340 if retfitres:
3341 return res
3342 else:
3343 return None
3344 else:
3345 if retfitres:
3346 return {'scantable': workscan, 'fitresults': res}
3347 else:
3348 return workscan
3349
3350 except RuntimeError, e:
3351 raise_fitting_failure_exception(e)
3352
3353 @asaplog_post_dec
3354 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3355 """\
3356 Subtract baseline based on parameters written in the input list.
3357
3358 Parameters:
3359 insitu: if True, baseline fitting/subtraction is done
3360 in-situ. If False, a new scantable with
3361 baseline subtracted is returned. Actually,
3362 format of the returned value depends on both
3363 insitu and retfitres (see below).
3364 The default is taken from .asaprc (False)
3365 retfitres: if True, the results of baseline fitting (i.e.,
3366 coefficients and rms) are returned.
3367 default is False.
3368 The format of the returned value of this
3369 function varies as follows:
3370 (1) in case insitu=True and retfitres=True:
3371 fitting result.
3372 (2) in case insitu=True and retfitres=False:
3373 None.
3374 (3) in case insitu=False and retfitres=True:
3375 a dictionary containing a new scantable
3376 (with baseline subtracted) and the fitting
3377 results.
3378 (4) in case insitu=False and retfitres=False:
3379 a new scantable (with baseline subtracted).
3380 blinfo: baseline parameter set stored in a dictionary
3381 or a list of dictionary. Each dictionary
3382 corresponds to each spectrum and must contain
3383 the following keys and values:
3384 'row': row number,
3385 'blfunc': function name. available ones include
3386 'poly', 'chebyshev', 'cspline' and
3387 'sinusoid',
3388 'order': maximum order of polynomial. needed
3389 if blfunc='poly' or 'chebyshev',
3390 'npiece': number or piecewise polynomial.
3391 needed if blfunc='cspline',
3392 'nwave': a list of sinusoidal wave numbers.
3393 needed if blfunc='sinusoid', and
3394 'masklist': min-max windows for channel mask.
3395 the specified ranges will be used
3396 for fitting.
3397 bltable: name of output baseline table where baseline
3398 parameters and fitting results recorded.
3399 default is ''(no output).
3400 overwrite: if True when an existing baseline table is
3401 specified for bltable, overwrites it.
3402 Otherwise there is no harm.
3403 default is False.
3404
3405 Example:
3406 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3407 'masklist':[[10,350],[352,510]]},
3408 {'row':1, 'blfunc':'cspline', 'npiece':3,
3409 'masklist':[[3,16],[19,404],[407,511]]}
3410 ])
3411
3412 the first spectrum (row=0) will be fitted with polynomial
3413 of order=5 and the next one (row=1) will be fitted with cubic
3414 spline consisting of 3 pieces.
3415 """
3416
3417 try:
3418 varlist = vars()
3419 if retfitres is None: retfitres = False
3420 if blinfo is None: blinfo = []
3421 if bltable is None: bltable = ''
3422 if overwrite is None: overwrite = False
3423
3424 if insitu is None: insitu = rcParams['insitu']
3425 if insitu:
3426 workscan = self
3427 else:
3428 workscan = self.copy()
3429
3430 nrow = workscan.nrow()
3431
3432 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3433
3434 print "in_blinfo=< "+ str(in_blinfo)+" >"
3435
3436 sres = workscan._sub_baseline(in_blinfo,
3437 retfitres,
3438 bltable,
3439 os.path.exists(bltable),
3440 overwrite)
3441 if retfitres: res = parse_fitresult(sres)
3442
3443 workscan._add_history('sub_baseline', varlist)
3444
3445 if insitu:
3446 self._assign(workscan)
3447 if retfitres:
3448 return res
3449 else:
3450 return None
3451 else:
3452 if retfitres:
3453 return {'scantable': workscan, 'fitresults': res}
3454 else:
3455 return workscan
3456
3457 except RuntimeError, e:
3458 raise_fitting_failure_exception(e)
3459
3460 @asaplog_post_dec
3461 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3462 whichrow=None, uselinefinder=None, edge=None,
3463 threshold=None, chan_avg_limit=None):
3464 """\
3465 Calculates and returns model selection criteria for a specified
3466 baseline model and a given spectrum data.
3467 Available values include Akaike Information Criterion (AIC), the
3468 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3469 Bayesian Information Criterion (BIC) and the Generalised Cross
3470 Validation (GCV).
3471
3472 Parameters:
3473 value: name of model selection criteria to calculate.
3474 available ones include 'aic', 'aicc', 'bic' and
3475 'gcv'. default is 'aicc'.
3476 blfunc: baseline function name. available ones include
3477 'chebyshev', 'cspline' and 'sinusoid'.
3478 default is 'chebyshev'.
3479 order: parameter for basline function. actually stands for
3480 order of polynomial (order) for 'chebyshev',
3481 number of spline pieces (npiece) for 'cspline' and
3482 maximum wave number for 'sinusoid', respectively.
3483 default is 5 (which is also the default order value
3484 for [auto_]chebyshev_baseline()).
3485 mask: an optional mask. default is [].
3486 whichrow: row number. default is 0 (the first row)
3487 uselinefinder: use sd.linefinder() to flag out line regions
3488 default is True.
3489 edge: an optional number of channel to drop at
3490 the edge of spectrum. If only one value is
3491 specified, the same number will be dropped
3492 from both sides of the spectrum. Default
3493 is to keep all channels. Nested tuples
3494 represent individual edge selection for
3495 different IFs (a number of spectral channels
3496 can be different)
3497 default is (0, 0).
3498 threshold: the threshold used by line finder. It is
3499 better to keep it large as only strong lines
3500 affect the baseline solution.
3501 default is 3.
3502 chan_avg_limit: a maximum number of consequtive spectral
3503 channels to average during the search of
3504 weak and broad lines. The default is no
3505 averaging (and no search for weak lines).
3506 If such lines can affect the fitted baseline
3507 (e.g. a high order polynomial is fitted),
3508 increase this parameter (usually values up
3509 to 8 are reasonable). Most users of this
3510 method should find the default value sufficient.
3511 default is 1.
3512
3513 Example:
3514 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3515 """
3516
3517 try:
3518 varlist = vars()
3519
3520 if value is None: value = 'aicc'
3521 if blfunc is None: blfunc = 'chebyshev'
3522 if order is None: order = 5
3523 if mask is None: mask = []
3524 if whichrow is None: whichrow = 0
3525 if uselinefinder is None: uselinefinder = True
3526 if edge is None: edge = (0, 0)
3527 if threshold is None: threshold = 3
3528 if chan_avg_limit is None: chan_avg_limit = 1
3529
3530 return self._calc_aic(value, blfunc, order, mask,
3531 whichrow, uselinefinder, edge,
3532 threshold, chan_avg_limit)
3533
3534 except RuntimeError, e:
3535 raise_fitting_failure_exception(e)
3536
    @asaplog_post_dec
    def sinusoid_baseline(self, mask=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None,
                          insitu=None,
                          clipthresh=None, clipniter=None,
                          plot=None,
                          getresidual=None,
                          showprogress=None, minnrow=None,
                          outlog=None,
                          blfile=None, csvformat=None,
                          bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a' (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [0].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)
            csvformat:     if True blfile is csv-formatted, default is False.
            bltable:       name of a baseline table where fitting results
                           (coefficients, rms, etc.) are to be written.
                           if given, fitting results will NOT be output to
                           scantable (insitu=True) or None will be
                           returned (insitu=False).
                           (default is "": no table output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unspecified arguments by their default values
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # pack the FFT-related parameters into a single csv string
            # for the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._sinusoid_baseline(mask,
                                        fftinfo,
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)
            workscan._add_history('sinusoid_baseline', varlist)

            # when a baseline table is written, results are NOT returned
            # via the scantable (see the bltable parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3682
3683
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None,
                               insitu=None,
                               clipthresh=None, clipniter=None,
                               edge=None, threshold=None, chan_avg_limit=None,
                               plot=None,
                               getresidual=None,
                               showprogress=None, minnrow=None,
                               outlog=None,
                               blfile=None, csvformat=None,
                               bltable=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [0].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unspecified arguments by their default values
            if mask is None: mask = []
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = [0]
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # pack the FFT-related parameters into a single csv string
            # for the C++ layer
            sapplyfft = 'true' if applyfft else 'false'
            fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])

            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask,
                                             fftinfo,
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile, bltable)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            # when a baseline table is written, results are NOT returned
            # via the scantable (see the bltable parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3851
3852 @asaplog_post_dec
3853 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3854 clipthresh=None, clipniter=None, plot=None,
3855 getresidual=None, showprogress=None, minnrow=None,
3856 outlog=None, blfile=None, csvformat=None,
3857 bltable=None):
3858 """\
3859 Return a scan which has been baselined (all rows) by cubic spline
3860 function (piecewise cubic polynomial).
3861
3862 Parameters:
3863 mask: An optional mask
3864 npiece: Number of pieces. (default is 2)
3865 insitu: If False a new scantable is returned.
3866 Otherwise, the scaling is done in-situ
3867 The default is taken from .asaprc (False)
3868 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3869 clipniter: maximum number of iteration of 'clipthresh'-sigma
3870 clipping (default is 0)
3871 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3872 plot the fit and the residual. In this each
3873 indivual fit has to be approved, by typing 'y'
3874 or 'n'
3875 getresidual: if False, returns best-fit values instead of
3876 residual. (default is True)
3877 showprogress: show progress status for large data.
3878 default is True.
3879 minnrow: minimum number of input spectra to show.
3880 default is 1000.
3881 outlog: Output the coefficients of the best-fit
3882 function to logger (default is False)
3883 blfile: Name of a text file in which the best-fit
3884 parameter values to be written
3885 (default is "": no file/logger output)
3886 csvformat: if True blfile is csv-formatted, default is False.
3887 bltable: name of a baseline table where fitting results
3888 (coefficients, rms, etc.) are to be written.
3889 if given, fitting results will NOT be output to
3890 scantable (insitu=True) or None will be
3891 returned (insitu=False).
3892 (default is "": no table output)
3893
3894 Example:
3895 # return a scan baselined by a cubic spline consisting of 2 pieces
3896 # (i.e., 1 internal knot),
3897 # also with 3-sigma clipping, iteration up to 4 times
3898 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3899
3900 Note:
3901 The best-fit parameter values output in logger and/or blfile are now
3902 based on specunit of 'channel'.
3903 """
3904
3905 try:
3906 varlist = vars()
3907
3908 if insitu is None: insitu = rcParams['insitu']
3909 if insitu:
3910 workscan = self
3911 else:
3912 workscan = self.copy()
3913
3914 if mask is None: mask = []
3915 if npiece is None: npiece = 2
3916 if clipthresh is None: clipthresh = 3.0
3917 if clipniter is None: clipniter = 0
3918 if plot is None: plot = False
3919 if getresidual is None: getresidual = True
3920 if showprogress is None: showprogress = True
3921 if minnrow is None: minnrow = 1000
3922 if outlog is None: outlog = False
3923 if blfile is None: blfile = ''
3924 if csvformat is None: csvformat = False
3925 if bltable is None: bltable = ''
3926
3927 scsvformat = 'T' if csvformat else 'F'
3928
3929 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3930 workscan._cspline_baseline(mask, npiece,
3931 clipthresh, clipniter,
3932 getresidual,
3933 pack_progress_params(showprogress,
3934 minnrow),
3935 outlog, scsvformat+blfile,
3936 bltable)
3937 workscan._add_history("cspline_baseline", varlist)
3938
3939 if bltable == '':
3940 if insitu:
3941 self._assign(workscan)
3942 else:
3943 return workscan
3944 else:
3945 if not insitu:
3946 return None
3947
3948 except RuntimeError, e:
3949 raise_fitting_failure_exception(e)
3950
    @asaplog_post_dec
    def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            npiece:         Number of pieces. (default is 2)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace unspecified arguments by their default values
            if mask is None: mask = []
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_cspline_baseline(mask, npiece,
                                            clipthresh, clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog,
                                            scsvformat+blfile,
                                            bltable)
            workscan._add_history("auto_cspline_baseline", varlist)

            # when a baseline table is written, results are NOT returned
            # via the scantable (see the bltable parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4076
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order up
            # to 3, also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=3,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character flag prepended to blfile so the C++ layer
            # knows whether text output should be csv-formatted
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself (see the bltable parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4174
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character flag prepended to blfile so the C++ layer
            # knows whether text output should be csv-formatted
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself (see the bltable parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4297
4298 @asaplog_post_dec
4299 def poly_baseline(self, mask=None, order=None, insitu=None,
4300 clipthresh=None, clipniter=None, plot=None,
4301 getresidual=None, showprogress=None, minnrow=None,
4302 outlog=None, blfile=None, csvformat=None,
4303 bltable=None):
4304 """\
4305 Return a scan which has been baselined (all rows) by a polynomial.
4306 Parameters:
4307 mask: an optional mask
4308 order: the order of the polynomial (default is 0)
4309 insitu: if False a new scantable is returned.
4310 Otherwise, the scaling is done in-situ
4311 The default is taken from .asaprc (False)
4312 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4313 clipniter: maximum number of iteration of 'clipthresh'-sigma
4314 clipping (default is 0)
4315 plot: plot the fit and the residual. In this each
4316 indivual fit has to be approved, by typing 'y'
4317 or 'n'
4318 getresidual: if False, returns best-fit values instead of
4319 residual. (default is True)
4320 showprogress: show progress status for large data.
4321 default is True.
4322 minnrow: minimum number of input spectra to show.
4323 default is 1000.
4324 outlog: Output the coefficients of the best-fit
4325 function to logger (default is False)
4326 blfile: Name of a text file in which the best-fit
4327 parameter values to be written
4328 (default is "": no file/logger output)
4329 csvformat: if True blfile is csv-formatted, default is False.
4330 bltable: name of a baseline table where fitting results
4331 (coefficients, rms, etc.) are to be written.
4332 if given, fitting results will NOT be output to
4333 scantable (insitu=True) or None will be
4334 returned (insitu=False).
4335 (default is "": no table output)
4336
4337 Example:
4338 # return a scan baselined by a third order polynomial,
4339 # not using a mask
4340 bscan = scan.poly_baseline(order=3)
4341 """
4342
4343 try:
4344 varlist = vars()
4345
4346 if insitu is None:
4347 insitu = rcParams["insitu"]
4348 if insitu:
4349 workscan = self
4350 else:
4351 workscan = self.copy()
4352
4353 if mask is None: mask = []
4354 if order is None: order = 0
4355 if clipthresh is None: clipthresh = 3.0
4356 if clipniter is None: clipniter = 0
4357 if plot is None: plot = False
4358 if getresidual is None: getresidual = True
4359 if showprogress is None: showprogress = True
4360 if minnrow is None: minnrow = 1000
4361 if outlog is None: outlog = False
4362 if blfile is None: blfile = ''
4363 if csvformat is None: csvformat = False
4364 if bltable is None: bltable = ''
4365
4366 scsvformat = 'T' if csvformat else 'F'
4367
4368 if plot:
4369 outblfile = (blfile != "") and \
4370 os.path.exists(os.path.expanduser(
4371 os.path.expandvars(blfile))
4372 )
4373 if outblfile:
4374 blf = open(blfile, "a")
4375
4376 f = fitter()
4377 f.set_function(lpoly=order)
4378
4379 rows = xrange(workscan.nrow())
4380 #if len(rows) > 0: workscan._init_blinfo()
4381
4382 action = "H"
4383 for r in rows:
4384 f.x = workscan._getabcissa(r)
4385 f.y = workscan._getspectrum(r)
4386 if mask:
4387 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4388 else: # mask=None
4389 f.mask = workscan._getmask(r)
4390
4391 f.data = None
4392 f.fit()
4393
4394 if action != "Y": # skip plotting when accepting all
4395 f.plot(residual=True)
4396 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4397 #if accept_fit.upper() == "N":
4398 # #workscan._append_blinfo(None, None, None)
4399 # continue
4400 accept_fit = self._get_verify_action("Accept fit?",action)
4401 if r == 0: action = None
4402 if accept_fit.upper() == "N":
4403 continue
4404 elif accept_fit.upper() == "R":
4405 break
4406 elif accept_fit.upper() == "A":
4407 action = "Y"
4408
4409 blpars = f.get_parameters()
4410 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4411 #workscan._append_blinfo(blpars, masklist, f.mask)
4412 workscan._setspectrum((f.fitter.getresidual()
4413 if getresidual else f.fitter.getfit()), r)
4414
4415 if outblfile:
4416 rms = workscan.get_rms(f.mask, r)
4417 dataout = \
4418 workscan.format_blparams_row(blpars["params"],
4419 blpars["fixed"],
4420 rms, str(masklist),
4421 r, True, csvformat)
4422 blf.write(dataout)
4423
4424 f._p.unmap()
4425 f._p = None
4426
4427 if outblfile:
4428 blf.close()
4429 else:
4430 workscan._poly_baseline(mask, order,
4431 clipthresh, clipniter, #
4432 getresidual,
4433 pack_progress_params(showprogress,
4434 minnrow),
4435 outlog, scsvformat+blfile,
4436 bltable) #
4437
4438 workscan._add_history("poly_baseline", varlist)
4439
4440 if insitu:
4441 self._assign(workscan)
4442 else:
4443 return workscan
4444
4445 except RuntimeError, e:
4446 raise_fitting_failure_exception(e)
4447
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character flag prepended to blfile so the C++ layer
            # knows whether text output should be csv-formatted
            scsvformat = 'T' if csvformat else 'F'

            # expand edge into a flat per-IF sequence of (start, end) pairs
            edge = normalise_edge_param(edge)

            if plot:
                # NOTE(review): blfile is only appended to if it already
                # exists, and bltable is ignored in this interactive
                # branch -- confirm both are intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # sticky verification state: "H" forces a prompt on the
                # first row; "Y" accepts all remaining rows silently
                action = "H"
                for r in rows:
                    # edge pairs are stored flat, two entries per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    # mask out detected spectral lines before fitting
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is written, results are not stored in
            # the scantable itself (see the bltable parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4632
4633 def _init_blinfo(self):
4634 """\
4635 Initialise the following three auxiliary members:
4636 blpars : parameters of the best-fit baseline,
4637 masklists : mask data (edge positions of masked channels) and
4638 actualmask : mask data (in boolean list),
4639 to keep for use later (including output to logger/text files).
4640 Used by poly_baseline() and auto_poly_baseline() in case of
4641 'plot=True'.
4642 """
4643 self.blpars = []
4644 self.masklists = []
4645 self.actualmask = []
4646 return
4647
4648 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4649 """\
4650 Append baseline-fitting related info to blpars, masklist and
4651 actualmask.
4652 """
4653 self.blpars.append(data_blpars)
4654 self.masklists.append(data_masklists)
4655 self.actualmask.append(data_actualmask)
4656 return
4657
4658 @asaplog_post_dec
4659 def rotate_linpolphase(self, angle):
4660 """\
4661 Rotate the phase of the complex polarization O=Q+iU correlation.
4662 This is always done in situ in the raw data. So if you call this
4663 function more than once then each call rotates the phase further.
4664
4665 Parameters:
4666
4667 angle: The angle (degrees) to rotate (add) by.
4668
4669 Example::
4670
4671 scan.rotate_linpolphase(2.3)
4672
4673 """
4674 varlist = vars()
4675 self._math._rotate_linpolphase(self, angle)
4676 self._add_history("rotate_linpolphase", varlist)
4677 return
4678
4679 @asaplog_post_dec
4680 def rotate_xyphase(self, angle):
4681 """\
4682 Rotate the phase of the XY correlation. This is always done in situ
4683 in the data. So if you call this function more than once
4684 then each call rotates the phase further.
4685
4686 Parameters:
4687
4688 angle: The angle (degrees) to rotate (add) by.
4689
4690 Example::
4691
4692 scan.rotate_xyphase(2.3)
4693
4694 """
4695 varlist = vars()
4696 self._math._rotate_xyphase(self, angle)
4697 self._add_history("rotate_xyphase", varlist)
4698 return
4699
4700 @asaplog_post_dec
4701 def swap_linears(self):
4702 """\
4703 Swap the linear polarisations XX and YY, or better the first two
4704 polarisations as this also works for ciculars.
4705 """
4706 varlist = vars()
4707 self._math._swap_linears(self)
4708 self._add_history("swap_linears", varlist)
4709 return
4710
4711 @asaplog_post_dec
4712 def invert_phase(self):
4713 """\
4714 Invert the phase of the complex polarisation
4715 """
4716 varlist = vars()
4717 self._math._invert_phase(self)
4718 self._add_history("invert_phase", varlist)
4719 return
4720
4721 @asaplog_post_dec
4722 def add(self, offset, insitu=None):
4723 """\
4724 Return a scan where all spectra have the offset added
4725
4726 Parameters:
4727
4728 offset: the offset
4729
4730 insitu: if False a new scantable is returned.
4731 Otherwise, the scaling is done in-situ
4732 The default is taken from .asaprc (False)
4733
4734 """
4735 if insitu is None: insitu = rcParams['insitu']
4736 self._math._setinsitu(insitu)
4737 varlist = vars()
4738 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4739 s._add_history("add", varlist)
4740 if insitu:
4741 self._assign(s)
4742 else:
4743 return s
4744
4745 @asaplog_post_dec
4746 def scale(self, factor, tsys=True, insitu=None):
4747 """\
4748
4749 Return a scan where all spectra are scaled by the given 'factor'
4750
4751 Parameters:
4752
4753 factor: the scaling factor (float or 1D float list)
4754
4755 insitu: if False a new scantable is returned.
4756 Otherwise, the scaling is done in-situ
4757 The default is taken from .asaprc (False)
4758
4759 tsys: if True (default) then apply the operation to Tsys
4760 as well as the data
4761
4762 """
4763 if insitu is None: insitu = rcParams['insitu']
4764 self._math._setinsitu(insitu)
4765 varlist = vars()
4766 s = None
4767 import numpy
4768 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4769 if isinstance(factor[0], list) or isinstance(factor[0],
4770 numpy.ndarray):
4771 from asapmath import _array2dOp
4772 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4773 else:
4774 s = scantable( self._math._arrayop( self, factor,
4775 "MUL", tsys ) )
4776 else:
4777 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4778 s._add_history("scale", varlist)
4779 if insitu:
4780 self._assign(s)
4781 else:
4782 return s
4783
4784 @preserve_selection
4785 def set_sourcetype(self, match, matchtype="pattern",
4786 sourcetype="reference"):
4787 """\
4788 Set the type of the source to be an source or reference scan
4789 using the provided pattern.
4790
4791 Parameters:
4792
4793 match: a Unix style pattern, regular expression or selector
4794
4795 matchtype: 'pattern' (default) UNIX style pattern or
4796 'regex' regular expression
4797
4798 sourcetype: the type of the source to use (source/reference)
4799
4800 """
4801 varlist = vars()
4802 stype = -1
4803 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4804 stype = 1
4805 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4806 stype = 0
4807 else:
4808 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4809 if matchtype.lower().startswith("p"):
4810 matchtype = "pattern"
4811 elif matchtype.lower().startswith("r"):
4812 matchtype = "regex"
4813 else:
4814 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4815 sel = selector()
4816 if isinstance(match, selector):
4817 sel = match
4818 else:
4819 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4820 self.set_selection(sel)
4821 self._setsourcetype(stype)
4822 self._add_history("set_sourcetype", varlist)
4823
4824
4825 def set_sourcename(self, name):
4826 varlist = vars()
4827 self._setsourcename(name)
4828 self._add_history("set_sourcename", varlist)
4829
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            from asap._asap import srctype
            # the selection is narrowed twice below to copy out the off
            # and on scans; @preserve_selection restores the caller's
            # original selection on exit
            sel = self.get_selection()
            #sel.set_query("SRCTYPE==psoff")
            sel.set_types(srctype.psoff)
            self.set_selection(sel)
            offs = self.copy()
            #sel.set_query("SRCTYPE==pson")
            sel.set_types(srctype.pson)
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # closest-in-time matching is done entirely in the C++ layer
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
4881
4882 @asaplog_post_dec
4883 def mx_quotient(self, mask = None, weight='median', preserve=True):
4884 """\
4885 Form a quotient using "off" beams when observing in "MX" mode.
4886
4887 Parameters:
4888
4889 mask: an optional mask to be used when weight == 'stddev'
4890
4891 weight: How to average the off beams. Default is 'median'.
4892
4893 preserve: you can preserve (default) the continuum or
4894 remove it. The equations used are:
4895
4896 preserve: Output = Toff * (on/off) - Toff
4897
4898 remove: Output = Toff * (on/off) - Ton
4899
4900 """
4901 mask = mask or ()
4902 varlist = vars()
4903 on = scantable(self._math._mx_extract(self, 'on'))
4904 preoff = scantable(self._math._mx_extract(self, 'off'))
4905 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4906 from asapmath import quotient
4907 q = quotient(on, off, preserve)
4908 q._add_history("mx_quotient", varlist)
4909 return q
4910
4911 @asaplog_post_dec
4912 def freq_switch(self, insitu=None):
4913 """\
4914 Apply frequency switching to the data.
4915
4916 Parameters:
4917
4918 insitu: if False a new scantable is returned.
4919 Otherwise, the swictching is done in-situ
4920 The default is taken from .asaprc (False)
4921
4922 """
4923 if insitu is None: insitu = rcParams['insitu']
4924 self._math._setinsitu(insitu)
4925 varlist = vars()
4926 s = scantable(self._math._freqswitch(self))
4927 s._add_history("freq_switch", varlist)
4928 if insitu:
4929 self._assign(s)
4930 else:
4931 return s
4932
4933 @asaplog_post_dec
4934 def recalc_azel(self):
4935 """Recalculate the azimuth and elevation for each position."""
4936 varlist = vars()
4937 self._recalcazel()
4938 self._add_history("recalc_azel", varlist)
4939 return
4940
4941 @asaplog_post_dec
4942 def __add__(self, other):
4943 """
4944 implicit on all axes and on Tsys
4945 """
4946 varlist = vars()
4947 s = self.__op( other, "ADD" )
4948 s._add_history("operator +", varlist)
4949 return s
4950
4951 @asaplog_post_dec
4952 def __sub__(self, other):
4953 """
4954 implicit on all axes and on Tsys
4955 """
4956 varlist = vars()
4957 s = self.__op( other, "SUB" )
4958 s._add_history("operator -", varlist)
4959 return s
4960
4961 @asaplog_post_dec
4962 def __mul__(self, other):
4963 """
4964 implicit on all axes and on Tsys
4965 """
4966 varlist = vars()
4967 s = self.__op( other, "MUL" ) ;
4968 s._add_history("operator *", varlist)
4969 return s
4970
4971
4972 @asaplog_post_dec
4973 def __div__(self, other):
4974 """
4975 implicit on all axes and on Tsys
4976 """
4977 varlist = vars()
4978 s = self.__op( other, "DIV" )
4979 s._add_history("operator /", varlist)
4980 return s
4981
4982 @asaplog_post_dec
4983 def __op( self, other, mode ):
4984 s = None
4985 if isinstance(other, scantable):
4986 s = scantable(self._math._binaryop(self, other, mode))
4987 elif isinstance(other, float):
4988 if other == 0.0:
4989 raise ZeroDivisionError("Dividing by zero is not recommended")
4990 s = scantable(self._math._unaryop(self, other, mode, False))
4991 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4992 if isinstance(other[0], list) \
4993 or isinstance(other[0], numpy.ndarray):
4994 from asapmath import _array2dOp
4995 s = _array2dOp( self, other, mode, False )
4996 else:
4997 s = scantable( self._math._arrayop( self, other,
4998 mode, False ) )
4999 else:
5000 raise TypeError("Other input is not a scantable or float value")
5001 return s
5002
5003 @asaplog_post_dec
5004 def get_fit(self, row=0):
5005 """\
5006 Print or return the stored fits for a row in the scantable
5007
5008 Parameters:
5009
5010 row: the row which the fit has been applied to.
5011
5012 """
5013 if row > self.nrow():
5014 return
5015 from asap.asapfit import asapfit
5016 fit = asapfit(self._getfit(row))
5017 asaplog.push( '%s' %(fit) )
5018 return fit.as_dict()
5019
    @preserve_selection
    def flag_nans(self):
        """\
        Utility function to flag NaN values in the scantable.
        """
        import numpy
        # NOTE(review): @preserve_selection already restores the caller's
        # selection on exit, so the explicit save/restore via basesel is
        # redundant (but harmless).
        basesel = self.get_selection()
        for i in range(self.nrow()):
            sel = self.get_row_selector(i)
            # narrow the selection to row i so _getspectrum(0) reads it
            self.set_selection(basesel+sel)
            nans = numpy.isnan(self._getspectrum(0))
            if numpy.any(nans):
                bnans = [ bool(v) for v in nans]
                self.flag(bnans)

        self.set_selection(basesel)
5036
5037 def get_row_selector(self, rowno):
5038 return selector(rows=[rowno])
5039
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable history, recording *funcname*
        and a string rendering of its *parameters* dict (typically the
        vars() snapshot taken inside the calling method).  Disabled when
        rcParams['scantable.history'] is False.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # 'self' would render uselessly; note this mutates the caller's dict
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nested dicts is flattened into k2=v2 pairs
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # channel masks are compressed to [start, end) pairs
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # channel masks are compressed to [start, end) pairs
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
5080
5081
5082 def _zip_mask(self, mask):
5083 mask = list(mask)
5084 i = 0
5085 segments = []
5086 while mask[i:].count(1):
5087 i += mask[i:].index(1)
5088 if mask[i:].count(0):
5089 j = i + mask[i:].index(0)
5090 else:
5091 j = len(mask)
5092 segments.append([i, j])
5093 i = j
5094 return segments
5095
5096 def _get_ordinate_label(self):
5097 fu = "("+self.get_fluxunit()+")"
5098 import re
5099 lbl = "Intensity"
5100 if re.match(".K.", fu):
5101 lbl = "Brightness Temperature "+ fu
5102 elif re.match(".Jy.", fu):
5103 lbl = "Flux density "+ fu
5104 return lbl
5105
    def _check_ifs(self):
        """\
        Return True if all IFs with a positive channel count share the
        same number of channels.

        NOTE(review): this compares the (integer) mean of the channel
        counts with the first entry instead of checking strict equality
        of all entries, and raises ZeroDivisionError when no IF has a
        positive channel count -- confirm both are acceptable.
        """
#        return len(set([self.nchan(i) for i in self.getifnos()])) == 1
        nchans = [self.nchan(i) for i in self.getifnos()]
        nchans = filter(lambda t: t > 0, nchans)
        return (sum(nchans)/len(nchans) == nchans[0])
5111
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable.

        Parameters:
            names:    list of file names; '~' and environment variables
                      are expanded, and missing files raise IOError
            unit:     flux unit to set after filling, or None to keep
            average:  if True, time-average integrations per scan
            opts:     extra options passed to the filler's open()
        """
        first = True
        fullnames = []
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 0 = memory-based table, 1 = disk-based table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # measurement sets and other formats need different fillers
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge subsequent files into the already-filled table
                tbl = self._math._merge([self, tbl])
            # rebind this scantable to the newly filled/merged table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5152
5153 def _get_verify_action( self, msg, action=None ):
5154 valid_act = ['Y', 'N', 'A', 'R']
5155 if not action or not isinstance(action, str):
5156 action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
5157 if action == '':
5158 return "Y"
5159 elif (action.upper()[0] in valid_act):
5160 return action.upper()[0]
5161 elif (action.upper()[0] in ['H','?']):
5162 print "Available actions of verification [Y|n|a|r]"
5163 print " Y : Yes for current data (default)"
5164 print " N : No for current data"
5165 print " A : Accept all in the following and exit from verification"
5166 print " R : Reject all in the following and exit from verification"
5167 print " H or ?: help (show this message)"
5168 return self._get_verify_action(msg)
5169 else:
5170 return 'Y'
5171
5172 def __getitem__(self, key):
5173 if key < 0:
5174 key += self.nrow()
5175 if key >= self.nrow():
5176 raise IndexError("Row index out of range.")
5177 return self._getspectrum(key)
5178
5179 def __setitem__(self, key, value):
5180 if key < 0:
5181 key += self.nrow()
5182 if key >= self.nrow():
5183 raise IndexError("Row index out of range.")
5184 if not hasattr(value, "__len__") or \
5185 len(value) > self.nchan(self.getif(key)):
5186 raise ValueError("Spectrum length doesn't match.")
5187 return self._setspectrum(value, key)
5188
5189 def __len__(self):
5190 return self.nrow()
5191
5192 def __iter__(self):
5193 for i in range(len(self)):
5194 yield self[i]
Note: See TracBrowser for help on using the repository browser.