source: trunk/python/scantable.py@ 2973

Last change on this file since 2973 was 2973, checked in by WataruKawasaki, 10 years ago

New Development: No

JIRA Issue: Yes CAS-6599

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs: test_sdstat

Put in Release Notes:

Module(s): sd

Description: modified scantable.stats() so that the output of sdstat has None values for flagged rows.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 208.3 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before running
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    Returns True when *filename* is a directory whose table.info first
    line matches 'Type = Scantable' (or a bare 'Type = ' line, which the
    optional regex group also accepts), otherwise False.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # 'with' guarantees the handle is closed even if readline() raises
    with open(filename + '/table.info') as f:
        first_line = f.readline()
    # note: '(Scantable)?' is optional, so 'Type = ' alone also matches;
    # kept as-is for backward compatibility
    match_pattern = '^Type = (Scantable)? *$'
    return bool(re.match(match_pattern, first_line))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    Returns True when *filename* is a directory whose table.info first
    line mentions 'Measurement Set', otherwise False.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # 'with' guarantees the handle is closed even if readline() raises
    with open(filename + '/table.info') as f:
        first_line = f.readline()
    return first_line.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

            If an element contains the same integer values, the input 'edge'
            parameter can be given in a simpler shape in the following cases:
                ** when len(edge)!=2
                    any elements containing the same values can be replaced
                    to single integers.
                    e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                    e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                ** when len(edge)=2
                    care is needed for this case: ONLY ONE of the
                    elements can be a single integer,
                    e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                         or [[5,5],10], but can NOT be simplified to [5,10].
                         when [5,10] given, it is interpreted as
                         [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation: each element must be an int or a pair of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."


    # --- canonicalise scalars and short sequences to [left, right] ---
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # a single common edge applies to all IFs only when the canonicalised
    # edge is exactly one pair of plain integers (no nested sequences)
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            # single int in a list: duplicate for both sides
            norm_edge = edge + edge
    else:
        # per-IF edges: expand bare ints to pairs and flatten the pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a failed fit: push it to the log in verbose mode,
    otherwise raise a RuntimeError carrying the original error."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack *showprogress* and *minnrow* into a 'true,N'-style string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        # drop entries that pack to '' (rows beyond maxirow)
        return [s for s in (do_pack_blinfo(b, maxirow) for b in blinfo)
                if s != '']
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    blinfo must contain 'row', 'blfunc' and 'masklist', plus the
    parameter key matching blfunc ('order', 'npiece' or 'nwave').
    Returns '' when blinfo['row'] >= maxirow.
    """
    dinfo = {}
    # mandatory keys; list/tuple values are flattened to 'a,b,c' strings
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        # nested [min,max] mask ranges are flattened too
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the table size produce no output at all
    if irow >= maxirow: return ''

    # optional per-function fit parameters, flattened the same way
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    # the parameter key required by the chosen baseline function must exist
    fspec_key = fspec_keys[blfunc]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # sigma-clipping: either both parameters are given or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        # defaults: no clipping
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # fixed field order consumed by the lower-level (C++) parser
    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    parsed = []
    for item in sres:
        srow, scoeff, srms = item.split(":")
        parsed.append({'row': int(srow),
                       'coeff': [float(c) for c in scoeff.split(",")],
                       'rms': float(srms)})
    return parsed
302
def is_number(s):
    """Return True when the stripped string *s* parses as a float.

    Parameters:

        s:    the string to test

    """
    s = s.strip()
    try:
        float(s)
    except (TypeError, ValueError):
        # narrow except: only the conversion failures float() raises;
        # the old bare 'except' silently swallowed every error
        return False
    return True
313
def is_frequency(s):
    """Return True when the trimmed string *s* ends in 'hz' (any case)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a numeric string *s1* and a frequency string *s2*
    (e.g. '100GHz') into a pair of float values in Hz, both scaled by
    the metric prefix found in *s2*. Raises RuntimeError on bad input.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefixes = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factors = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
               1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    value1 = s1.strip()
    value2 = s2.strip()

    # the character just before the trailing 'Hz' decides the prefix
    prefix = value2[-3:-2]
    if is_number(prefix):
        # plain 'Hz': no metric prefix to scale by
        return (float(value1), float(value2[:-2]))
    scale = factors[prefixes.index(prefix)]
    return (float(value1) * scale, float(value2[:-3]) * scale)
338
def is_velocity(s):
    """Return True when the trimmed string *s* ends in 'm/s' (any case)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a numeric string *s1* and a velocity string *s2*
    (e.g. '10km/s') into a pair of float values in km/s (the default
    velocity unit), both scaled by the prefix found in *s2*.
    Raises RuntimeError on bad input.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefixes = [".", "k"]
    factors = [1e-3, 1.0]

    value1 = s1.strip()
    value2 = s2.strip()

    # the character just before the trailing 'm/s' decides the prefix
    prefix = value2[-4:-3]
    if is_number(prefix):
        # plain 'm/s': convert to km/s
        return (float(value1) * 1e-3, float(value2[:-3]) * 1e-3)
    scale = factors[prefixes.index(prefix)]
    return (float(value1) * scale, float(value2[:-4]) * scale)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert velocity *vel* (km/s) into a frequency for *restfreq*
    under the given doppler convention: 'radio', 'optical', or the
    relativistic formula for anything else.
    """
    import math

    # speed of light in km/s
    vel_c = 299792.458
    r = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - r)
    if convention == 'optical':
        return restfreq / (1.0 + r)
    # relativistic doppler shift
    return restfreq * math.sqrt((1.0 - r) / (1.0 + r))
380
def get_restfreq_in_Hz(s_restfreq):
    """Convert a rest-frequency string such as '100.5GHz' or '345 MHz'
    to a float value in Hz. A bare number or a plain 'Hz' unit is
    returned as-is; a recognised metric prefix scales the value.

    Raises:
        Exception: when the unit is not a (prefixed) 'Hz'.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # walk backwards collecting the alphabetic unit, then parse the
    # remaining leading characters as the numeric value
    for i in range(len(s))[::-1]:
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i+1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        # table lookup replaces the old commented-out if/elif ladder
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
                       1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value * factor
    else:
        raise Exception("wrong unit of restfreq.")
432
def normalise_restfreq(in_restfreq):
    """\
    Normalise a rest-frequency specification to float values in Hz.

    Accepts a float/int/long, a unit string (e.g. '100GHz'), or a
    list / 1-D numpy array whose elements are any of those or dicts
    carrying 'name' and 'value' entries; returns the same structure
    with every value converted to a float (Hz for strings).
    """
    if isinstance(in_restfreq, float):
        return in_restfreq
    elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):
        return float(in_restfreq)
    elif isinstance(in_restfreq, str):
        return get_restfreq_in_Hz(in_restfreq)
    elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):
        if isinstance(in_restfreq, numpy.ndarray):
            # only flat arrays are meaningful here
            if len(in_restfreq.shape) > 1:
                mesg = "given in numpy.ndarray, in_restfreq must be 1-D."
                raise Exception, mesg

        res = []
        for i in xrange(len(in_restfreq)):
            elem = in_restfreq[i]
            if isinstance(elem, float):
                res.append(elem)
            elif isinstance(elem, int) or isinstance(elem, long):
                res.append(float(elem))
            elif isinstance(elem, str):
                res.append(get_restfreq_in_Hz(elem))
            elif isinstance(elem, dict):
                # dict form {'name': ..., 'value': ...}: only 'value' is
                # normalised, 'name' is carried through unchanged
                if isinstance(elem["value"], float):
                    res.append(elem)
                elif isinstance(elem["value"], int):
                    dictelem = {}
                    dictelem["name"] = elem["name"]
                    dictelem["value"] = float(elem["value"])
                    res.append(dictelem)
                elif isinstance(elem["value"], str):
                    dictelem = {}
                    dictelem["name"] = elem["name"]
                    dictelem["value"] = get_restfreq_in_Hz(elem["value"])
                    res.append(dictelem)
            else:
                mesg = "restfreq elements must be float, int, or string."
                raise Exception, mesg
        return res
    else:
        mesg = "wrong type of restfreq given."
        raise Exception, mesg
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*, skipping empty ('' or [])
    specifications."""
    if restfreq == '' or restfreq == []:
        # nothing to set
        return
    s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing Scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file is handed to the NRO filler
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a sequence of file names: fill them all at once
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
572
573 @asaplog_post_dec
574 def save(self, name=None, format=None, overwrite=False):
575 """\
576 Store the scantable on disk. This can be an asap (aips++) Table,
577 SDFITS or MS2 format.
578
579 Parameters:
580
581 name: the name of the outputfile. For format 'ASCII'
582 this is the root file name (data in 'name'.txt
583 and header in 'name'_header.txt)
584
585 format: an optional file format. Default is ASAP.
586 Allowed are:
587
588 * 'ASAP' (save as ASAP [aips++] Table),
589 * 'SDFITS' (save as SDFITS file)
590 * 'ASCII' (saves as ascii text file)
591 * 'MS2' (saves as an casacore MeasurementSet V2)
592 * 'FITS' (save as image FITS - not readable by
593 class)
594 * 'CLASS' (save as FITS readable by CLASS)
595
596 overwrite: If the file should be overwritten if it exists.
597 The default False is to return with warning
598 without writing the output. USE WITH CARE.
599
600 Example::
601
602 scan.save('myscan.asap')
603 scan.save('myscan.sdfits', 'SDFITS')
604
605 """
606 from os import path
607 format = format or rcParams['scantable.save']
608 suffix = '.'+format.lower()
609 if name is None or name == "":
610 name = 'scantable'+suffix
611 msg = "No filename given. Using default name %s..." % name
612 asaplog.push(msg)
613 name = path.expandvars(name)
614 if path.isfile(name) or path.isdir(name):
615 if not overwrite:
616 msg = "File %s exists." % name
617 raise IOError(msg)
618 format2 = format.upper()
619 if format2 == 'ASAP':
620 self._save(name)
621 elif format2 == 'MS2':
622 msopt = {'ms': {'overwrite': overwrite } }
623 from asap._asap import mswriter
624 writer = mswriter( self )
625 writer.write( name, msopt )
626 else:
627 from asap._asap import stwriter as stw
628 writer = stw(format2)
629 writer.write(self, name)
630 return
631
    def copy(self):
        """Return a copy of this scantable.

        *Note*:

            This makes a full (deep) copy. scan2 = scan1 makes a reference.

        Example::

            copiedscan = scan.copy()

        """
        # Scantable._copy does the deep copy; wrap the result back up as
        # a python-level scantable
        sd = scantable(Scantable._copy(self))
        return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
    def _select_copy(self, selection):
        """Return a deep copy of the rows matched by the current
        selection combined with *selection*; the scantable's own
        selection is restored before returning.
        """
        orig = self.get_selection()
        self.set_selection(orig+selection)
        cp = self.copy()
        # put the original selection back so callers see no change
        self.set_selection(orig)
        return cp
677
678 def get_scan(self, scanid=None):
679 """\
680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
682
683 *Note*:
684
685 See scantable.drop_scan() for the inverse operation.
686
687 Parameters:
688
689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
692
693 Example::
694
695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
700 newscan = scan.get_scan([0, 2, 7, 10])
701
702 """
703 if scanid is None:
704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
706 try:
707 bsel = self.get_selection()
708 sel = selector()
709 if type(scanid) is str:
710 sel.set_name(scanid)
711 return self._select_copy(sel)
712 elif type(scanid) is int:
713 sel.set_scans([scanid])
714 return self._select_copy(sel)
715 elif type(scanid) is list:
716 sel.set_scans(scanid)
717 return self._select_copy(sel)
718 else:
719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
720 raise TypeError(msg)
721 except RuntimeError:
722 raise
723
724 def __str__(self):
725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        Returns:

             a list of the spectral values of the row
        """
        return self._getspectrum(rowno)
763
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        Returns:

             a list holding the channel mask of the row
        """
        return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the low-level coordinate in the python-level helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
809
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)               # select IF 0
            scan.set_selection(sel)      # apply modified selection

        Returns:

            a selector object wrapping the current underlying selection
        """
        return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None, skip_flaggedrow=False):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

            skip_flaggedrow: if True, skip outputting text for flagged
                             spectra. default is False.

        Returns:

            a list of the statistic values (None for rows blanked via
            skip_flaggedrow)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # per-row channel of the extremum, needed for the report and
            # for the '_abc' (abscissa) variants
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []

        rtnabc = False
        if stat.lower().endswith('_abc'):
            # '_abc' mode: the values are collected per row in the loop
            # below instead of being computed here
            rtnabc = True
        else:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa mode: collect the channel's x-value and
                    # quote the y-value in the report text
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    # NOTE(review): statunit keeps the value from the
                    # last iterated row when used after this loop
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            # flagged rows: blank the value and emit no text (CAS-6599)
            if skip_flaggedrow and self._getflagrow(i):
                statvals[i] = None
                continue

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        import os
        # route the report through a temp file so it reaches the log in
        # a single push
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
1009
    def chan2data(self, rowno=0, chan=0):
        """\
        Returns channel/frequency/velocity and spectral value
        at an arbitrary row and channel in the scantable.

        Parameters:

            rowno:    a row number in the scantable. Default is the
                      first row, i.e. rowno=0

            chan:     a channel in the scantable. Default is the first
                      channel, i.e. pos=0

        Returns:

            a pair of {'unit', 'value'} dicts for the abscissa (x) and
            the spectrum (y); returns None implicitly when rowno or
            chan is not an int
        """
        if isinstance(rowno, int) and isinstance(chan, int):
            qx = {'unit': self.get_unit(),
                  'value': self._getabcissa(rowno)[chan]}
            qy = {'unit': self.get_fluxunit(),
                  'value': self._getspectrum(rowno)[chan]}
            return qx, qy
1030
1031 def stddev(self, mask=None):
1032 """\
1033 Determine the standard deviation of the current beam/if/pol
1034 Takes a 'mask' as an optional parameter to specify which
1035 channels should be excluded.
1036
1037 Parameters:
1038
1039 mask: an optional mask specifying where the standard
1040 deviation should be determined.
1041
1042 Example::
1043
1044 scan.set_unit('channel')
1045 msk = scan.create_mask([100, 200], [500, 600])
1046 scan.stddev(mask=m)
1047
1048 """
1049 return self.stats(stat='stddev', mask=mask);
1050
1051
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # wrap the low-level result in a plain python list
        return list(Scantable.get_column_names(self))
1057
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            # single-row access
            return self._get_column(self._gettsys, row)
        # all rows: _row_callback also logs a per-row 'Tsys' report
        return self._row_callback(self._gettsys, "Tsys")
1074
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        return self._get_column( self._gettsysspectrum, row )
1089
1090 def set_tsys(self, values, row=-1):
1091 """\
1092 Set the Tsys value(s) of the given 'row' or the whole scantable
1093 (selection).
1094
1095 Parameters:
1096
1097 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1098 row: the row number to apply Tsys values to.
1099 (default all rows)
1100
1101 """
1102
1103 if not hasattr(values, "__len__"):
1104 values = [values]
1105 self._settsys(values, row)
1106
1107 def get_weather(self, row=-1):
1108 """\
1109 Return the weather information.
1110
1111 Parameters:
1112
1113 row: the rowno to get the information for. (default all rows)
1114
1115 Returns:
1116
1117 a dict or list of of dicts of values for the current selection
1118
1119 """
1120 if row >= len(self):
1121 raise IndexError("row out of range")
1122 values = self._get_column(self._get_weather, row)
1123 if row > -1:
1124 return {'temperature': values[0],
1125 'pressure': values[1], 'humidity' : values[2],
1126 'windspeed' : values[3], 'windaz' : values[4]
1127 }
1128 else:
1129 out = []
1130 for r in values:
1131 out.append({'temperature': r[0],
1132 'pressure': r[1], 'humidity' : r[2],
1133 'windspeed' : r[3], 'windaz' : r[4]
1134 })
1135 return out
1136
1137 def _row_callback(self, callback, label):
1138 out = ""
1139 outvec = []
1140 sep = '-'*50
1141 for i in range(self.nrow()):
1142 tm = self._gettime(i)
1143 src = self._getsourcename(i)
1144 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1145 out += 'Time[%s]:\n' % (tm)
1146 if self.nbeam(-1) > 1:
1147 out += ' Beam[%d] ' % (self.getbeam(i))
1148 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1149 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1150 outvec.append(callback(i))
1151 out += '= %3.3f\n' % (outvec[i])
1152 out += sep+'\n'
1153
1154 asaplog.push(sep)
1155 asaplog.push(" %s" % (label))
1156 asaplog.push(sep)
1157 asaplog.push(out)
1158 asaplog.post()
1159 return outvec
1160
1161 def _get_column(self, callback, row=-1, *args):
1162 """
1163 """
1164 if row == -1:
1165 return [callback(i, *args) for i in range(self.nrow())]
1166 else:
1167 if 0 <= row < self.nrow():
1168 return callback(row, *args)
1169
1170
1171 def get_time(self, row=-1, asdatetime=False, prec=-1):
1172 """\
1173 Get a list of time stamps for the observations.
1174 Return a datetime object or a string (default) for each
1175 integration time stamp in the scantable.
1176
1177 Parameters:
1178
1179 row: row no of integration. Default -1 return all rows
1180
1181 asdatetime: return values as datetime objects rather than strings
1182
1183 prec: number of digits shown. Default -1 to automatic
1184 calculation.
1185 Note this number is equals to the digits of MVTime,
1186 i.e., 0<prec<3: dates with hh:: only,
1187 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
1188 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
1189
1190 """
1191 from datetime import datetime
1192 if prec < 0:
1193 # automagically set necessary precision +1
1194 prec = 7 - \
1195 numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
1196 prec = max(6, int(prec))
1197 else:
1198 prec = max(0, prec)
1199 if asdatetime:
1200 #precision can be 1 millisecond at max
1201 prec = min(12, prec)
1202
1203 times = self._get_column(self._gettime, row, prec)
1204 if not asdatetime:
1205 return times
1206 format = "%Y/%m/%d/%H:%M:%S.%f"
1207 if prec < 7:
1208 nsub = 1 + (((6-prec)/2) % 3)
1209 substr = [".%f","%S","%M"]
1210 for i in range(nsub):
1211 format = format.replace(substr[i],"")
1212 if isinstance(times, list):
1213 return [datetime.strptime(i, format) for i in times]
1214 else:
1215 return datetime.strptime(times, format)
1216
1217
1218 def get_inttime(self, row=-1):
1219 """\
1220 Get a list of integration times for the observations.
1221 Return a time in seconds for each integration in the scantable.
1222
1223 Parameters:
1224
1225 row: row no of integration. Default -1 return all rows.
1226
1227 """
1228 return self._get_column(self._getinttime, row)
1229
1230
1231 def get_sourcename(self, row=-1):
1232 """\
1233 Get a list source names for the observations.
1234 Return a string for each integration in the scantable.
1235 Parameters:
1236
1237 row: row no of integration. Default -1 return all rows.
1238
1239 """
1240 return self._get_column(self._getsourcename, row)
1241
1242 def get_elevation(self, row=-1):
1243 """\
1244 Get a list of elevations for the observations.
1245 Return a float for each integration in the scantable.
1246
1247 Parameters:
1248
1249 row: row no of integration. Default -1 return all rows.
1250
1251 """
1252 return self._get_column(self._getelevation, row)
1253
1254 def get_azimuth(self, row=-1):
1255 """\
1256 Get a list of azimuths for the observations.
1257 Return a float for each integration in the scantable.
1258
1259 Parameters:
1260 row: row no of integration. Default -1 return all rows.
1261
1262 """
1263 return self._get_column(self._getazimuth, row)
1264
1265 def get_parangle(self, row=-1):
1266 """\
1267 Get a list of parallactic angles for the observations.
1268 Return a float for each integration in the scantable.
1269
1270 Parameters:
1271
1272 row: row no of integration. Default -1 return all rows.
1273
1274 """
1275 return self._get_column(self._getparangle, row)
1276
1277 def get_direction(self, row=-1):
1278 """
1279 Get a list of Positions on the sky (direction) for the observations.
1280 Return a string for each integration in the scantable.
1281
1282 Parameters:
1283
1284 row: row no of integration. Default -1 return all rows
1285
1286 """
1287 return self._get_column(self._getdirection, row)
1288
1289 def get_directionval(self, row=-1):
1290 """\
1291 Get a list of Positions on the sky (direction) for the observations.
1292 Return a float for each integration in the scantable.
1293
1294 Parameters:
1295
1296 row: row no of integration. Default -1 return all rows
1297
1298 """
1299 return self._get_column(self._getdirectionvec, row)
1300
1301 @asaplog_post_dec
1302 def set_unit(self, unit='channel'):
1303 """\
1304 Set the unit for all following operations on this scantable
1305
1306 Parameters:
1307
1308 unit: optional unit, default is 'channel'. Use one of '*Hz',
1309 'km/s', 'channel' or equivalent ''
1310
1311 """
1312 varlist = vars()
1313 if unit in ['', 'pixel', 'channel']:
1314 unit = ''
1315 inf = list(self._getcoordinfo())
1316 inf[0] = unit
1317 self._setcoordinfo(inf)
1318 self._add_history("set_unit", varlist)
1319
1320 @asaplog_post_dec
1321 def set_instrument(self, instr):
1322 """\
1323 Set the instrument for subsequent processing.
1324
1325 Parameters:
1326
1327 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1328 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1329
1330 """
1331 self._setInstrument(instr)
1332 self._add_history("set_instument", vars())
1333
1334 @asaplog_post_dec
1335 def set_feedtype(self, feedtype):
1336 """\
1337 Overwrite the feed type, which might not be set correctly.
1338
1339 Parameters:
1340
1341 feedtype: 'linear' or 'circular'
1342
1343 """
1344 self._setfeedtype(feedtype)
1345 self._add_history("set_feedtype", vars())
1346
1347 @asaplog_post_dec
1348 def get_doppler(self):
1349 """\
1350 Get the doppler.
1351 """
1352 return self._getcoordinfo()[2]
1353
1354 @asaplog_post_dec
1355 def set_doppler(self, doppler='RADIO'):
1356 """\
1357 Set the doppler for all following operations on this scantable.
1358
1359 Parameters:
1360
1361 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1362
1363 """
1364 varlist = vars()
1365 inf = list(self._getcoordinfo())
1366 inf[2] = doppler
1367 self._setcoordinfo(inf)
1368 self._add_history("set_doppler", vars())
1369
1370 @asaplog_post_dec
1371 def set_freqframe(self, frame=None):
1372 """\
1373 Set the frame type of the Spectral Axis.
1374
1375 Parameters:
1376
1377 frame: an optional frame type, default 'LSRK'. Valid frames are:
1378 'TOPO', 'LSRD', 'LSRK', 'BARY',
1379 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1380
1381 Example::
1382
1383 scan.set_freqframe('BARY')
1384
1385 """
1386 frame = frame or rcParams['scantable.freqframe']
1387 varlist = vars()
1388 # "REST" is not implemented in casacore
1389 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1390 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1391 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1392 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1393
1394 if frame in valid:
1395 inf = list(self._getcoordinfo())
1396 inf[1] = frame
1397 self._setcoordinfo(inf)
1398 self._add_history("set_freqframe", varlist)
1399 else:
1400 msg = "Please specify a valid freq type. Valid types are:\n", valid
1401 raise TypeError(msg)
1402
1403 @asaplog_post_dec
1404 def set_dirframe(self, frame=""):
1405 """\
1406 Set the frame type of the Direction on the sky.
1407
1408 Parameters:
1409
1410 frame: an optional frame type, default ''. Valid frames are:
1411 'J2000', 'B1950', 'GALACTIC'
1412
1413 Example:
1414
1415 scan.set_dirframe('GALACTIC')
1416
1417 """
1418 varlist = vars()
1419 Scantable.set_dirframe(self, frame)
1420 self._add_history("set_dirframe", varlist)
1421
1422 def get_unit(self):
1423 """\
1424 Get the default unit set in this scantable
1425
1426 Returns:
1427
1428 A unit string
1429
1430 """
1431 inf = self._getcoordinfo()
1432 unit = inf[0]
1433 if unit == '': unit = 'channel'
1434 return unit
1435
1436 @asaplog_post_dec
1437 def get_abcissa(self, rowno=0):
1438 """\
1439 Get the abcissa in the current coordinate setup for the currently
1440 selected Beam/IF/Pol
1441
1442 Parameters:
1443
1444 rowno: an optional row number in the scantable. Default is the
1445 first row, i.e. rowno=0
1446
1447 Returns:
1448
1449 The abcissa values and the format string (as a dictionary)
1450
1451 """
1452 abc = self._getabcissa(rowno)
1453 lbl = self._getabcissalabel(rowno)
1454 return abc, lbl
1455
1456 @asaplog_post_dec
1457 def flag(self, mask=None, unflag=False, row=-1):
1458 """\
1459 Flag the selected data using an optional channel mask.
1460
1461 Parameters:
1462
1463 mask: an optional channel mask, created with create_mask. Default
1464 (no mask) is all channels.
1465
1466 unflag: if True, unflag the data
1467
1468 row: an optional row number in the scantable.
1469 Default -1 flags all rows
1470
1471 """
1472 varlist = vars()
1473 mask = mask or []
1474 self._flag(row, mask, unflag)
1475 self._add_history("flag", varlist)
1476
1477 @asaplog_post_dec
1478 def flag_row(self, rows=None, unflag=False):
1479 """\
1480 Flag the selected data in row-based manner.
1481
1482 Parameters:
1483
1484 rows: list of row numbers to be flagged. Default is no row
1485 (must be explicitly specified to execute row-based
1486 flagging).
1487
1488 unflag: if True, unflag the data.
1489
1490 """
1491 varlist = vars()
1492 if rows is None:
1493 rows = []
1494 self._flag_row(rows, unflag)
1495 self._add_history("flag_row", varlist)
1496
1497 @asaplog_post_dec
1498 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1499 """\
1500 Flag the selected data outside a specified range (in channel-base)
1501
1502 Parameters:
1503
1504 uthres: upper threshold.
1505
1506 dthres: lower threshold
1507
1508 clipoutside: True for flagging data outside the range
1509 [dthres:uthres].
1510 False for flagging data inside the range.
1511
1512 unflag: if True, unflag the data.
1513
1514 """
1515 varlist = vars()
1516 self._clip(uthres, dthres, clipoutside, unflag)
1517 self._add_history("clip", varlist)
1518
1519 @asaplog_post_dec
1520 def lag_flag(self, start, end, unit="MHz", insitu=None):
1521 """\
1522 Flag the data in 'lag' space by providing a frequency to remove.
1523 Flagged data in the scantable get interpolated over the region.
1524 No taper is applied.
1525
1526 Parameters:
1527
1528 start: the start frequency (really a period within the
1529 bandwidth) or period to remove
1530
1531 end: the end frequency or period to remove
1532
1533 unit: the frequency unit (default 'MHz') or '' for
1534 explicit lag channels
1535
1536 *Notes*:
1537
1538 It is recommended to flag edges of the band or strong
1539 signals beforehand.
1540
1541 """
1542 if insitu is None: insitu = rcParams['insitu']
1543 self._math._setinsitu(insitu)
1544 varlist = vars()
1545 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1546 if not (unit == "" or base.has_key(unit)):
1547 raise ValueError("%s is not a valid unit." % unit)
1548 if unit == "":
1549 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1550 else:
1551 s = scantable(self._math._lag_flag(self, start*base[unit],
1552 end*base[unit], "frequency"))
1553 s._add_history("lag_flag", varlist)
1554 if insitu:
1555 self._assign(s)
1556 else:
1557 return s
1558
1559 @asaplog_post_dec
1560 def fft(self, rowno=None, mask=None, getrealimag=False):
1561 """\
1562 Apply FFT to the spectra.
1563 Flagged data in the scantable get interpolated over the region.
1564
1565 Parameters:
1566
1567 rowno: The row number(s) to be processed. int, list
1568 and tuple are accepted. By default (None), FFT
1569 is applied to the whole data.
1570
1571 mask: Auxiliary channel mask(s). Given as a boolean
1572 list, it is applied to all specified rows.
1573 A list of boolean lists can also be used to
1574 apply different masks. In the latter case, the
1575 length of 'mask' must be the same as that of
1576 'rowno'. The default is None.
1577
1578 getrealimag: If True, returns the real and imaginary part
1579 values of the complex results.
1580 If False (the default), returns the amplitude
1581 (absolute value) normalised with Ndata/2 and
1582 phase (argument, in unit of radian).
1583
1584 Returns:
1585
1586 A list of dictionaries containing the results for each spectrum.
1587 Each dictionary contains two values, the real and the imaginary
1588 parts when getrealimag = True, or the amplitude(absolute value)
1589 and the phase(argument) when getrealimag = False. The key for
1590 these values are 'real' and 'imag', or 'ampl' and 'phase',
1591 respectively.
1592 """
1593 if rowno is None:
1594 rowno = []
1595 if isinstance(rowno, int):
1596 rowno = [rowno]
1597 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1598 raise TypeError("The row number(s) must be int, list or tuple.")
1599 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1600
1601 usecommonmask = True
1602
1603 if mask is None:
1604 mask = []
1605 if isinstance(mask, list) or isinstance(mask, tuple):
1606 if len(mask) == 0:
1607 mask = [[]]
1608 else:
1609 if isinstance(mask[0], bool):
1610 if len(mask) != self.nchan(self.getif(rowno[0])):
1611 raise ValueError("The spectra and the mask have "
1612 "different length.")
1613 mask = [mask]
1614 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1615 usecommonmask = False
1616 if len(mask) != len(rowno):
1617 raise ValueError("When specifying masks for each "
1618 "spectrum, the numbers of them "
1619 "must be identical.")
1620 for i in xrange(mask):
1621 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1622 raise ValueError("The spectra and the mask have "
1623 "different length.")
1624 else:
1625 raise TypeError("The mask must be a boolean list or "
1626 "a list of boolean list.")
1627 else:
1628 raise TypeError("The mask must be a boolean list or a list of "
1629 "boolean list.")
1630
1631 res = []
1632
1633 imask = 0
1634 for whichrow in rowno:
1635 fspec = self._fft(whichrow, mask[imask], getrealimag)
1636 nspec = len(fspec)
1637
1638 i = 0
1639 v1 = []
1640 v2 = []
1641 reselem = {"real":[],"imag":[]} if getrealimag \
1642 else {"ampl":[],"phase":[]}
1643
1644 while (i < nspec):
1645 v1.append(fspec[i])
1646 v2.append(fspec[i+1])
1647 i += 2
1648
1649 if getrealimag:
1650 reselem["real"] += v1
1651 reselem["imag"] += v2
1652 else:
1653 reselem["ampl"] += v1
1654 reselem["phase"] += v2
1655
1656 res.append(reselem)
1657
1658 if not usecommonmask:
1659 imask += 1
1660
1661 return res
1662
1663 @asaplog_post_dec
1664 def create_mask(self, *args, **kwargs):
1665 """\
1666 Compute and return a mask based on [min, max] windows.
1667 The specified windows are to be INCLUDED, when the mask is
1668 applied.
1669
1670 Parameters:
1671
1672 [min, max], [min2, max2], ...
1673 Pairs of start/end points (inclusive)specifying the regions
1674 to be masked
1675
1676 invert: optional argument. If specified as True,
1677 return an inverted mask, i.e. the regions
1678 specified are EXCLUDED
1679
1680 row: create the mask using the specified row for
1681 unit conversions, default is row=0
1682 only necessary if frequency varies over rows.
1683
1684 Examples::
1685
1686 scan.set_unit('channel')
1687 # a)
1688 msk = scan.create_mask([400, 500], [800, 900])
1689 # masks everything outside 400 and 500
1690 # and 800 and 900 in the unit 'channel'
1691
1692 # b)
1693 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1694 # masks the regions between 400 and 500
1695 # and 800 and 900 in the unit 'channel'
1696
1697 # c)
1698 #mask only channel 400
1699 msk = scan.create_mask([400])
1700
1701 """
1702 row = kwargs.get("row", 0)
1703 data = self._getabcissa(row)
1704 u = self._getcoordinfo()[0]
1705 if u == "":
1706 u = "channel"
1707 msg = "The current mask window unit is %s" % u
1708 i = self._check_ifs()
1709 if not i:
1710 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1711 asaplog.push(msg)
1712 n = len(data)
1713 msk = _n_bools(n, False)
1714 # test if args is a 'list' or a 'normal *args - UGLY!!!
1715
1716 ws = (isinstance(args[-1][-1], int)
1717 or isinstance(args[-1][-1], float)) and args or args[0]
1718 for window in ws:
1719 if len(window) == 1:
1720 window = [window[0], window[0]]
1721 if len(window) == 0 or len(window) > 2:
1722 raise ValueError("A window needs to be defined as "
1723 "[start(, end)]")
1724 if window[0] > window[1]:
1725 tmp = window[0]
1726 window[0] = window[1]
1727 window[1] = tmp
1728 for i in range(n):
1729 if data[i] >= window[0] and data[i] <= window[1]:
1730 msk[i] = True
1731 if kwargs.has_key('invert'):
1732 if kwargs.get('invert'):
1733 msk = mask_not(msk)
1734 return msk
1735
1736 def get_masklist(self, mask=None, row=0, silent=False):
1737 """\
1738 Compute and return a list of mask windows, [min, max].
1739
1740 Parameters:
1741
1742 mask: channel mask, created with create_mask.
1743
1744 row: calcutate the masklist using the specified row
1745 for unit conversions, default is row=0
1746 only necessary if frequency varies over rows.
1747
1748 Returns:
1749
1750 [min, max], [min2, max2], ...
1751 Pairs of start/end points (inclusive)specifying
1752 the masked regions
1753
1754 """
1755 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1756 raise TypeError("The mask should be list or tuple.")
1757 if len(mask) <= 0:
1758 raise TypeError("The mask elements should be > 0")
1759 data = self._getabcissa(row)
1760 if len(data) != len(mask):
1761 msg = "Number of channels in scantable != number of mask elements"
1762 raise TypeError(msg)
1763 u = self._getcoordinfo()[0]
1764 if u == "":
1765 u = "channel"
1766 msg = "The current mask window unit is %s" % u
1767 i = self._check_ifs()
1768 if not i:
1769 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1770 if not silent:
1771 asaplog.push(msg)
1772 masklist = []
1773 ist, ien = None, None
1774 ist, ien=self.get_mask_indices(mask)
1775 if ist is not None and ien is not None:
1776 for i in xrange(len(ist)):
1777 range=[data[ist[i]],data[ien[i]]]
1778 range.sort()
1779 masklist.append([range[0],range[1]])
1780 return masklist
1781
1782 def get_mask_indices(self, mask=None):
1783 """\
1784 Compute and Return lists of mask start indices and mask end indices.
1785
1786 Parameters:
1787
1788 mask: channel mask, created with create_mask.
1789
1790 Returns:
1791
1792 List of mask start indices and that of mask end indices,
1793 i.e., [istart1,istart2,....], [iend1,iend2,....].
1794
1795 """
1796 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1797 raise TypeError("The mask should be list or tuple.")
1798 if len(mask) <= 0:
1799 raise TypeError("The mask elements should be > 0")
1800 istart = []
1801 iend = []
1802 if mask[0]:
1803 istart.append(0)
1804 for i in range(len(mask)-1):
1805 if not mask[i] and mask[i+1]:
1806 istart.append(i+1)
1807 elif mask[i] and not mask[i+1]:
1808 iend.append(i)
1809 if mask[len(mask)-1]:
1810 iend.append(len(mask)-1)
1811 if len(istart) != len(iend):
1812 raise RuntimeError("Numbers of mask start != mask end.")
1813 for i in range(len(istart)):
1814 if istart[i] > iend[i]:
1815 raise RuntimeError("Mask start index > mask end index")
1816 break
1817 return istart,iend
1818
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*'   = all spws (all channels)
                           '<2,4~6,9'  = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws whose central frequency
                                          falls in frequency range between
                                          1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels which fall
                                                     between the specified
                                                     frequency range in spws
                                                     whose central frequency
                                                     falls in the specified
                                                     frequency range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
            restfreq: the rest frequency.
                     examples: '115.2712GHz', 115271201800.0
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
            doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): this only logs an error message; it does not raise,
        # so execution continues with the non-string argument.
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # work in channel units; the original unit is restored at the end
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        # each comma-separated element is one spw:channel combination
        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # exactly one separator present <=> one length is 2 and
                # the others are 1, giving a product of 2
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            # select spws whose CENTRE frequency falls in
                            # the given frequency range
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            expr_fmin = min(expr_f0, expr_f1)
                            expr_fmax = max(expr_f0, expr_f1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                """
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False
                                """

                                crd = coord['coord']
                                fhead = crd.to_frequency(0)
                                ftail = crd.to_frequency(self.nchan(spw) - 1)
                                fcen  = (fhead + ftail) / 2.0

                                if ((expr_fmin <= fcen) and (fcen <= expr_fmax)):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            # select spws whose centre channel velocity
                            # falls in the given velocity range
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                """
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False
                                """

                                crd = coord['coord']
                                fhead = crd.to_frequency(0)
                                ftail = crd.to_frequency(self.nchan(spw) - 1)
                                fcen  = (fhead + ftail) / 2.0
                                vcen  = crd.to_velocity(crd.to_pixel(fcen))

                                if ((expr_vmin <= vcen) and (vcen <= expr_vmax)):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    # convert velocity -> frequency -> pixel
                                    # via the spw's rest frequency
                                    restf = self.get_restfreqs()[molid][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    dppl = self.get_doppler()
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                # (round the fractional pixel bounds to the
                                # nearest integer channel)
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if (len(crange_list[0]) > 0):
                    if res.has_key(spw):
                        res[spw].extend(crange_list)
                    else:
                        res[spw] = crange_list

        for spw in res.keys():
            if spw in valid_ifs:
                # remove duplicated channel ranges
                for i in reversed(xrange(len(res[spw]))):
                    for j in xrange(i):
                        if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \
                           (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):
                            asaplog.post()
                            merge_warn_mesg = "Spw " + str(spw) + ": overwrapping channel ranges are merged."
                            asaplog.push(merge_warn_mesg)
                            asaplog.post('WARN')
                            res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])
                            res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])
                            res[spw].pop(i)
                            break
            else:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)

        return res
2211
2212 @asaplog_post_dec
2213 def get_first_rowno_by_if(self, ifno):
2214 found = False
2215 for irow in xrange(self.nrow()):
2216 if (self.getif(irow) == ifno):
2217 res = irow
2218 found = True
2219 break
2220
2221 if not found: raise RuntimeError("No valid spw.")
2222
2223 return res
2224
2225 @asaplog_post_dec
2226 def _get_coordinate_list(self):
2227 res = []
2228 spws = self.getifnos()
2229 for spw in spws:
2230 elem = {}
2231 elem['if'] = spw
2232 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2233 res.append(elem)
2234
2235 return res
2236
2237 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                # NOTE(review): this emptiness check is dead code — the
                # enclosing 'for ispw in selspws' never runs when selspws
                # is empty.
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # linear scan for the first row of this IF; 'ifrow'
                        # keeps its value after the break
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset is half a channel width in the current unit,
                        # used to make '<'/'>' selections exclusive
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None for "whole spectrum" selections
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            # dict.has_key is Python-2 only
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2374
2375 @asaplog_post_dec
2376 def parse_idx_selection(self, mode, selexpr):
2377 """
2378 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2379 BEAMNO, and row number
2380
2381 Parameters:
2382 mode : which column to select.
2383 ['scan',|'if'|'pol'|'beam'|'row']
2384 selexpr : A comma separated selection expression.
2385 examples:
2386 '' = all (returns [])
2387 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2388 (returns [0,1,4,5,6,9])
2389 Returns:
2390 A List of selected indices
2391 """
2392 if selexpr == "":
2393 return []
2394 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2395 'b': 'beam', 'r': 'row'}
2396 smode = mode.lower()[0]
2397 if not (smode in valid_modes.keys()):
2398 msg = "Invalid mode '%s'. Valid modes are %s" %\
2399 (mode, str(valid_modes.values()))
2400 asaplog.post()
2401 asaplog.push(msg)
2402 asaplog.post("ERROR")
2403 mode = valid_modes[smode]
2404 minidx = None
2405 maxidx = None
2406 if smode == 'r':
2407 minidx = 0
2408 maxidx = self.nrow()
2409 else:
2410 idx = getattr(self,"get"+mode+"nos")()
2411 minidx = min(idx)
2412 maxidx = max(idx)
2413 del idx
2414 # split selexpr by "<spaces>,<spaces>"
2415 comma_sep = re.compile('\s*,\s*')
2416 sellist = comma_sep.split(selexpr)
2417 idxlist = []
2418 for currselstr in sellist:
2419 # single range (may include ~, < or >)
2420 currlist = self._parse_selection(currselstr, typestr='integer',
2421 minval=minidx,maxval=maxidx)
2422 for thelist in currlist:
2423 idxlist += range(thelist[0],thelist[1]+1)
2424 # remove duplicated elements after first ones
2425 for i in reversed(xrange(len(idxlist))):
2426 if idxlist.index(idxlist[i]) < i:
2427 idxlist.pop(i)
2428 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2429 asaplog.push(msg)
2430 return idxlist
2431
2432 def _parse_selection(self, selstr, typestr='float', offset=0.,
2433 minval=None, maxval=None):
2434 """
2435 Parameters:
2436 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2437 typestr : The type of the values in returned list
2438 ('integer' or 'float')
2439 offset : The offset value to subtract from or add to
2440 the boundary value if the selection string
2441 includes '<' or '>' [Valid only for typestr='float']
2442 minval, maxval : The minimum/maximum values to set if the
2443 selection string includes '<' or '>'.
2444 The list element is filled with None by default.
2445 Returns:
2446 A list of min/max pair of selections.
2447 Example:
2448 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2449 --> returns [[0,2],[5,7],[9,9]]
2450 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2451 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2452 """
2453 # split selstr by '<spaces>;<spaces>'
2454 semi_sep = re.compile('\s*;\s*')
2455 selgroups = semi_sep.split(selstr)
2456 sellists = []
2457 if typestr.lower().startswith('int'):
2458 formatfunc = int
2459 offset = 1
2460 else:
2461 formatfunc = float
2462
2463 for currsel in selgroups:
2464 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2465 minsel = minval
2466 maxsel = maxval
2467 if currsel.find('~') > 0:
2468 # val0 <= x <= val1
2469 minsel = formatfunc(currsel.split('~')[0].strip())
2470 maxsel = formatfunc(currsel.split('~')[1].strip())
2471 elif currsel.strip().find('<=') > -1:
2472 bound = currsel.split('<=')
2473 try: # try "x <= val"
2474 minsel = minval
2475 maxsel = formatfunc(bound[1].strip())
2476 except ValueError: # now "val <= x"
2477 minsel = formatfunc(bound[0].strip())
2478 maxsel = maxval
2479 elif currsel.strip().find('>=') > -1:
2480 bound = currsel.split('>=')
2481 try: # try "x >= val"
2482 minsel = formatfunc(bound[1].strip())
2483 maxsel = maxval
2484 except ValueError: # now "val >= x"
2485 minsel = minval
2486 maxsel = formatfunc(bound[0].strip())
2487 elif currsel.strip().find('<') > -1:
2488 bound = currsel.split('<')
2489 try: # try "x < val"
2490 minsel = minval
2491 maxsel = formatfunc(bound[1].strip()) \
2492 - formatfunc(offset)
2493 except ValueError: # now "val < x"
2494 minsel = formatfunc(bound[0].strip()) \
2495 + formatfunc(offset)
2496 maxsel = maxval
2497 elif currsel.strip().find('>') > -1:
2498 bound = currsel.split('>')
2499 try: # try "x > val"
2500 minsel = formatfunc(bound[1].strip()) \
2501 + formatfunc(offset)
2502 maxsel = maxval
2503 except ValueError: # now "val > x"
2504 minsel = minval
2505 maxsel = formatfunc(bound[0].strip()) \
2506 - formatfunc(offset)
2507 else:
2508 minsel = formatfunc(currsel)
2509 maxsel = formatfunc(currsel)
2510 sellists.append([minsel,maxsel])
2511 return sellists
2512
2513# def get_restfreqs(self):
2514# """
2515# Get the restfrequency(s) stored in this scantable.
2516# The return value(s) are always of unit 'Hz'
2517# Parameters:
2518# none
2519# Returns:
2520# a list of doubles
2521# """
2522# return list(self._getrestfreqs())
2523
2524 def get_restfreqs(self, ids=None):
2525 """\
2526 Get the restfrequency(s) stored in this scantable.
2527 The return value(s) are always of unit 'Hz'
2528
2529 Parameters:
2530
2531 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2532 be retrieved
2533
2534 Returns:
2535
2536 dictionary containing ids and a list of doubles for each id
2537
2538 """
2539 if ids is None:
2540 rfreqs = {}
2541 idlist = self.getmolnos()
2542 for i in idlist:
2543 rfreqs[i] = list(self._getrestfreqs(i))
2544 return rfreqs
2545 else:
2546 if type(ids) == list or type(ids) == tuple:
2547 rfreqs = {}
2548 for i in ids:
2549 rfreqs[i] = list(self._getrestfreqs(i))
2550 return rfreqs
2551 else:
2552 return list(self._getrestfreqs(ids))
2553
2554 @asaplog_post_dec
2555 def set_restfreqs(self, freqs=None, unit='Hz'):
2556 """\
2557 Set or replace the restfrequency specified and
2558 if the 'freqs' argument holds a scalar,
2559 then that rest frequency will be applied to all the selected
2560 data. If the 'freqs' argument holds
2561 a vector, then it MUST be of equal or smaller length than
2562 the number of IFs (and the available restfrequencies will be
2563 replaced by this vector). In this case, *all* data have
2564 the restfrequency set per IF according
2565 to the corresponding value you give in the 'freqs' vector.
2566 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2567 IF 1 gets restfreq 2e9.
2568
2569 You can also specify the frequencies via a linecatalog.
2570
2571 Parameters:
2572
2573 freqs: list of rest frequency values or string idenitfiers
2574
2575 unit: unit for rest frequency (default 'Hz')
2576
2577
2578 Example::
2579
2580 # set the given restfrequency for the all currently selected IFs
2581 scan.set_restfreqs(freqs=1.4e9)
2582 # set restfrequencies for the n IFs (n > 1) in the order of the
2583 # list, i.e
2584 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2585 # len(list_of_restfreqs) == nIF
2586 # for nIF == 1 the following will set multiple restfrequency for
2587 # that IF
2588 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2589 # set multiple restfrequencies per IF. as a list of lists where
2590 # the outer list has nIF elements, the inner s arbitrary
2591 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2592
2593 *Note*:
2594
2595 To do more sophisticate Restfrequency setting, e.g. on a
2596 source and IF basis, use scantable.set_selection() before using
2597 this function::
2598
2599 # provided your scantable is called scan
2600 selection = selector()
2601 selection.set_name('ORION*')
2602 selection.set_ifs([1])
2603 scan.set_selection(selection)
2604 scan.set_restfreqs(freqs=86.6e9)
2605
2606 """
2607 varlist = vars()
2608 from asap import linecatalog
2609 # simple value
2610 if isinstance(freqs, int) or isinstance(freqs, float):
2611 self._setrestfreqs([freqs], [""], unit)
2612 # list of values
2613 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2614 # list values are scalars
2615 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2616 if len(freqs) == 1:
2617 self._setrestfreqs(freqs, [""], unit)
2618 else:
2619 # allow the 'old' mode of setting mulitple IFs
2620 savesel = self._getselection()
2621 sel = self.get_selection()
2622 iflist = self.getifnos()
2623 if len(freqs)>len(iflist):
2624 raise ValueError("number of elements in list of list "
2625 "exeeds the current IF selections")
2626 iflist = self.getifnos()
2627 for i, fval in enumerate(freqs):
2628 sel.set_ifs(iflist[i])
2629 self._setselection(sel)
2630 self._setrestfreqs([fval], [""], unit)
2631 self._setselection(savesel)
2632
2633 # list values are dict, {'value'=, 'name'=)
2634 elif isinstance(freqs[-1], dict):
2635 values = []
2636 names = []
2637 for d in freqs:
2638 values.append(d["value"])
2639 names.append(d["name"])
2640 self._setrestfreqs(values, names, unit)
2641 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2642 savesel = self._getselection()
2643 sel = self.get_selection()
2644 iflist = self.getifnos()
2645 if len(freqs)>len(iflist):
2646 raise ValueError("number of elements in list of list exeeds"
2647 " the current IF selections")
2648 for i, fval in enumerate(freqs):
2649 sel.set_ifs(iflist[i])
2650 self._setselection(sel)
2651 self._setrestfreqs(fval, [""], unit)
2652 self._setselection(savesel)
2653 # freqs are to be taken from a linecatalog
2654 elif isinstance(freqs, linecatalog):
2655 savesel = self._getselection()
2656 sel = self.get_selection()
2657 for i in xrange(freqs.nrow()):
2658 sel.set_ifs(iflist[i])
2659 self._setselection(sel)
2660 self._setrestfreqs([freqs.get_frequency(i)],
2661 [freqs.get_name(i)], "MHz")
2662 # ensure that we are not iterating past nIF
2663 if i == self.nif()-1: break
2664 self._setselection(savesel)
2665 else:
2666 return
2667 self._add_history("set_restfreqs", varlist)
2668
2669 @asaplog_post_dec
2670 def shift_refpix(self, delta):
2671 """\
2672 Shift the reference pixel of the Spectra Coordinate by an
2673 integer amount.
2674
2675 Parameters:
2676
2677 delta: the amount to shift by
2678
2679 *Note*:
2680
2681 Be careful using this with broadband data.
2682
2683 """
2684 varlist = vars()
2685 Scantable.shift_refpix(self, delta)
2686 s._add_history("shift_refpix", varlist)
2687
2688 @asaplog_post_dec
2689 def history(self, filename=None, nrows=-1, start=0):
2690 """\
2691 Print the history. Optionally to a file.
2692
2693 Parameters:
2694
2695 filename: The name of the file to save the history to.
2696
2697 """
2698 n = self._historylength()
2699 if nrows == -1:
2700 nrows = n
2701 if start+nrows > n:
2702 nrows = nrows-start
2703 if n > 1000 and nrows == n:
2704 nrows = 1000
2705 start = n-1000
2706 asaplog.push("Warning: History has {0} entries. Displaying last "
2707 "1000".format(n))
2708 hist = list(self._gethistory(nrows, start))
2709 out = "-"*80
2710 for h in hist:
2711 if not h.strip():
2712 continue
2713 if h.find("---") >-1:
2714 continue
2715 else:
2716 items = h.split("##")
2717 date = items[0]
2718 func = items[1]
2719 items = items[2:]
2720 out += "\n"+date+"\n"
2721 out += "Function: %s\n Parameters:" % (func)
2722 for i in items:
2723 if i == '':
2724 continue
2725 s = i.split("=")
2726 out += "\n %s = %s" % (s[0], s[1])
2727 out = "\n".join([out, "*"*80])
2728 if filename is not None:
2729 if filename is "":
2730 filename = 'scantable_history.txt'
2731 filename = os.path.expandvars(os.path.expanduser(filename))
2732 if not os.path.isdir(filename):
2733 data = open(filename, 'w')
2734 data.write(out)
2735 data.close()
2736 else:
2737 msg = "Illegal file name '%s'." % (filename)
2738 raise IOError(msg)
2739 return page(out)
2740
2741 #
2742 # Maths business
2743 #
2744 @asaplog_post_dec
2745 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2746 avmode="NONE"):
2747 """\
2748 Return the (time) weighted average of a scan. Scans will be averaged
2749 only if the source direction (RA/DEC) is within 1' otherwise
2750
2751 *Note*:
2752
2753 in channels only - align if necessary
2754
2755 Parameters:
2756
2757 mask: an optional mask (only used for 'var' and 'tsys'
2758 weighting)
2759
2760 scanav: True averages each scan separately
2761 False (default) averages all scans together,
2762
2763 weight: Weighting scheme.
2764 'none' (mean no weight)
2765 'var' (1/var(spec) weighted)
2766 'tsys' (1/Tsys**2 weighted)
2767 'tint' (integration time weighted)
2768 'tintsys' (Tint/Tsys**2)
2769 'median' ( median averaging)
2770 The default is 'tint'
2771
2772 align: align the spectra in velocity before averaging. It takes
2773 the time of the first spectrum as reference time.
2774 avmode: 'SOURCE' - also select by source name - or
2775 'NONE' (default). Not applicable for scanav=True or
2776 weight=median
2777
2778 Example::
2779
2780 # time average the scantable without using a mask
2781 newscan = scan.average_time()
2782
2783 """
2784 varlist = vars()
2785 weight = weight or 'TINT'
2786 mask = mask or ()
2787 scanav = (scanav and 'SCAN') or avmode.upper()
2788 scan = (self, )
2789
2790 if align:
2791 scan = (self.freq_align(insitu=False), )
2792 asaplog.push("Note: Alignment is don on a source-by-source basis")
2793 asaplog.push("Note: Averaging (by default) is not")
2794 # we need to set it to SOURCE averaging here
2795 s = None
2796 if weight.upper() == 'MEDIAN':
2797 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2798 scanav))
2799 else:
2800 s = scantable(self._math._average(scan, mask, weight.upper(),
2801 scanav))
2802 s._add_history("average_time", varlist)
2803 return s
2804
2805 @asaplog_post_dec
2806 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2807 """\
2808 Return a scan where all spectra are converted to either
2809 Jansky or Kelvin depending upon the flux units of the scan table.
2810 By default the function tries to look the values up internally.
2811 If it can't find them (or if you want to over-ride), you must
2812 specify EITHER jyperk OR eta (and D which it will try to look up
2813 also if you don't set it). jyperk takes precedence if you set both.
2814
2815 Parameters:
2816
2817 jyperk: the Jy / K conversion factor
2818
2819 eta: the aperture efficiency
2820
2821 d: the geometric diameter (metres)
2822
2823 insitu: if False a new scantable is returned.
2824 Otherwise, the scaling is done in-situ
2825 The default is taken from .asaprc (False)
2826
2827 """
2828 if insitu is None: insitu = rcParams['insitu']
2829 self._math._setinsitu(insitu)
2830 varlist = vars()
2831 jyperk = jyperk or -1.0
2832 d = d or -1.0
2833 eta = eta or -1.0
2834 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2835 s._add_history("convert_flux", varlist)
2836 if insitu: self._assign(s)
2837 else: return s
2838
2839 @asaplog_post_dec
2840 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2841 """\
2842 Return a scan after applying a gain-elevation correction.
2843 The correction can be made via either a polynomial or a
2844 table-based interpolation (and extrapolation if necessary).
2845 You specify polynomial coefficients, an ascii table or neither.
2846 If you specify neither, then a polynomial correction will be made
2847 with built in coefficients known for certain telescopes (an error
2848 will occur if the instrument is not known).
2849 The data and Tsys are *divided* by the scaling factors.
2850
2851 Parameters:
2852
2853 poly: Polynomial coefficients (default None) to compute a
2854 gain-elevation correction as a function of
2855 elevation (in degrees).
2856
2857 filename: The name of an ascii file holding correction factors.
2858 The first row of the ascii file must give the column
2859 names and these MUST include columns
2860 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2861 by this) somewhere.
2862 The second row must give the data type of the
2863 column. Use 'R' for Real and 'I' for Integer.
2864 An example file would be
2865 (actual factors are arbitrary) :
2866
2867 TIME ELEVATION FACTOR
2868 R R R
2869 0.1 0 0.8
2870 0.2 20 0.85
2871 0.3 40 0.9
2872 0.4 60 0.85
2873 0.5 80 0.8
2874 0.6 90 0.75
2875
2876 method: Interpolation method when correcting from a table.
2877 Values are 'nearest', 'linear' (default), 'cubic'
2878 and 'spline'
2879
2880 insitu: if False a new scantable is returned.
2881 Otherwise, the scaling is done in-situ
2882 The default is taken from .asaprc (False)
2883
2884 """
2885
2886 if insitu is None: insitu = rcParams['insitu']
2887 self._math._setinsitu(insitu)
2888 varlist = vars()
2889 poly = poly or ()
2890 from os.path import expandvars
2891 filename = expandvars(filename)
2892 s = scantable(self._math._gainel(self, poly, filename, method))
2893 s._add_history("gain_el", varlist)
2894 if insitu:
2895 self._assign(s)
2896 else:
2897 return s
2898
2899 @asaplog_post_dec
2900 def freq_align(self, reftime=None, method='cubic', insitu=None):
2901 """\
2902 Return a scan where all rows have been aligned in frequency/velocity.
2903 The alignment frequency frame (e.g. LSRK) is that set by function
2904 set_freqframe.
2905
2906 Parameters:
2907
2908 reftime: reference time to align at. By default, the time of
2909 the first row of data is used.
2910
2911 method: Interpolation method for regridding the spectra.
2912 Choose from 'nearest', 'linear', 'cubic' (default)
2913 and 'spline'
2914
2915 insitu: if False a new scantable is returned.
2916 Otherwise, the scaling is done in-situ
2917 The default is taken from .asaprc (False)
2918
2919 """
2920 if insitu is None: insitu = rcParams["insitu"]
2921 oldInsitu = self._math._insitu()
2922 self._math._setinsitu(insitu)
2923 varlist = vars()
2924 reftime = reftime or ""
2925 s = scantable(self._math._freq_align(self, reftime, method))
2926 s._add_history("freq_align", varlist)
2927 self._math._setinsitu(oldInsitu)
2928 if insitu:
2929 self._assign(s)
2930 else:
2931 return s
2932
2933 @asaplog_post_dec
2934 def opacity(self, tau=None, insitu=None):
2935 """\
2936 Apply an opacity correction. The data
2937 and Tsys are multiplied by the correction factor.
2938
2939 Parameters:
2940
2941 tau: (list of) opacity from which the correction factor is
2942 exp(tau*ZD)
2943 where ZD is the zenith-distance.
2944 If a list is provided, it has to be of length nIF,
2945 nIF*nPol or 1 and in order of IF/POL, e.g.
2946 [opif0pol0, opif0pol1, opif1pol0 ...]
2947 if tau is `None` the opacities are determined from a
2948 model.
2949
2950 insitu: if False a new scantable is returned.
2951 Otherwise, the scaling is done in-situ
2952 The default is taken from .asaprc (False)
2953
2954 """
2955 if insitu is None:
2956 insitu = rcParams['insitu']
2957 self._math._setinsitu(insitu)
2958 varlist = vars()
2959 if not hasattr(tau, "__len__"):
2960 tau = [tau]
2961 s = scantable(self._math._opacity(self, tau))
2962 s._add_history("opacity", varlist)
2963 if insitu:
2964 self._assign(s)
2965 else:
2966 return s
2967
2968 @asaplog_post_dec
2969 def bin(self, width=5, insitu=None):
2970 """\
2971 Return a scan where all spectra have been binned up.
2972
2973 Parameters:
2974
2975 width: The bin width (default=5) in pixels
2976
2977 insitu: if False a new scantable is returned.
2978 Otherwise, the scaling is done in-situ
2979 The default is taken from .asaprc (False)
2980
2981 """
2982 if insitu is None:
2983 insitu = rcParams['insitu']
2984 self._math._setinsitu(insitu)
2985 varlist = vars()
2986 s = scantable(self._math._bin(self, width))
2987 s._add_history("bin", varlist)
2988 if insitu:
2989 self._assign(s)
2990 else:
2991 return s
2992
2993 @asaplog_post_dec
2994 def reshape(self, first, last, insitu=None):
2995 """Resize the band by providing first and last channel.
2996 This will cut off all channels outside [first, last].
2997 """
2998 if insitu is None:
2999 insitu = rcParams['insitu']
3000 varlist = vars()
3001 if last < 0:
3002 last = self.nchan()-1 + last
3003 s = None
3004 if insitu:
3005 s = self
3006 else:
3007 s = self.copy()
3008 s._reshape(first,last)
3009 s._add_history("reshape", varlist)
3010 if not insitu:
3011 return s
3012
3013 @asaplog_post_dec
3014 def resample(self, width=5, method='cubic', insitu=None):
3015 """\
3016 Return a scan where all spectra have been binned up.
3017
3018 Parameters:
3019
3020 width: The bin width (default=5) in pixels
3021
3022 method: Interpolation method when correcting from a table.
3023 Values are 'nearest', 'linear', 'cubic' (default)
3024 and 'spline'
3025
3026 insitu: if False a new scantable is returned.
3027 Otherwise, the scaling is done in-situ
3028 The default is taken from .asaprc (False)
3029
3030 """
3031 if insitu is None:
3032 insitu = rcParams['insitu']
3033 self._math._setinsitu(insitu)
3034 varlist = vars()
3035 s = scantable(self._math._resample(self, method, width))
3036 s._add_history("resample", varlist)
3037 if insitu:
3038 self._assign(s)
3039 else:
3040 return s
3041
3042 @asaplog_post_dec
3043 def average_pol(self, mask=None, weight='none'):
3044 """\
3045 Average the Polarisations together.
3046
3047 Parameters:
3048
3049 mask: An optional mask defining the region, where the
3050 averaging will be applied. The output will have all
3051 specified points masked.
3052
3053 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3054 weighted), or 'tsys' (1/Tsys**2 weighted)
3055
3056 """
3057 varlist = vars()
3058 mask = mask or ()
3059 s = scantable(self._math._averagepol(self, mask, weight.upper()))
3060 s._add_history("average_pol", varlist)
3061 return s
3062
3063 @asaplog_post_dec
3064 def average_beam(self, mask=None, weight='none'):
3065 """\
3066 Average the Beams together.
3067
3068 Parameters:
3069 mask: An optional mask defining the region, where the
3070 averaging will be applied. The output will have all
3071 specified points masked.
3072
3073 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3074 weighted), or 'tsys' (1/Tsys**2 weighted)
3075
3076 """
3077 varlist = vars()
3078 mask = mask or ()
3079 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3080 s._add_history("average_beam", varlist)
3081 return s
3082
3083 def parallactify(self, pflag):
3084 """\
3085 Set a flag to indicate whether this data should be treated as having
3086 been 'parallactified' (total phase == 0.0)
3087
3088 Parameters:
3089
3090 pflag: Bool indicating whether to turn this on (True) or
3091 off (False)
3092
3093 """
3094 varlist = vars()
3095 self._parallactify(pflag)
3096 self._add_history("parallactify", varlist)
3097
3098 @asaplog_post_dec
3099 def convert_pol(self, poltype=None):
3100 """\
3101 Convert the data to a different polarisation type.
3102 Note that you will need cross-polarisation terms for most conversions.
3103
3104 Parameters:
3105
3106 poltype: The new polarisation type. Valid types are:
3107 'linear', 'circular', 'stokes' and 'linpol'
3108
3109 """
3110 varlist = vars()
3111 s = scantable(self._math._convertpol(self, poltype))
3112 s._add_history("convert_pol", varlist)
3113 return s
3114
3115 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # initial prompt mode handed to _get_verify_action; once it becomes
        # "N" (user answered 'R'), all remaining rows are rejected and
        # plotting is skipped.
        # NOTE(review): meaning of 'H' is defined in _get_verify_action —
        # confirm there.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # interactive per-row approval: overlay original and smoothed
            # spectrum, then ask the user what to do with this row
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3207
3208 @asaplog_post_dec
3209 def regrid_channel(self, width=5, plot=False, insitu=None):
3210 """\
3211 Regrid the spectra by the specified channel width
3212
3213 Parameters:
3214
3215 width: The channel width (float) of regridded spectra
3216 in the current spectral unit.
3217
3218 plot: [NOT IMPLEMENTED YET]
3219 plot the original and the regridded spectra.
3220 In this each indivual fit has to be approved, by
3221 typing 'y' or 'n'
3222
3223 insitu: if False a new scantable is returned.
3224 Otherwise, the scaling is done in-situ
3225 The default is taken from .asaprc (False)
3226
3227 """
3228 if insitu is None: insitu = rcParams['insitu']
3229 varlist = vars()
3230
3231 if plot:
3232 asaplog.post()
3233 asaplog.push("Verification plot is not implemtnetd yet.")
3234 asaplog.post("WARN")
3235
3236 s = self.copy()
3237 s._regrid_specchan(width)
3238
3239 s._add_history("regrid_channel", varlist)
3240
3241# if plot:
3242# from asap.asapplotter import new_asaplot
3243# theplot = new_asaplot(rcParams['plotter.gui'])
3244# from matplotlib import rc as rcp
3245# rcp('lines', linewidth=1)
3246# theplot.set_panels()
3247# ylab=s._get_ordinate_label()
3248# #theplot.palette(0,["#777777","red"])
3249# for r in xrange(s.nrow()):
3250# xsm=s._getabcissa(r)
3251# ysm=s._getspectrum(r)
3252# xorg=orgscan._getabcissa(r)
3253# yorg=orgscan._getspectrum(r)
3254# theplot.clear()
3255# theplot.hold()
3256# theplot.set_axes('ylabel',ylab)
3257# theplot.set_axes('xlabel',s._getabcissalabel(r))
3258# theplot.set_axes('title',s._getsourcename(r))
3259# theplot.set_line(label='Original',color="#777777")
3260# theplot.plot(xorg,yorg)
3261# theplot.set_line(label='Smoothed',color="red")
3262# theplot.plot(xsm,ysm)
3263# ### Ugly part for legend
3264# for i in [0,1]:
3265# theplot.subplots[0]['lines'].append(
3266# [theplot.subplots[0]['axes'].lines[i]]
3267# )
3268# theplot.release()
3269# ### Ugly part for legend
3270# theplot.subplots[0]['lines']=[]
3271# res = raw_input("Accept smoothing ([y]/n): ")
3272# if res.upper() == 'N':
3273# s._setspectrum(yorg, r)
3274# theplot.quit()
3275# del theplot
3276# del orgscan
3277
3278 if insitu: self._assign(s)
3279 else: return s
3280
3281 @asaplog_post_dec
3282 def _parse_wn(self, wn):
3283 if isinstance(wn, list) or isinstance(wn, tuple):
3284 return wn
3285 elif isinstance(wn, int):
3286 return [ wn ]
3287 elif isinstance(wn, str):
3288 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3289 val = wn.split('-')
3290 val = [int(val[0]), int(val[1])]
3291 val.sort()
3292 res = [i for i in xrange(val[0], val[1]+1)]
3293 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3294 val = int(wn[2:])+1
3295 res = [i for i in xrange(val)]
3296 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3297 val = int(wn[:-2])+1
3298 res = [i for i in xrange(val)]
3299 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3300 val = int(wn[1:])
3301 res = [i for i in xrange(val)]
3302 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3303 val = int(wn[:-1])
3304 res = [i for i in xrange(val)]
3305 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3306 # then interpreted in C++
3307 # side as [a,a+1,...,a_nyq]
3308 # (CAS-3759)
3309 val = int(wn[2:])
3310 res = [val, -999]
3311 #res = [i for i in xrange(val, self.nchan()/2+1)]
3312 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3313 # then interpreted in C++
3314 # side as [a,a+1,...,a_nyq]
3315 # (CAS-3759)
3316 val = int(wn[:-2])
3317 res = [val, -999]
3318 #res = [i for i in xrange(val, self.nchan()/2+1)]
3319 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3320 # then interpreted in C++
3321 # side as [a+1,a+2,...,a_nyq]
3322 # (CAS-3759)
3323 val = int(wn[1:])+1
3324 res = [val, -999]
3325 #res = [i for i in xrange(val, self.nchan()/2+1)]
3326 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3327 # then interpreted in C++
3328 # side as [a+1,a+2,...,a_nyq]
3329 # (CAS-3759)
3330 val = int(wn[:-1])+1
3331 res = [val, -999]
3332 #res = [i for i in xrange(val, self.nchan()/2+1)]
3333
3334 return res
3335 else:
3336 msg = 'wrong value given for addwn/rejwn'
3337 raise RuntimeError(msg)
3338
3339 @asaplog_post_dec
3340 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3341 """\
3342 Subtract baseline based on parameters written in Baseline Table.
3343
3344 Parameters:
3345 insitu: if True, baseline fitting/subtraction is done
3346 in-situ. If False, a new scantable with
3347 baseline subtracted is returned. Actually,
3348 format of the returned value depends on both
3349 insitu and retfitres (see below).
3350 The default is taken from .asaprc (False)
3351 retfitres: if True, the results of baseline fitting (i.e.,
3352 coefficients and rms) are returned.
3353 default is False.
3354 The format of the returned value of this
3355 function varies as follows:
3356 (1) in case insitu=True and retfitres=True:
3357 fitting result.
3358 (2) in case insitu=True and retfitres=False:
3359 None.
3360 (3) in case insitu=False and retfitres=True:
3361 a dictionary containing a new scantable
3362 (with baseline subtracted) and the fitting
3363 results.
3364 (4) in case insitu=False and retfitres=False:
3365 a new scantable (with baseline subtracted).
3366 inbltable: name of input baseline table. The row number of
3367 scantable and that of inbltable must be
3368 identical.
3369 outbltable: name of output baseline table where baseline
3370 parameters and fitting results recorded.
3371 default is ''(no output).
3372 overwrite: if True when an existing baseline table is
3373 specified for outbltable, overwrites it.
3374 Otherwise there is no harm.
3375 default is False.
3376 """
3377
3378 try:
3379 varlist = vars()
3380 if retfitres is None: retfitres = False
3381 if inbltable is None: raise ValueError("bltable missing.")
3382 if outbltable is None: outbltable = ''
3383 if overwrite is None: overwrite = False
3384
3385 if insitu is None: insitu = rcParams['insitu']
3386 if insitu:
3387 workscan = self
3388 else:
3389 workscan = self.copy()
3390
3391 sres = workscan._apply_bltable(inbltable,
3392 retfitres,
3393 outbltable,
3394 os.path.exists(outbltable),
3395 overwrite)
3396 if retfitres: res = parse_fitresult(sres)
3397
3398 workscan._add_history('apply_bltable', varlist)
3399
3400 if insitu:
3401 self._assign(workscan)
3402 if retfitres:
3403 return res
3404 else:
3405 return None
3406 else:
3407 if retfitres:
3408 return {'scantable': workscan, 'fitresults': res}
3409 else:
3410 return workscan
3411
3412 except RuntimeError, e:
3413 raise_fitting_failure_exception(e)
3414
3415 @asaplog_post_dec
3416 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3417 """\
3418 Subtract baseline based on parameters written in the input list.
3419
3420 Parameters:
3421 insitu: if True, baseline fitting/subtraction is done
3422 in-situ. If False, a new scantable with
3423 baseline subtracted is returned. Actually,
3424 format of the returned value depends on both
3425 insitu and retfitres (see below).
3426 The default is taken from .asaprc (False)
3427 retfitres: if True, the results of baseline fitting (i.e.,
3428 coefficients and rms) are returned.
3429 default is False.
3430 The format of the returned value of this
3431 function varies as follows:
3432 (1) in case insitu=True and retfitres=True:
3433 fitting result.
3434 (2) in case insitu=True and retfitres=False:
3435 None.
3436 (3) in case insitu=False and retfitres=True:
3437 a dictionary containing a new scantable
3438 (with baseline subtracted) and the fitting
3439 results.
3440 (4) in case insitu=False and retfitres=False:
3441 a new scantable (with baseline subtracted).
3442 blinfo: baseline parameter set stored in a dictionary
3443 or a list of dictionary. Each dictionary
3444 corresponds to each spectrum and must contain
3445 the following keys and values:
3446 'row': row number,
3447 'blfunc': function name. available ones include
3448 'poly', 'chebyshev', 'cspline' and
3449 'sinusoid',
3450 'order': maximum order of polynomial. needed
3451 if blfunc='poly' or 'chebyshev',
3452 'npiece': number or piecewise polynomial.
3453 needed if blfunc='cspline',
3454 'nwave': a list of sinusoidal wave numbers.
3455 needed if blfunc='sinusoid', and
3456 'masklist': min-max windows for channel mask.
3457 the specified ranges will be used
3458 for fitting.
3459 bltable: name of output baseline table where baseline
3460 parameters and fitting results recorded.
3461 default is ''(no output).
3462 overwrite: if True when an existing baseline table is
3463 specified for bltable, overwrites it.
3464 Otherwise there is no harm.
3465 default is False.
3466
3467 Example:
3468 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3469 'masklist':[[10,350],[352,510]]},
3470 {'row':1, 'blfunc':'cspline', 'npiece':3,
3471 'masklist':[[3,16],[19,404],[407,511]]}
3472 ])
3473
3474 the first spectrum (row=0) will be fitted with polynomial
3475 of order=5 and the next one (row=1) will be fitted with cubic
3476 spline consisting of 3 pieces.
3477 """
3478
3479 try:
3480 varlist = vars()
3481 if retfitres is None: retfitres = False
3482 if blinfo is None: blinfo = []
3483 if bltable is None: bltable = ''
3484 if overwrite is None: overwrite = False
3485
3486 if insitu is None: insitu = rcParams['insitu']
3487 if insitu:
3488 workscan = self
3489 else:
3490 workscan = self.copy()
3491
3492 nrow = workscan.nrow()
3493
3494 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3495
3496 print "in_blinfo=< "+ str(in_blinfo)+" >"
3497
3498 sres = workscan._sub_baseline(in_blinfo,
3499 retfitres,
3500 bltable,
3501 os.path.exists(bltable),
3502 overwrite)
3503 if retfitres: res = parse_fitresult(sres)
3504
3505 workscan._add_history('sub_baseline', varlist)
3506
3507 if insitu:
3508 self._assign(workscan)
3509 if retfitres:
3510 return res
3511 else:
3512 return None
3513 else:
3514 if retfitres:
3515 return {'scantable': workscan, 'fitresults': res}
3516 else:
3517 return workscan
3518
3519 except RuntimeError, e:
3520 raise_fitting_failure_exception(e)
3521
3522 @asaplog_post_dec
3523 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3524 whichrow=None, uselinefinder=None, edge=None,
3525 threshold=None, chan_avg_limit=None):
3526 """\
3527 Calculates and returns model selection criteria for a specified
3528 baseline model and a given spectrum data.
3529 Available values include Akaike Information Criterion (AIC), the
3530 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3531 Bayesian Information Criterion (BIC) and the Generalised Cross
3532 Validation (GCV).
3533
3534 Parameters:
3535 value: name of model selection criteria to calculate.
3536 available ones include 'aic', 'aicc', 'bic' and
3537 'gcv'. default is 'aicc'.
3538 blfunc: baseline function name. available ones include
3539 'chebyshev', 'cspline' and 'sinusoid'.
3540 default is 'chebyshev'.
3541 order: parameter for basline function. actually stands for
3542 order of polynomial (order) for 'chebyshev',
3543 number of spline pieces (npiece) for 'cspline' and
3544 maximum wave number for 'sinusoid', respectively.
3545 default is 5 (which is also the default order value
3546 for [auto_]chebyshev_baseline()).
3547 mask: an optional mask. default is [].
3548 whichrow: row number. default is 0 (the first row)
3549 uselinefinder: use sd.linefinder() to flag out line regions
3550 default is True.
3551 edge: an optional number of channel to drop at
3552 the edge of spectrum. If only one value is
3553 specified, the same number will be dropped
3554 from both sides of the spectrum. Default
3555 is to keep all channels. Nested tuples
3556 represent individual edge selection for
3557 different IFs (a number of spectral channels
3558 can be different)
3559 default is (0, 0).
3560 threshold: the threshold used by line finder. It is
3561 better to keep it large as only strong lines
3562 affect the baseline solution.
3563 default is 3.
3564 chan_avg_limit: a maximum number of consequtive spectral
3565 channels to average during the search of
3566 weak and broad lines. The default is no
3567 averaging (and no search for weak lines).
3568 If such lines can affect the fitted baseline
3569 (e.g. a high order polynomial is fitted),
3570 increase this parameter (usually values up
3571 to 8 are reasonable). Most users of this
3572 method should find the default value sufficient.
3573 default is 1.
3574
3575 Example:
3576 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3577 """
3578
3579 try:
3580 varlist = vars()
3581
3582 if value is None: value = 'aicc'
3583 if blfunc is None: blfunc = 'chebyshev'
3584 if order is None: order = 5
3585 if mask is None: mask = []
3586 if whichrow is None: whichrow = 0
3587 if uselinefinder is None: uselinefinder = True
3588 if edge is None: edge = (0, 0)
3589 if threshold is None: threshold = 3
3590 if chan_avg_limit is None: chan_avg_limit = 1
3591
3592 return self._calc_aic(value, blfunc, order, mask,
3593 whichrow, uselinefinder, edge,
3594 threshold, chan_avg_limit)
3595
3596 except RuntimeError, e:
3597 raise_fitting_failure_exception(e)
3598
3599 @asaplog_post_dec
3600 def sinusoid_baseline(self, mask=None, applyfft=None,
3601 fftmethod=None, fftthresh=None,
3602 addwn=None, rejwn=None,
3603 insitu=None,
3604 clipthresh=None, clipniter=None,
3605 plot=None,
3606 getresidual=None,
3607 showprogress=None, minnrow=None,
3608 outlog=None,
3609 blfile=None, csvformat=None,
3610 bltable=None):
3611 """\
3612 Return a scan which has been baselined (all rows) with sinusoidal
3613 functions.
3614
3615 Parameters:
3616 mask: an optional mask
3617 applyfft: if True use some method, such as FFT, to find
3618 strongest sinusoidal components in the wavenumber
3619 domain to be used for baseline fitting.
3620 default is True.
3621 fftmethod: method to find the strong sinusoidal components.
3622 now only 'fft' is available and it is the default.
3623 fftthresh: the threshold to select wave numbers to be used for
3624 fitting from the distribution of amplitudes in the
3625 wavenumber domain.
3626 both float and string values accepted.
3627 given a float value, the unit is set to sigma.
3628 for string values, allowed formats include:
3629 'xsigma' or 'x' (= x-sigma level. e.g.,
3630 '3sigma'), or
3631 'topx' (= the x strongest ones, e.g. 'top5').
3632 default is 3.0 (unit: sigma).
3633 addwn: the additional wave numbers to be used for fitting.
3634 list or integer value is accepted to specify every
3635 wave numbers. also string value can be used in case
3636 you need to specify wave numbers in a certain range,
3637 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3638 '<a' (= 0,1,...,a-2,a-1),
3639 '>=a' (= a, a+1, ... up to the maximum wave
3640 number corresponding to the Nyquist
3641 frequency for the case of FFT).
3642 default is [0].
3643 rejwn: the wave numbers NOT to be used for fitting.
3644 can be set just as addwn but has higher priority:
3645 wave numbers which are specified both in addwn
3646 and rejwn will NOT be used. default is [].
3647 insitu: if False a new scantable is returned.
3648 Otherwise, the scaling is done in-situ
3649 The default is taken from .asaprc (False)
3650 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3651 clipniter: maximum number of iteration of 'clipthresh'-sigma
3652 clipping (default is 0)
3653 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3654 plot the fit and the residual. In this each
3655 indivual fit has to be approved, by typing 'y'
3656 or 'n'
3657 getresidual: if False, returns best-fit values instead of
3658 residual. (default is True)
3659 showprogress: show progress status for large data.
3660 default is True.
3661 minnrow: minimum number of input spectra to show.
3662 default is 1000.
3663 outlog: Output the coefficients of the best-fit
3664 function to logger (default is False)
3665 blfile: Name of a text file in which the best-fit
3666 parameter values to be written
3667 (default is '': no file/logger output)
3668 csvformat: if True blfile is csv-formatted, default is False.
3669 bltable: name of a baseline table where fitting results
3670 (coefficients, rms, etc.) are to be written.
3671 if given, fitting results will NOT be output to
3672 scantable (insitu=True) or None will be
3673 returned (insitu=False).
3674 (default is "": no table output)
3675
3676 Example:
3677 # return a scan baselined by a combination of sinusoidal curves
3678 # having wave numbers in spectral window up to 10,
3679 # also with 3-sigma clipping, iteration up to 4 times
3680 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3681
3682 Note:
3683 The best-fit parameter values output in logger and/or blfile are now
3684 based on specunit of 'channel'.
3685 """
3686
3687 try:
3688 varlist = vars()
3689
3690 if insitu is None: insitu = rcParams['insitu']
3691 if insitu:
3692 workscan = self
3693 else:
3694 workscan = self.copy()
3695
3696 if mask is None: mask = []
3697 if applyfft is None: applyfft = True
3698 if fftmethod is None: fftmethod = 'fft'
3699 if fftthresh is None: fftthresh = 3.0
3700 if addwn is None: addwn = [0]
3701 if rejwn is None: rejwn = []
3702 if clipthresh is None: clipthresh = 3.0
3703 if clipniter is None: clipniter = 0
3704 if plot is None: plot = False
3705 if getresidual is None: getresidual = True
3706 if showprogress is None: showprogress = True
3707 if minnrow is None: minnrow = 1000
3708 if outlog is None: outlog = False
3709 if blfile is None: blfile = ''
3710 if csvformat is None: csvformat = False
3711 if bltable is None: bltable = ''
3712
3713 sapplyfft = 'true' if applyfft else 'false'
3714 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3715
3716 scsvformat = 'T' if csvformat else 'F'
3717
3718 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3719 workscan._sinusoid_baseline(mask,
3720 fftinfo,
3721 #applyfft, fftmethod.lower(),
3722 #str(fftthresh).lower(),
3723 workscan._parse_wn(addwn),
3724 workscan._parse_wn(rejwn),
3725 clipthresh, clipniter,
3726 getresidual,
3727 pack_progress_params(showprogress,
3728 minnrow),
3729 outlog, scsvformat+blfile,
3730 bltable)
3731 workscan._add_history('sinusoid_baseline', varlist)
3732
3733 if bltable == '':
3734 if insitu:
3735 self._assign(workscan)
3736 else:
3737 return workscan
3738 else:
3739 if not insitu:
3740 return None
3741
3742 except RuntimeError, e:
3743 raise_fitting_failure_exception(e)
3744
3745
3746 @asaplog_post_dec
3747 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3748 fftmethod=None, fftthresh=None,
3749 addwn=None, rejwn=None,
3750 insitu=None,
3751 clipthresh=None, clipniter=None,
3752 edge=None, threshold=None, chan_avg_limit=None,
3753 plot=None,
3754 getresidual=None,
3755 showprogress=None, minnrow=None,
3756 outlog=None,
3757 blfile=None, csvformat=None,
3758 bltable=None):
3759 """\
3760 Return a scan which has been baselined (all rows) with sinusoidal
3761 functions.
3762 Spectral lines are detected first using linefinder and masked out
3763 to avoid them affecting the baseline solution.
3764
3765 Parameters:
3766 mask: an optional mask retreived from scantable
3767 applyfft: if True use some method, such as FFT, to find
3768 strongest sinusoidal components in the wavenumber
3769 domain to be used for baseline fitting.
3770 default is True.
3771 fftmethod: method to find the strong sinusoidal components.
3772 now only 'fft' is available and it is the default.
3773 fftthresh: the threshold to select wave numbers to be used for
3774 fitting from the distribution of amplitudes in the
3775 wavenumber domain.
3776 both float and string values accepted.
3777 given a float value, the unit is set to sigma.
3778 for string values, allowed formats include:
3779 'xsigma' or 'x' (= x-sigma level. e.g.,
3780 '3sigma'), or
3781 'topx' (= the x strongest ones, e.g. 'top5').
3782 default is 3.0 (unit: sigma).
3783 addwn: the additional wave numbers to be used for fitting.
3784 list or integer value is accepted to specify every
3785 wave numbers. also string value can be used in case
3786 you need to specify wave numbers in a certain range,
3787 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3788 '<a' (= 0,1,...,a-2,a-1),
3789 '>=a' (= a, a+1, ... up to the maximum wave
3790 number corresponding to the Nyquist
3791 frequency for the case of FFT).
3792 default is [0].
3793 rejwn: the wave numbers NOT to be used for fitting.
3794 can be set just as addwn but has higher priority:
3795 wave numbers which are specified both in addwn
3796 and rejwn will NOT be used. default is [].
3797 insitu: if False a new scantable is returned.
3798 Otherwise, the scaling is done in-situ
3799 The default is taken from .asaprc (False)
3800 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3801 clipniter: maximum number of iteration of 'clipthresh'-sigma
3802 clipping (default is 0)
3803 edge: an optional number of channel to drop at
3804 the edge of spectrum. If only one value is
3805 specified, the same number will be dropped
3806 from both sides of the spectrum. Default
3807 is to keep all channels. Nested tuples
3808 represent individual edge selection for
3809 different IFs (a number of spectral channels
3810 can be different)
3811 threshold: the threshold used by line finder. It is
3812 better to keep it large as only strong lines
3813 affect the baseline solution.
3814 chan_avg_limit: a maximum number of consequtive spectral
3815 channels to average during the search of
3816 weak and broad lines. The default is no
3817 averaging (and no search for weak lines).
3818 If such lines can affect the fitted baseline
3819 (e.g. a high order polynomial is fitted),
3820 increase this parameter (usually values up
3821 to 8 are reasonable). Most users of this
3822 method should find the default value sufficient.
3823 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3824 plot the fit and the residual. In this each
3825 indivual fit has to be approved, by typing 'y'
3826 or 'n'
3827 getresidual: if False, returns best-fit values instead of
3828 residual. (default is True)
3829 showprogress: show progress status for large data.
3830 default is True.
3831 minnrow: minimum number of input spectra to show.
3832 default is 1000.
3833 outlog: Output the coefficients of the best-fit
3834 function to logger (default is False)
3835 blfile: Name of a text file in which the best-fit
3836 parameter values to be written
3837 (default is "": no file/logger output)
3838 csvformat: if True blfile is csv-formatted, default is False.
3839 bltable: name of a baseline table where fitting results
3840 (coefficients, rms, etc.) are to be written.
3841 if given, fitting results will NOT be output to
3842 scantable (insitu=True) or None will be
3843 returned (insitu=False).
3844 (default is "": no table output)
3845
3846 Example:
3847 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3848
3849 Note:
3850 The best-fit parameter values output in logger and/or blfile are now
3851 based on specunit of 'channel'.
3852 """
3853
3854 try:
3855 varlist = vars()
3856
3857 if insitu is None: insitu = rcParams['insitu']
3858 if insitu:
3859 workscan = self
3860 else:
3861 workscan = self.copy()
3862
3863 if mask is None: mask = []
3864 if applyfft is None: applyfft = True
3865 if fftmethod is None: fftmethod = 'fft'
3866 if fftthresh is None: fftthresh = 3.0
3867 if addwn is None: addwn = [0]
3868 if rejwn is None: rejwn = []
3869 if clipthresh is None: clipthresh = 3.0
3870 if clipniter is None: clipniter = 0
3871 if edge is None: edge = (0,0)
3872 if threshold is None: threshold = 3
3873 if chan_avg_limit is None: chan_avg_limit = 1
3874 if plot is None: plot = False
3875 if getresidual is None: getresidual = True
3876 if showprogress is None: showprogress = True
3877 if minnrow is None: minnrow = 1000
3878 if outlog is None: outlog = False
3879 if blfile is None: blfile = ''
3880 if csvformat is None: csvformat = False
3881 if bltable is None: bltable = ''
3882
3883 sapplyfft = 'true' if applyfft else 'false'
3884 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3885
3886 scsvformat = 'T' if csvformat else 'F'
3887
3888 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3889 workscan._auto_sinusoid_baseline(mask,
3890 fftinfo,
3891 workscan._parse_wn(addwn),
3892 workscan._parse_wn(rejwn),
3893 clipthresh, clipniter,
3894 normalise_edge_param(edge),
3895 threshold, chan_avg_limit,
3896 getresidual,
3897 pack_progress_params(showprogress,
3898 minnrow),
3899 outlog, scsvformat+blfile, bltable)
3900 workscan._add_history("auto_sinusoid_baseline", varlist)
3901
3902 if bltable == '':
3903 if insitu:
3904 self._assign(workscan)
3905 else:
3906 return workscan
3907 else:
3908 if not insitu:
3909 return None
3910
3911 except RuntimeError, e:
3912 raise_fitting_failure_exception(e)
3913
3914 @asaplog_post_dec
3915 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3916 clipthresh=None, clipniter=None, plot=None,
3917 getresidual=None, showprogress=None, minnrow=None,
3918 outlog=None, blfile=None, csvformat=None,
3919 bltable=None):
3920 """\
3921 Return a scan which has been baselined (all rows) by cubic spline
3922 function (piecewise cubic polynomial).
3923
3924 Parameters:
3925 mask: An optional mask
3926 npiece: Number of pieces. (default is 2)
3927 insitu: If False a new scantable is returned.
3928 Otherwise, the scaling is done in-situ
3929 The default is taken from .asaprc (False)
3930 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3931 clipniter: maximum number of iteration of 'clipthresh'-sigma
3932 clipping (default is 0)
3933 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3934 plot the fit and the residual. In this each
3935 indivual fit has to be approved, by typing 'y'
3936 or 'n'
3937 getresidual: if False, returns best-fit values instead of
3938 residual. (default is True)
3939 showprogress: show progress status for large data.
3940 default is True.
3941 minnrow: minimum number of input spectra to show.
3942 default is 1000.
3943 outlog: Output the coefficients of the best-fit
3944 function to logger (default is False)
3945 blfile: Name of a text file in which the best-fit
3946 parameter values to be written
3947 (default is "": no file/logger output)
3948 csvformat: if True blfile is csv-formatted, default is False.
3949 bltable: name of a baseline table where fitting results
3950 (coefficients, rms, etc.) are to be written.
3951 if given, fitting results will NOT be output to
3952 scantable (insitu=True) or None will be
3953 returned (insitu=False).
3954 (default is "": no table output)
3955
3956 Example:
3957 # return a scan baselined by a cubic spline consisting of 2 pieces
3958 # (i.e., 1 internal knot),
3959 # also with 3-sigma clipping, iteration up to 4 times
3960 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3961
3962 Note:
3963 The best-fit parameter values output in logger and/or blfile are now
3964 based on specunit of 'channel'.
3965 """
3966
3967 try:
3968 varlist = vars()
3969
3970 if insitu is None: insitu = rcParams['insitu']
3971 if insitu:
3972 workscan = self
3973 else:
3974 workscan = self.copy()
3975
3976 if mask is None: mask = []
3977 if npiece is None: npiece = 2
3978 if clipthresh is None: clipthresh = 3.0
3979 if clipniter is None: clipniter = 0
3980 if plot is None: plot = False
3981 if getresidual is None: getresidual = True
3982 if showprogress is None: showprogress = True
3983 if minnrow is None: minnrow = 1000
3984 if outlog is None: outlog = False
3985 if blfile is None: blfile = ''
3986 if csvformat is None: csvformat = False
3987 if bltable is None: bltable = ''
3988
3989 scsvformat = 'T' if csvformat else 'F'
3990
3991 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3992 workscan._cspline_baseline(mask, npiece,
3993 clipthresh, clipniter,
3994 getresidual,
3995 pack_progress_params(showprogress,
3996 minnrow),
3997 outlog, scsvformat+blfile,
3998 bltable)
3999 workscan._add_history("cspline_baseline", varlist)
4000
4001 if bltable == '':
4002 if insitu:
4003 self._assign(workscan)
4004 else:
4005 return workscan
4006 else:
4007 if not insitu:
4008 return None
4009
4010 except RuntimeError, e:
4011 raise_fitting_failure_exception(e)
4012
4013 @asaplog_post_dec
4014 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
4015 clipthresh=None, clipniter=None,
4016 edge=None, threshold=None, chan_avg_limit=None,
4017 getresidual=None, plot=None,
4018 showprogress=None, minnrow=None, outlog=None,
4019 blfile=None, csvformat=None, bltable=None):
4020 """\
4021 Return a scan which has been baselined (all rows) by cubic spline
4022 function (piecewise cubic polynomial).
4023 Spectral lines are detected first using linefinder and masked out
4024 to avoid them affecting the baseline solution.
4025
4026 Parameters:
4027 mask: an optional mask retreived from scantable
4028 npiece: Number of pieces. (default is 2)
4029 insitu: if False a new scantable is returned.
4030 Otherwise, the scaling is done in-situ
4031 The default is taken from .asaprc (False)
4032 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4033 clipniter: maximum number of iteration of 'clipthresh'-sigma
4034 clipping (default is 0)
4035 edge: an optional number of channel to drop at
4036 the edge of spectrum. If only one value is
4037 specified, the same number will be dropped
4038 from both sides of the spectrum. Default
4039 is to keep all channels. Nested tuples
4040 represent individual edge selection for
4041 different IFs (a number of spectral channels
4042 can be different)
4043 threshold: the threshold used by line finder. It is
4044 better to keep it large as only strong lines
4045 affect the baseline solution.
4046 chan_avg_limit: a maximum number of consequtive spectral
4047 channels to average during the search of
4048 weak and broad lines. The default is no
4049 averaging (and no search for weak lines).
4050 If such lines can affect the fitted baseline
4051 (e.g. a high order polynomial is fitted),
4052 increase this parameter (usually values up
4053 to 8 are reasonable). Most users of this
4054 method should find the default value sufficient.
4055 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4056 plot the fit and the residual. In this each
4057 indivual fit has to be approved, by typing 'y'
4058 or 'n'
4059 getresidual: if False, returns best-fit values instead of
4060 residual. (default is True)
4061 showprogress: show progress status for large data.
4062 default is True.
4063 minnrow: minimum number of input spectra to show.
4064 default is 1000.
4065 outlog: Output the coefficients of the best-fit
4066 function to logger (default is False)
4067 blfile: Name of a text file in which the best-fit
4068 parameter values to be written
4069 (default is "": no file/logger output)
4070 csvformat: if True blfile is csv-formatted, default is False.
4071 bltable: name of a baseline table where fitting results
4072 (coefficients, rms, etc.) are to be written.
4073 if given, fitting results will NOT be output to
4074 scantable (insitu=True) or None will be
4075 returned (insitu=False).
4076 (default is "": no table output)
4077
4078 Example:
4079 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4080
4081 Note:
4082 The best-fit parameter values output in logger and/or blfile are now
4083 based on specunit of 'channel'.
4084 """
4085
4086 try:
4087 varlist = vars()
4088
4089 if insitu is None: insitu = rcParams['insitu']
4090 if insitu:
4091 workscan = self
4092 else:
4093 workscan = self.copy()
4094
4095 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4096 if mask is None: mask = []
4097 if npiece is None: npiece = 2
4098 if clipthresh is None: clipthresh = 3.0
4099 if clipniter is None: clipniter = 0
4100 if edge is None: edge = (0, 0)
4101 if threshold is None: threshold = 3
4102 if chan_avg_limit is None: chan_avg_limit = 1
4103 if plot is None: plot = False
4104 if getresidual is None: getresidual = True
4105 if showprogress is None: showprogress = True
4106 if minnrow is None: minnrow = 1000
4107 if outlog is None: outlog = False
4108 if blfile is None: blfile = ''
4109 if csvformat is None: csvformat = False
4110 if bltable is None: bltable = ''
4111
4112 scsvformat = 'T' if csvformat else 'F'
4113
4114 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4115 workscan._auto_cspline_baseline(mask, npiece,
4116 clipthresh, clipniter,
4117 normalise_edge_param(edge),
4118 threshold,
4119 chan_avg_limit, getresidual,
4120 pack_progress_params(showprogress,
4121 minnrow),
4122 outlog,
4123 scsvformat+blfile,
4124 bltable)
4125 workscan._add_history("auto_cspline_baseline", varlist)
4126
4127 if bltable == '':
4128 if insitu:
4129 self._assign(workscan)
4130 else:
4131 return workscan
4132 else:
4133 if not insitu:
4134 return None
4135
4136 except RuntimeError, e:
4137 raise_fitting_failure_exception(e)
4138
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:       An optional mask
            order:      the maximum order of Chebyshev polynomial (default is 5)
            insitu:     If False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:  maximum number of iteration of 'clipthresh'-sigma
                        clipping (default is 0)
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                        plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            getresidual: if False, returns best-fit values instead of
                        residual. (default is True)
            showprogress: show progress status for large data.
                        default is True.
            minnrow:    minimum number of input spectra to show.
                        default is 1000.
            outlog:     Output the coefficients of the best-fit
                        function to logger (default is False)
            blfile:     Name of a text file in which the best-fit
                        parameter values to be written
                        (default is "": no file/logger output)
            csvformat:  if True blfile is csv-formatted, default is False.
            bltable:    name of a baseline table where fitting results
                        (coefficients, rms, etc.) are to be written.
                        if given, fitting results will NOT be output to
                        scantable (insitu=True) or None will be
                        returned (insitu=False).
                        (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order up to 5,
            # with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the call arguments for the history entry before any
            # locals are added
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for parameters left as None
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, plot=True is UNAVAILABLE until Chebyshev fitting is
            # implemented as a fitter method; the C++ routine is always used.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # with a bltable the results live in the table, not the scantable
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4236
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # capture the call arguments for the history entry
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for parameters left as None
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, plot=True is UNAVAILABLE until Chebyshev fitting is
            # implemented as a fitter method; the C++ routine is always used.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # with a bltable the results live in the table, not the scantable
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4359
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:  maximum number of iteration of 'clipthresh'-sigma
                        clipping (default is 0)
            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            getresidual: if False, returns best-fit values instead of
                        residual. (default is True)
            showprogress: show progress status for large data.
                        default is True.
            minnrow:    minimum number of input spectra to show.
                        default is 1000.
            outlog:     Output the coefficients of the best-fit
                        function to logger (default is False)
            blfile:     Name of a text file in which the best-fit
                        parameter values to be written
                        (default is "": no file/logger output)
            csvformat:  if True blfile is csv-formatted, default is False.
            bltable:    name of a baseline table where fitting results
                        (coefficients, rms, etc.) are to be written.
                        if given, fitting results will NOT be output to
                        scantable (insitu=True) or None will be
                        returned (insitu=False).
                        (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        Note:
            NOTE(review): unlike chebyshev_baseline()/auto_poly_baseline(),
            this method returns workscan (insitu=False) even when bltable is
            given, contradicting the bltable description above — confirm
            which behaviour is intended.
        """

        try:
            # capture the call arguments for the history entry
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for parameters left as None
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive row-by-row fitting with the Python fitter.
                # NOTE(review): output is appended to blfile only if the file
                # already exists — confirm this is deliberate.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # action: "H" asks with help on first row, "Y" auto-accepts
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        # combine user mask with per-row flags (CAS-1434)
                        f.mask = mask_and(mask, workscan._getmask(r))
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                    #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    #if accept_fit.upper() == "N":
                    #    #workscan._append_blinfo(None, None, None)
                    #    continue
                    accept_fit = self._get_verify_action("Accept fit?",action)
                    if r == 0: action = None
                    if accept_fit.upper() == "N":
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter resources
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate fitting to the C++ layer
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable) #

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4509
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # capture the call arguments for the history entry
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute the documented defaults for parameters left as None
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes the csv flag as a 'T'/'F' prefix on blfile
            scsvformat = 'T' if csvformat else 'F'

            # expand the edge spec into a flat per-IF (left, right) list
            edge = normalise_edge_param(edge)

            if plot:
                # interactive row-by-row fitting with linefinder + fitter.
                # NOTE(review): output is appended to blfile only if the file
                # already exists — confirm this is deliberate.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # action: "H" asks with help on first row, "Y" auto-accepts
                action = "H"
                for r in rows:
                    # edge list holds (left, right) pairs indexed by IF number
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine user mask with per-row flags (CAS-1434)
                        msk = mask_and(mask, workscan._getmask(r))
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # linefinder mask excludes detected lines from the fit
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                    #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    accept_fit = self._get_verify_action("Accept fit?",action)
                    if r == 0: action = None
                    if accept_fit.upper() == "N":
                        #workscan._append_blinfo(None, None, None)
                        continue
                    elif accept_fit.upper() == "R":
                        break
                    elif accept_fit.upper() == "A":
                        action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter resources
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate fitting to the C++ layer
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # with a bltable the results live in the table, not the scantable
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4694
4695 def _init_blinfo(self):
4696 """\
4697 Initialise the following three auxiliary members:
4698 blpars : parameters of the best-fit baseline,
4699 masklists : mask data (edge positions of masked channels) and
4700 actualmask : mask data (in boolean list),
4701 to keep for use later (including output to logger/text files).
4702 Used by poly_baseline() and auto_poly_baseline() in case of
4703 'plot=True'.
4704 """
4705 self.blpars = []
4706 self.masklists = []
4707 self.actualmask = []
4708 return
4709
4710 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4711 """\
4712 Append baseline-fitting related info to blpars, masklist and
4713 actualmask.
4714 """
4715 self.blpars.append(data_blpars)
4716 self.masklists.append(data_masklists)
4717 self.actualmask.append(data_actualmask)
4718 return
4719
4720 @asaplog_post_dec
4721 def rotate_linpolphase(self, angle):
4722 """\
4723 Rotate the phase of the complex polarization O=Q+iU correlation.
4724 This is always done in situ in the raw data. So if you call this
4725 function more than once then each call rotates the phase further.
4726
4727 Parameters:
4728
4729 angle: The angle (degrees) to rotate (add) by.
4730
4731 Example::
4732
4733 scan.rotate_linpolphase(2.3)
4734
4735 """
4736 varlist = vars()
4737 self._math._rotate_linpolphase(self, angle)
4738 self._add_history("rotate_linpolphase", varlist)
4739 return
4740
4741 @asaplog_post_dec
4742 def rotate_xyphase(self, angle):
4743 """\
4744 Rotate the phase of the XY correlation. This is always done in situ
4745 in the data. So if you call this function more than once
4746 then each call rotates the phase further.
4747
4748 Parameters:
4749
4750 angle: The angle (degrees) to rotate (add) by.
4751
4752 Example::
4753
4754 scan.rotate_xyphase(2.3)
4755
4756 """
4757 varlist = vars()
4758 self._math._rotate_xyphase(self, angle)
4759 self._add_history("rotate_xyphase", varlist)
4760 return
4761
4762 @asaplog_post_dec
4763 def swap_linears(self):
4764 """\
4765 Swap the linear polarisations XX and YY, or better the first two
4766 polarisations as this also works for ciculars.
4767 """
4768 varlist = vars()
4769 self._math._swap_linears(self)
4770 self._add_history("swap_linears", varlist)
4771 return
4772
4773 @asaplog_post_dec
4774 def invert_phase(self):
4775 """\
4776 Invert the phase of the complex polarisation
4777 """
4778 varlist = vars()
4779 self._math._invert_phase(self)
4780 self._add_history("invert_phase", varlist)
4781 return
4782
4783 @asaplog_post_dec
4784 def add(self, offset, insitu=None):
4785 """\
4786 Return a scan where all spectra have the offset added
4787
4788 Parameters:
4789
4790 offset: the offset
4791
4792 insitu: if False a new scantable is returned.
4793 Otherwise, the scaling is done in-situ
4794 The default is taken from .asaprc (False)
4795
4796 """
4797 if insitu is None: insitu = rcParams['insitu']
4798 self._math._setinsitu(insitu)
4799 varlist = vars()
4800 s = scantable(self._math._unaryop(self, offset, "ADD", False, False))
4801 s._add_history("add", varlist)
4802 if insitu:
4803 self._assign(s)
4804 else:
4805 return s
4806
4807 @asaplog_post_dec
4808 def scale(self, factor, tsys=True, insitu=None):
4809 """\
4810
4811 Return a scan where all spectra are scaled by the given 'factor'
4812
4813 Parameters:
4814
4815 factor: the scaling factor (float or 1D float list)
4816
4817 insitu: if False a new scantable is returned.
4818 Otherwise, the scaling is done in-situ
4819 The default is taken from .asaprc (False)
4820
4821 tsys: if True (default) then apply the operation to Tsys
4822 as well as the data
4823 """
4824 if insitu is None: insitu = rcParams['insitu']
4825 self._math._setinsitu(insitu)
4826 varlist = vars()
4827 s = None
4828 import numpy
4829 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4830 if isinstance(factor[0], list) or isinstance(factor[0],
4831 numpy.ndarray):
4832 from asapmath import _array2dOp
4833 s = _array2dOp( self, factor, "MUL", tsys, insitu, True )
4834 else:
4835 s = scantable( self._math._arrayop( self, factor,
4836 "MUL", tsys, True ) )
4837 else:
4838 s = scantable(self._math._unaryop(self, factor, "MUL", tsys, True ))
4839 s._add_history("scale", varlist)
4840 if insitu:
4841 self._assign(s)
4842 else:
4843 return s
4844
4845 @preserve_selection
4846 def set_sourcetype(self, match, matchtype="pattern",
4847 sourcetype="reference"):
4848 """\
4849 Set the type of the source to be an source or reference scan
4850 using the provided pattern.
4851
4852 Parameters:
4853
4854 match: a Unix style pattern, regular expression or selector
4855
4856 matchtype: 'pattern' (default) UNIX style pattern or
4857 'regex' regular expression
4858
4859 sourcetype: the type of the source to use (source/reference)
4860
4861 """
4862 varlist = vars()
4863 stype = -1
4864 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4865 stype = 1
4866 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4867 stype = 0
4868 else:
4869 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4870 if matchtype.lower().startswith("p"):
4871 matchtype = "pattern"
4872 elif matchtype.lower().startswith("r"):
4873 matchtype = "regex"
4874 else:
4875 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4876 sel = selector()
4877 if isinstance(match, selector):
4878 sel = match
4879 else:
4880 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4881 self.set_selection(sel)
4882 self._setsourcetype(stype)
4883 self._add_history("set_sourcetype", varlist)
4884
4885
    def set_sourcename(self, name):
        """\
        Set the source name for the scantable.

        Parameters:

            name:    the new source name
        """
        varlist = vars()
        self._setsourcename(name)
        self._add_history("set_sourcename", varlist)
4890
4891 @asaplog_post_dec
4892 @preserve_selection
4893 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4894 """\
4895 This function allows to build quotients automatically.
4896 It assumes the observation to have the same number of
4897 "ons" and "offs"
4898
4899 Parameters:
4900
4901 preserve: you can preserve (default) the continuum or
4902 remove it. The equations used are
4903
4904 preserve: Output = Toff * (on/off) - Toff
4905
4906 remove: Output = Toff * (on/off) - Ton
4907
4908 mode: the on/off detection mode
4909 'paired' (default)
4910 identifies 'off' scans by the
4911 trailing '_R' (Mopra/Parkes) or
4912 '_e'/'_w' (Tid) and matches
4913 on/off pairs from the observing pattern
4914 'time'
4915 finds the closest off in time
4916
4917 .. todo:: verify argument is not implemented
4918
4919 """
4920 varlist = vars()
4921 modes = ["time", "paired"]
4922 if not mode in modes:
4923 msg = "please provide valid mode. Valid modes are %s" % (modes)
4924 raise ValueError(msg)
4925 s = None
4926 if mode.lower() == "paired":
4927 from asap._asap import srctype
4928 sel = self.get_selection()
4929 #sel.set_query("SRCTYPE==psoff")
4930 sel.set_types(srctype.psoff)
4931 self.set_selection(sel)
4932 offs = self.copy()
4933 #sel.set_query("SRCTYPE==pson")
4934 sel.set_types(srctype.pson)
4935 self.set_selection(sel)
4936 ons = self.copy()
4937 s = scantable(self._math._quotient(ons, offs, preserve))
4938 elif mode.lower() == "time":
4939 s = scantable(self._math._auto_quotient(self, mode, preserve))
4940 s._add_history("auto_quotient", varlist)
4941 return s
4942
4943 @asaplog_post_dec
4944 def mx_quotient(self, mask = None, weight='median', preserve=True):
4945 """\
4946 Form a quotient using "off" beams when observing in "MX" mode.
4947
4948 Parameters:
4949
4950 mask: an optional mask to be used when weight == 'stddev'
4951
4952 weight: How to average the off beams. Default is 'median'.
4953
4954 preserve: you can preserve (default) the continuum or
4955 remove it. The equations used are:
4956
4957 preserve: Output = Toff * (on/off) - Toff
4958
4959 remove: Output = Toff * (on/off) - Ton
4960
4961 """
4962 mask = mask or ()
4963 varlist = vars()
4964 on = scantable(self._math._mx_extract(self, 'on'))
4965 preoff = scantable(self._math._mx_extract(self, 'off'))
4966 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4967 from asapmath import quotient
4968 q = quotient(on, off, preserve)
4969 q._add_history("mx_quotient", varlist)
4970 return q
4971
4972 @asaplog_post_dec
4973 def freq_switch(self, insitu=None):
4974 """\
4975 Apply frequency switching to the data.
4976
4977 Parameters:
4978
4979 insitu: if False a new scantable is returned.
4980 Otherwise, the swictching is done in-situ
4981 The default is taken from .asaprc (False)
4982
4983 """
4984 if insitu is None: insitu = rcParams['insitu']
4985 self._math._setinsitu(insitu)
4986 varlist = vars()
4987 s = scantable(self._math._freqswitch(self))
4988 s._add_history("freq_switch", varlist)
4989 if insitu:
4990 self._assign(s)
4991 else:
4992 return s
4993
    @asaplog_post_dec
    def recalc_azel(self):
        """Recalculate the azimuth and elevation for each position."""
        varlist = vars()
        self._recalcazel()
        self._add_history("recalc_azel", varlist)
        return
5001
5002 @asaplog_post_dec
5003 def __add__(self, other):
5004 """
5005 implicit on all axes and on Tsys
5006 """
5007 varlist = vars()
5008 s = self.__op( other, "ADD" )
5009 s._add_history("operator +", varlist)
5010 return s
5011
5012 @asaplog_post_dec
5013 def __sub__(self, other):
5014 """
5015 implicit on all axes and on Tsys
5016 """
5017 varlist = vars()
5018 s = self.__op( other, "SUB" )
5019 s._add_history("operator -", varlist)
5020 return s
5021
5022 @asaplog_post_dec
5023 def __mul__(self, other):
5024 """
5025 implicit on all axes and on Tsys
5026 """
5027 varlist = vars()
5028 s = self.__op( other, "MUL" ) ;
5029 s._add_history("operator *", varlist)
5030 return s
5031
5032
5033 @asaplog_post_dec
5034 def __div__(self, other):
5035 """
5036 implicit on all axes and on Tsys
5037 """
5038 varlist = vars()
5039 s = self.__op( other, "DIV" )
5040 s._add_history("operator /", varlist)
5041 return s
5042
5043 @asaplog_post_dec
5044 def __op( self, other, mode ):
5045 s = None
5046 if isinstance(other, scantable):
5047 s = scantable(self._math._binaryop(self, other, mode))
5048 elif isinstance(other, float):
5049 if other == 0.0:
5050 raise ZeroDivisionError("Dividing by zero is not recommended")
5051 s = scantable(self._math._unaryop(self, other, mode, False, False))
5052 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
5053 if isinstance(other[0], list) \
5054 or isinstance(other[0], numpy.ndarray):
5055 from asapmath import _array2dOp
5056 s = _array2dOp( self, other, mode, False )
5057 else:
5058 s = scantable( self._math._arrayop( self, other,
5059 mode, False, False ) )
5060 else:
5061 raise TypeError("Other input is not a scantable or float value")
5062 return s
5063
5064 @asaplog_post_dec
5065 def get_fit(self, row=0):
5066 """\
5067 Print or return the stored fits for a row in the scantable
5068
5069 Parameters:
5070
5071 row: the row which the fit has been applied to.
5072
5073 """
5074 if row > self.nrow():
5075 return
5076 from asap.asapfit import asapfit
5077 fit = asapfit(self._getfit(row))
5078 asaplog.push( '%s' %(fit) )
5079 return fit.as_dict()
5080
5081 @preserve_selection
5082 def flag_nans(self):
5083 """\
5084 Utility function to flag NaN values in the scantable.
5085 """
5086 import numpy
5087 basesel = self.get_selection()
5088 for i in range(self.nrow()):
5089 sel = self.get_row_selector(i)
5090 self.set_selection(basesel+sel)
5091 nans = numpy.isnan(self._getspectrum(0))
5092 if numpy.any(nans):
5093 bnans = [ bool(v) for v in nans]
5094 self.flag(bnans)
5095
5096 self.set_selection(basesel)
5097
    def get_row_selector(self, rowno):
        """Return a selector that restricts the selection to the given row."""
        return selector(rows=[rowno])
5100
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable history, recording the calling
        function name, a timestamp, and the call arguments.

        Parameters:

            funcname:    name of the calling function

            parameters:  dict of the caller's arguments (typically from
                         vars()); the 'self' entry is removed, dict
                         values are expanded one level, and mask lists
                         are compressed via _zip_mask
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # never record the scantable instance itself
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # expand one level of nesting: record each item separately
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # store masks compactly as [start, stop) segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # store masks compactly as [start, stop) segments
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
5141
5142
5143 def _zip_mask(self, mask):
5144 mask = list(mask)
5145 i = 0
5146 segments = []
5147 while mask[i:].count(1):
5148 i += mask[i:].index(1)
5149 if mask[i:].count(0):
5150 j = i + mask[i:].index(0)
5151 else:
5152 j = len(mask)
5153 segments.append([i, j])
5154 i = j
5155 return segments
5156
5157 def _get_ordinate_label(self):
5158 fu = "("+self.get_fluxunit()+")"
5159 import re
5160 lbl = "Intensity"
5161 if re.match(".K.", fu):
5162 lbl = "Brightness Temperature "+ fu
5163 elif re.match(".Jy.", fu):
5164 lbl = "Flux density "+ fu
5165 return lbl
5166
5167 def _check_ifs(self):
5168# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5169 nchans = [self.nchan(i) for i in self.getifnos()]
5170 nchans = filter(lambda t: t > 0, nchans)
5171 return (sum(nchans)/len(nchans) == nchans[0])
5172
5173 @asaplog_post_dec
5174 def _fill(self, names, unit, average, opts={}):
5175 first = True
5176 fullnames = []
5177 for name in names:
5178 name = os.path.expandvars(name)
5179 name = os.path.expanduser(name)
5180 if not os.path.exists(name):
5181 msg = "File '%s' does not exists" % (name)
5182 raise IOError(msg)
5183 fullnames.append(name)
5184 if average:
5185 asaplog.push('Auto averaging integrations')
5186 stype = int(rcParams['scantable.storage'].lower() == 'disk')
5187 for name in fullnames:
5188 tbl = Scantable(stype)
5189 if is_ms( name ):
5190 r = msfiller( tbl )
5191 else:
5192 r = filler( tbl )
5193 msg = "Importing %s..." % (name)
5194 asaplog.push(msg, False)
5195 r.open(name, opts)
5196 rx = rcParams['scantable.reference']
5197 r.setreferenceexpr(rx)
5198 r.fill()
5199 if average:
5200 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
5201 if not first:
5202 tbl = self._math._merge([self, tbl])
5203 Scantable.__init__(self, tbl)
5204 r.close()
5205 del r, tbl
5206 first = False
5207 #flush log
5208 asaplog.post()
5209 if unit is not None:
5210 self.set_fluxunit(unit)
5211 if not is_casapy():
5212 self.set_freqframe(rcParams['scantable.freqframe'])
5213
5214 def _get_verify_action( self, msg, action=None ):
5215 valid_act = ['Y', 'N', 'A', 'R']
5216 if not action or not isinstance(action, str):
5217 action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
5218 if action == '':
5219 return "Y"
5220 elif (action.upper()[0] in valid_act):
5221 return action.upper()[0]
5222 elif (action.upper()[0] in ['H','?']):
5223 print "Available actions of verification [Y|n|a|r]"
5224 print " Y : Yes for current data (default)"
5225 print " N : No for current data"
5226 print " A : Accept all in the following and exit from verification"
5227 print " R : Reject all in the following and exit from verification"
5228 print " H or ?: help (show this message)"
5229 return self._get_verify_action(msg)
5230 else:
5231 return 'Y'
5232
5233 def __getitem__(self, key):
5234 if key < 0:
5235 key += self.nrow()
5236 if key >= self.nrow():
5237 raise IndexError("Row index out of range.")
5238 return self._getspectrum(key)
5239
5240 def __setitem__(self, key, value):
5241 if key < 0:
5242 key += self.nrow()
5243 if key >= self.nrow():
5244 raise IndexError("Row index out of range.")
5245 if not hasattr(value, "__len__") or \
5246 len(value) > self.nchan(self.getif(key)):
5247 raise ValueError("Spectrum length doesn't match.")
5248 return self._setspectrum(value, key)
5249
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
5252
5253 def __iter__(self):
5254 for i in range(len(self)):
5255 yield self[i]
Note: See TracBrowser for help on using the repository browser.