source: trunk/python/scantable.py@ 2935

Last change on this file since 2935 was 2935, checked in by WataruKawasaki, 12 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: modified sd.scantable.parse_spw_selection() so that a warning message is shown when merging of overlapping channel ranges is executed.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 206.8 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator: remember the scantable's selection before *func* runs
    and restore it afterwards, even when *func* raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # a scantable is a casa table whose table.info declares Type = Scantable
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    if re.match('^Type = (Scantable)? *$', first_line):
        return True
    return False
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # an MS is a casa table whose table.info mentions 'Measurement Set'
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    if first_line.find('Measurement Set') != -1:
        return True
    return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

            If an element contains the same integer values, the input 'edge'
            parameter can be given in a simpler shape in the following cases:
                ** when len(edge)!=2
                    any elements containing the same values can be replaced
                    to single integers.
                    e.g. [[15,15]] can be simplified to [15]
                    (or [15,15] or 15 also).
                    e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                ** when len(edge)=2
                    care is needed for this case: ONLY ONE of the
                    elements can be a single integer,
                    e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                    or [[5,5],10], but can NOT be simplified to [5,10].
                    when [5,10] given, it is interpreted as
                    [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # -- validation ----------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # -- canonicalise scalars and 0/1-element sequences ----------------
    if isinstance(edge, int):
        edge = [edge, edge]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]    # e.g. [1] => [1,1]

    # a flat pair applies to all IFs; anything longer (or nested) is per-IF
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten, duplicating bare integers into [n, n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when 'verbose' is set in the rc
    parameters, otherwise re-raise as a RuntimeError carrying both the
    original error text and a hint about convergence."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-display options into a single 'bool,int' string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.

    Entries whose row index is out of range (do_pack_blinfo returns '')
    are silently dropped.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        res = []
        # iterate directly instead of indexing via xrange
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' when the row index is >= maxirow. Raises ValueError when
    mandatory keys are missing or clipping/linefinder parameters are
    given only partially.
    """
    def _join(val):
        # 'a,b,c' for a flat sequence, str(val) for a scalar
        if isinstance(val, (list, tuple)):
            return ",".join(str(item) for item in val)
        return str(val)

    def _join_nested(val):
        # like _join, but flattens one level of nesting (for masklist)
        if not isinstance(val, (list, tuple)):
            return str(val)
        parts = []
        for item in val:
            if isinstance(item, (list, tuple)):
                parts.extend(str(sub) for sub in item)
            else:
                parts.append(str(item))
        return ",".join(parts)

    dinfo = {}
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'" + key + "' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        dinfo[key] = _join_nested(val)

    # rows beyond the table are silently skipped by the caller
    if irow >= maxirow:
        return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _join(blinfo[key])

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'" + fspec_key + "' is missing in blinfo.")

    # clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _join(blinfo[key])
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    # iterate directly instead of indexing via xrange
    for entry in sres:
        (srow, scoeff, srms) = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if string *s* (after stripping whitespace) parses as
    a float.

    The original used a bare except plus a return inside finally, which
    silently swallowed every exception (including KeyboardInterrupt);
    only ValueError from float() is an expected failure here.
    """
    try:
        float(s.strip())
    except ValueError:
        return False
    return True
313
def is_frequency(s):
    """Return True if string *s* ends with 'Hz' (case-insensitive)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a frequency range given as two strings (a bare number and
    a number with a frequency unit) into a pair of floats in Hz, scaling
    both by the SI prefix found on *s2*."""
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
                   1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character before 'Hz' is either an SI prefix or part of the number
    prefix = s2[-3:-2]
    if is_number(prefix):
        # plain 'Hz', no SI prefix
        return (float(s1), float(s2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
338
def is_velocity(s):
    """Return True if string *s* ends with 'm/s' (case-insensitive)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a velocity range given as two strings (a bare number and a
    number with an 'm/s' unit) into a pair of floats in km/s, scaling
    both by the prefix found on *s2*."""
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character before 'm/s' is either a prefix or part of the number
    prefix = s2[-4:-3]
    if is_number(prefix):
        # unit is plain m/s
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity *vel* (km/s) to a frequency for the given rest
    frequency, using the requested doppler convention: 'radio',
    'optical', or anything else for the relativistic formula."""
    import math

    # speed of light in km/s
    vel_c = 299792.458
    r = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - r)
    if convention == 'optical':
        return restfreq / (1.0 + r)
    return restfreq * math.sqrt((1.0 - r) / (1.0 + r))
380
def get_restfreq_in_Hz(s_restfreq):
    """Convert a rest-frequency string such as '115.271GHz' or
    '1420 MHz' to a float in Hz.

    A bare number (no unit) or an explicit 'Hz' is returned unscaled;
    a three-character unit '<prefix>Hz' is scaled by the SI prefix;
    anything else raises an Exception.
    """
    value = 0.0
    unit = ""
    s = s_restfreq.replace(" ", "")

    # scan from the end: trailing letters form the unit, the rest the value
    for i in reversed(range(len(s))):
        if s[i].isalpha():
            unit = s[i] + unit
        else:
            value = float(s[0:i + 1])
            break

    if (unit == "") or (unit.lower() == "hz"):
        return value
    elif (len(unit) == 3) and (unit[1:3].lower() == "hz"):
        prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
        factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
                       1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]
        factor = factor_list[prefix_list.index(unit[0])]
        return value * factor
    else:
        raise Exception("wrong unit of restfreq.")
432
433def normalise_restfreq(in_restfreq):
434 if isinstance(in_restfreq, float):
435 return in_restfreq
436 elif isinstance(in_restfreq, int) or isinstance(in_restfreq, long):
437 return float(in_restfreq)
438 elif isinstance(in_restfreq, str):
439 return get_restfreq_in_Hz(in_restfreq)
440 elif isinstance(in_restfreq, list) or isinstance(in_restfreq, numpy.ndarray):
441 if isinstance(in_restfreq, numpy.ndarray):
442 if len(in_restfreq.shape) > 1:
443 mesg = "given in numpy.ndarray, in_restfreq must be 1-D."
444 raise Exception, mesg
445
446 res = []
447 for i in xrange(len(in_restfreq)):
448 elem = in_restfreq[i]
449 if isinstance(elem, float):
450 res.append(elem)
451 elif isinstance(elem, int) or isinstance(elem, long):
452 res.append(float(elem))
453 elif isinstance(elem, str):
454 res.append(get_restfreq_in_Hz(elem))
455 elif isinstance(elem, dict):
456 if isinstance(elem["value"], float):
457 res.append(elem)
458 elif isinstance(elem["value"], int):
459 dictelem = {}
460 dictelem["name"] = elem["name"]
461 dictelem["value"] = float(elem["value"])
462 res.append(dictelem)
463 elif isinstance(elem["value"], str):
464 dictelem = {}
465 dictelem["name"] = elem["name"]
466 dictelem["value"] = get_restfreq_in_Hz(elem["value"])
467 res.append(dictelem)
468 else:
469 mesg = "restfreq elements must be float, int, or string."
470 raise Exception, mesg
471 return res
472 else:
473 mesg = "wrong type of restfreq given."
474 raise Exception, mesg
475
def set_restfreq(s, restfreq):
    """Apply *restfreq* to scantable *s*, doing nothing when the
    specification is empty ('' or [])."""
    has_value = (restfreq != '') and (restfreq != [])
    if has_value:
        s.set_restfreqs(normalise_restfreq(restfreq))
480
481class scantable(Scantable):
482 """\
483 The ASAP container for scans (single-dish data).
484 """
485
486 @asaplog_post_dec
487 def __init__(self, filename, average=None, unit=None, parallactify=None,
488 **args):
489 """\
490 Create a scantable from a saved one or make a reference
491
492 Parameters:
493
494 filename: the name of an asap table on disk
495 or
496 the name of a rpfits/sdfits/ms file
497 (integrations within scans are auto averaged
498 and the whole file is read) or
499 [advanced] a reference to an existing scantable
500
501 average: average all integrations withinb a scan on read.
502 The default (True) is taken from .asaprc.
503
504 unit: brightness unit; must be consistent with K or Jy.
505 Over-rides the default selected by the filler
506 (input rpfits/sdfits/ms) or replaces the value
507 in existing scantables
508
509 antenna: for MeasurementSet input data only:
510 Antenna selection. integer (id) or string
511 (name or id).
512
513 parallactify: Indicate that the data had been parallactified.
514 Default (false) is taken from rc file.
515
516 getpt: Whether to import direction from MS/POINTING
517 table properly or not.
518 This is effective only when filename is MS.
519 The default (True) is to import direction
520 from MS/POINTING.
521 """
522 if average is None:
523 average = rcParams['scantable.autoaverage']
524 parallactify = parallactify or rcParams['scantable.parallactify']
525 varlist = vars()
526 from asap._asap import stmath
527 self._math = stmath( rcParams['insitu'] )
528 if isinstance(filename, Scantable):
529 Scantable.__init__(self, filename)
530 else:
531 if isinstance(filename, str):
532 filename = os.path.expandvars(filename)
533 filename = os.path.expanduser(filename)
534 if not os.path.exists(filename):
535 s = "File '%s' not found." % (filename)
536 raise IOError(s)
537 if is_scantable(filename):
538 ondisk = rcParams['scantable.storage'] == 'disk'
539 Scantable.__init__(self, filename, ondisk)
540 if unit is not None:
541 self.set_fluxunit(unit)
542 if average:
543 self._assign( self.average_time( scanav=True ) )
544 # do not reset to the default freqframe
545 #self.set_freqframe(rcParams['scantable.freqframe'])
546 elif is_ms(filename):
547 # Measurement Set
548 opts={'ms': {}}
549 mskeys=['getpt','antenna']
550 for key in mskeys:
551 if key in args.keys():
552 opts['ms'][key] = args[key]
553 self._fill([filename], unit, average, opts)
554 elif os.path.isfile(filename):
555 opts={'nro': {}}
556 nrokeys=['freqref']
557 for key in nrokeys:
558 if key in args.keys():
559 opts['nro'][key] = args[key]
560 self._fill([filename], unit, average, opts)
561 # only apply to new data not "copy constructor"
562 self.parallactify(parallactify)
563 else:
564 msg = "The given file '%s'is not a valid " \
565 "asap table." % (filename)
566 raise IOError(msg)
567 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
568 and isinstance(filename[-1], str):
569 self._fill(filename, unit, average)
570 self.parallactify(parallactify)
571 self._add_history("scantable", varlist)
572
573 @asaplog_post_dec
574 def save(self, name=None, format=None, overwrite=False):
575 """\
576 Store the scantable on disk. This can be an asap (aips++) Table,
577 SDFITS or MS2 format.
578
579 Parameters:
580
581 name: the name of the outputfile. For format 'ASCII'
582 this is the root file name (data in 'name'.txt
583 and header in 'name'_header.txt)
584
585 format: an optional file format. Default is ASAP.
586 Allowed are:
587
588 * 'ASAP' (save as ASAP [aips++] Table),
589 * 'SDFITS' (save as SDFITS file)
590 * 'ASCII' (saves as ascii text file)
591 * 'MS2' (saves as an casacore MeasurementSet V2)
592 * 'FITS' (save as image FITS - not readable by
593 class)
594 * 'CLASS' (save as FITS readable by CLASS)
595
596 overwrite: If the file should be overwritten if it exists.
597 The default False is to return with warning
598 without writing the output. USE WITH CARE.
599
600 Example::
601
602 scan.save('myscan.asap')
603 scan.save('myscan.sdfits', 'SDFITS')
604
605 """
606 from os import path
607 format = format or rcParams['scantable.save']
608 suffix = '.'+format.lower()
609 if name is None or name == "":
610 name = 'scantable'+suffix
611 msg = "No filename given. Using default name %s..." % name
612 asaplog.push(msg)
613 name = path.expandvars(name)
614 if path.isfile(name) or path.isdir(name):
615 if not overwrite:
616 msg = "File %s exists." % name
617 raise IOError(msg)
618 format2 = format.upper()
619 if format2 == 'ASAP':
620 self._save(name)
621 elif format2 == 'MS2':
622 msopt = {'ms': {'overwrite': overwrite } }
623 from asap._asap import mswriter
624 writer = mswriter( self )
625 writer.write( name, msopt )
626 else:
627 from asap._asap import stwriter as stw
628 writer = stw(format2)
629 writer.write(self, name)
630 return
631
632 def copy(self):
633 """Return a copy of this scantable.
634
635 *Note*:
636
637 This makes a full (deep) copy. scan2 = scan1 makes a reference.
638
639 Example::
640
641 copiedscan = scan.copy()
642
643 """
644 sd = scantable(Scantable._copy(self))
645 return sd
646
647 def drop_scan(self, scanid=None):
648 """\
649 Return a new scantable where the specified scan number(s) has(have)
650 been dropped.
651
652 Parameters:
653
654 scanid: a (list of) scan number(s)
655
656 """
657 from asap import _is_sequence_or_number as _is_valid
658 from asap import _to_list
659 from asap import unique
660 if not _is_valid(scanid):
661 raise RuntimeError( 'Please specify a scanno to drop from the'
662 ' scantable' )
663 scanid = _to_list(scanid)
664 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
665 for sid in scanid: allscans.remove(sid)
666 if len(allscans) == 0:
667 raise ValueError("Can't remove all scans")
668 sel = selector(scans=allscans)
669 return self._select_copy(sel)
670
671 def _select_copy(self, selection):
672 orig = self.get_selection()
673 self.set_selection(orig+selection)
674 cp = self.copy()
675 self.set_selection(orig)
676 return cp
677
678 def get_scan(self, scanid=None):
679 """\
680 Return a specific scan (by scanno) or collection of scans (by
681 source name) in a new scantable.
682
683 *Note*:
684
685 See scantable.drop_scan() for the inverse operation.
686
687 Parameters:
688
689 scanid: a (list of) scanno or a source name, unix-style
690 patterns are accepted for source name matching, e.g.
691 '*_R' gets all 'ref scans
692
693 Example::
694
695 # get all scans containing the source '323p459'
696 newscan = scan.get_scan('323p459')
697 # get all 'off' scans
698 refscans = scan.get_scan('*_R')
699 # get a susbset of scans by scanno (as listed in scan.summary())
700 newscan = scan.get_scan([0, 2, 7, 10])
701
702 """
703 if scanid is None:
704 raise RuntimeError( 'Please specify a scan no or name to '
705 'retrieve from the scantable' )
706 try:
707 bsel = self.get_selection()
708 sel = selector()
709 if type(scanid) is str:
710 sel.set_name(scanid)
711 return self._select_copy(sel)
712 elif type(scanid) is int:
713 sel.set_scans([scanid])
714 return self._select_copy(sel)
715 elif type(scanid) is list:
716 sel.set_scans(scanid)
717 return self._select_copy(sel)
718 else:
719 msg = "Illegal scanid type, use 'int' or 'list' if ints."
720 raise TypeError(msg)
721 except RuntimeError:
722 raise
723
724 def __str__(self):
725 tempFile = tempfile.NamedTemporaryFile()
726 Scantable._summary(self, tempFile.name)
727 tempFile.seek(0)
728 asaplog.clear()
729 return tempFile.file.read()
730
731 @asaplog_post_dec
732 def summary(self, filename=None):
733 """\
734 Print a summary of the contents of this scantable.
735
736 Parameters:
737
738 filename: the name of a file to write the putput to
739 Default - no file output
740
741 """
742 if filename is not None:
743 if filename is "":
744 filename = 'scantable_summary.txt'
745 from os.path import expandvars, isdir
746 filename = expandvars(filename)
747 if isdir(filename):
748 msg = "Illegal file name '%s'." % (filename)
749 raise IOError(msg)
750 else:
751 filename = ""
752 Scantable._summary(self, filename)
753
754 def get_spectrum(self, rowno):
755 """Return the spectrum for the current row in the scantable as a list.
756
757 Parameters:
758
759 rowno: the row number to retrieve the spectrum from
760
761 """
762 return self._getspectrum(rowno)
763
764 def get_mask(self, rowno):
765 """Return the mask for the current row in the scantable as a list.
766
767 Parameters:
768
769 rowno: the row number to retrieve the mask from
770
771 """
772 return self._getmask(rowno)
773
774 def set_spectrum(self, spec, rowno):
775 """Set the spectrum for the current row in the scantable.
776
777 Parameters:
778
779 spec: the new spectrum
780
781 rowno: the row number to set the spectrum for
782
783 """
784 assert(len(spec) == self.nchan(self.getif(rowno)))
785 return self._setspectrum(spec, rowno)
786
787 def get_coordinate(self, rowno):
788 """Return the (spectral) coordinate for a a given 'rowno'.
789
790 *Note*:
791
792 * This coordinate is only valid until a scantable method modifies
793 the frequency axis.
794 * This coordinate does contain the original frequency set-up
795 NOT the new frame. The conversions however are done using the user
796 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
797 use scantable.freq_align first. Without it there is no closure,
798 i.e.::
799
800 c = myscan.get_coordinate(0)
801 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
802
803 Parameters:
804
805 rowno: the row number for the spectral coordinate
806
807 """
808 return coordinate(Scantable.get_coordinate(self, rowno))
809
810 def get_selection(self):
811 """\
812 Get the selection object currently set on this scantable.
813
814 Example::
815
816 sel = scan.get_selection()
817 sel.set_ifs(0) # select IF 0
818 scan.set_selection(sel) # apply modified selection
819
820 """
821 return selector(self._getselection())
822
823 def set_selection(self, selection=None, **kw):
824 """\
825 Select a subset of the data. All following operations on this scantable
826 are only applied to thi selection.
827
828 Parameters:
829
830 selection: a selector object (default unset the selection), or
831 any combination of 'pols', 'ifs', 'beams', 'scans',
832 'cycles', 'name', 'query'
833
834 Examples::
835
836 sel = selector() # create a selection object
837 self.set_scans([0, 3]) # select SCANNO 0 and 3
838 scan.set_selection(sel) # set the selection
839 scan.summary() # will only print summary of scanno 0 an 3
840 scan.set_selection() # unset the selection
841 # or the equivalent
842 scan.set_selection(scans=[0,3])
843 scan.summary() # will only print summary of scanno 0 an 3
844 scan.set_selection() # unset the selection
845
846 """
847 if selection is None:
848 # reset
849 if len(kw) == 0:
850 selection = selector()
851 else:
852 # try keywords
853 for k in kw:
854 if k not in selector.fields:
855 raise KeyError("Invalid selection key '%s', "
856 "valid keys are %s" % (k,
857 selector.fields))
858 selection = selector(**kw)
859 self._setselection(selection)
860
861 def get_row(self, row=0, insitu=None):
862 """\
863 Select a row in the scantable.
864 Return a scantable with single row.
865
866 Parameters:
867
868 row: row no of integration, default is 0.
869 insitu: if False a new scantable is returned. Otherwise, the
870 scaling is done in-situ. The default is taken from .asaprc
871 (False)
872
873 """
874 if insitu is None:
875 insitu = rcParams['insitu']
876 if not insitu:
877 workscan = self.copy()
878 else:
879 workscan = self
880 # Select a row
881 sel = selector()
882 sel.set_rows([row])
883 workscan.set_selection(sel)
884 if not workscan.nrow() == 1:
885 msg = "Could not identify single row. %d rows selected." \
886 % (workscan.nrow())
887 raise RuntimeError(msg)
888 if insitu:
889 self._assign(workscan)
890 else:
891 return workscan
892
893 @asaplog_post_dec
894 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
895 """\
896 Determine the specified statistic of the current beam/if/pol
897 Takes a 'mask' as an optional parameter to specify which
898 channels should be excluded.
899
900 Parameters:
901
902 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
903 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
904
905 mask: an optional mask specifying where the statistic
906 should be determined.
907
908 form: format string to print statistic values
909
910 row: row number of spectrum to process.
911 (default is None: for all rows)
912
913 Example:
914 scan.set_unit('channel')
915 msk = scan.create_mask([100, 200], [500, 600])
916 scan.stats(stat='mean', mask=m)
917
918 """
919 mask = mask or []
920 if not self._check_ifs():
921 raise ValueError("Cannot apply mask as the IFs have different "
922 "number of channels. Please use setselection() "
923 "to select individual IFs")
924 rtnabc = False
925 if stat.lower().endswith('_abc'): rtnabc = True
926 getchan = False
927 if stat.lower().startswith('min') or stat.lower().startswith('max'):
928 chan = self._math._minmaxchan(self, mask, stat)
929 getchan = True
930 statvals = []
931 if not rtnabc:
932 if row == None:
933 statvals = self._math._stats(self, mask, stat)
934 else:
935 statvals = self._math._statsrow(self, mask, stat, int(row))
936
937 #def cb(i):
938 # return statvals[i]
939
940 #return self._row_callback(cb, stat)
941
942 label=stat
943 #callback=cb
944 out = ""
945 #outvec = []
946 sep = '-'*50
947
948 if row == None:
949 rows = xrange(self.nrow())
950 elif isinstance(row, int):
951 rows = [ row ]
952
953 for i in rows:
954 refstr = ''
955 statunit= ''
956 if getchan:
957 qx, qy = self.chan2data(rowno=i, chan=chan[i])
958 if rtnabc:
959 statvals.append(qx['value'])
960 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
961 statunit= '['+qx['unit']+']'
962 else:
963 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
964
965 tm = self._gettime(i)
966 src = self._getsourcename(i)
967 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
968 out += 'Time[%s]:\n' % (tm)
969 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
970 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
971 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
972 #outvec.append(callback(i))
973 if len(rows) > 1:
974 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
975 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
976 else:
977 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
978 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
979 out += sep+"\n"
980
981 import os
982 if os.environ.has_key( 'USER' ):
983 usr = os.environ['USER']
984 else:
985 import commands
986 usr = commands.getoutput( 'whoami' )
987 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
988 f = open(tmpfile,'w')
989 print >> f, sep
990 print >> f, ' %s %s' % (label, statunit)
991 print >> f, sep
992 print >> f, out
993 f.close()
994 f = open(tmpfile,'r')
995 x = f.readlines()
996 f.close()
997 asaplog.push(''.join(x), False)
998
999 return statvals
1000
1001 def chan2data(self, rowno=0, chan=0):
1002 """\
1003 Returns channel/frequency/velocity and spectral value
1004 at an arbitrary row and channel in the scantable.
1005
1006 Parameters:
1007
1008 rowno: a row number in the scantable. Default is the
1009 first row, i.e. rowno=0
1010
1011 chan: a channel in the scantable. Default is the first
1012 channel, i.e. pos=0
1013
1014 """
1015 if isinstance(rowno, int) and isinstance(chan, int):
1016 qx = {'unit': self.get_unit(),
1017 'value': self._getabcissa(rowno)[chan]}
1018 qy = {'unit': self.get_fluxunit(),
1019 'value': self._getspectrum(rowno)[chan]}
1020 return qx, qy
1021
1022 def stddev(self, mask=None):
1023 """\
1024 Determine the standard deviation of the current beam/if/pol
1025 Takes a 'mask' as an optional parameter to specify which
1026 channels should be excluded.
1027
1028 Parameters:
1029
1030 mask: an optional mask specifying where the standard
1031 deviation should be determined.
1032
1033 Example::
1034
1035 scan.set_unit('channel')
1036 msk = scan.create_mask([100, 200], [500, 600])
1037 scan.stddev(mask=m)
1038
1039 """
1040 return self.stats(stat='stddev', mask=mask);
1041
1042
1043 def get_column_names(self):
1044 """\
1045 Return a list of column names, which can be used for selection.
1046 """
1047 return list(Scantable.get_column_names(self))
1048
1049 def get_tsys(self, row=-1):
1050 """\
1051 Return the System temperatures.
1052
1053 Parameters:
1054
1055 row: the rowno to get the information for. (default all rows)
1056
1057 Returns:
1058
1059 a list of Tsys values for the current selection
1060
1061 """
1062 if row > -1:
1063 return self._get_column(self._gettsys, row)
1064 return self._row_callback(self._gettsys, "Tsys")
1065
1066 def get_tsysspectrum(self, row=-1):
1067 """\
1068 Return the channel dependent system temperatures.
1069
1070 Parameters:
1071
1072 row: the rowno to get the information for. (default all rows)
1073
1074 Returns:
1075
1076 a list of Tsys values for the current selection
1077
1078 """
1079 return self._get_column( self._gettsysspectrum, row )
1080
1081 def set_tsys(self, values, row=-1):
1082 """\
1083 Set the Tsys value(s) of the given 'row' or the whole scantable
1084 (selection).
1085
1086 Parameters:
1087
1088 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
1089 row: the row number to apply Tsys values to.
1090 (default all rows)
1091
1092 """
1093
1094 if not hasattr(values, "__len__"):
1095 values = [values]
1096 self._settsys(values, row)
1097
1098 def get_weather(self, row=-1):
1099 """\
1100 Return the weather information.
1101
1102 Parameters:
1103
1104 row: the rowno to get the information for. (default all rows)
1105
1106 Returns:
1107
1108 a dict or list of of dicts of values for the current selection
1109
1110 """
1111 if row >= len(self):
1112 raise IndexError("row out of range")
1113 values = self._get_column(self._get_weather, row)
1114 if row > -1:
1115 return {'temperature': values[0],
1116 'pressure': values[1], 'humidity' : values[2],
1117 'windspeed' : values[3], 'windaz' : values[4]
1118 }
1119 else:
1120 out = []
1121 for r in values:
1122 out.append({'temperature': r[0],
1123 'pressure': r[1], 'humidity' : r[2],
1124 'windspeed' : r[3], 'windaz' : r[4]
1125 })
1126 return out
1127
1128 def _row_callback(self, callback, label):
1129 out = ""
1130 outvec = []
1131 sep = '-'*50
1132 for i in range(self.nrow()):
1133 tm = self._gettime(i)
1134 src = self._getsourcename(i)
1135 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1136 out += 'Time[%s]:\n' % (tm)
1137 if self.nbeam(-1) > 1:
1138 out += ' Beam[%d] ' % (self.getbeam(i))
1139 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1140 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1141 outvec.append(callback(i))
1142 out += '= %3.3f\n' % (outvec[i])
1143 out += sep+'\n'
1144
1145 asaplog.push(sep)
1146 asaplog.push(" %s" % (label))
1147 asaplog.push(sep)
1148 asaplog.push(out)
1149 asaplog.post()
1150 return outvec
1151
1152 def _get_column(self, callback, row=-1, *args):
1153 """
1154 """
1155 if row == -1:
1156 return [callback(i, *args) for i in range(self.nrow())]
1157 else:
1158 if 0 <= row < self.nrow():
1159 return callback(row, *args)
1160
1161
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # The shortest integration time in the selection determines how
            # many (sub-)second digits are needed to distinguish rows.
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # Strip the unused trailing fields (.%f, then %S, then %M) from
            # the parse format; '/2' is integer division here (Python 2).
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1207
1208
1209 def get_inttime(self, row=-1):
1210 """\
1211 Get a list of integration times for the observations.
1212 Return a time in seconds for each integration in the scantable.
1213
1214 Parameters:
1215
1216 row: row no of integration. Default -1 return all rows.
1217
1218 """
1219 return self._get_column(self._getinttime, row)
1220
1221
1222 def get_sourcename(self, row=-1):
1223 """\
1224 Get a list source names for the observations.
1225 Return a string for each integration in the scantable.
1226 Parameters:
1227
1228 row: row no of integration. Default -1 return all rows.
1229
1230 """
1231 return self._get_column(self._getsourcename, row)
1232
1233 def get_elevation(self, row=-1):
1234 """\
1235 Get a list of elevations for the observations.
1236 Return a float for each integration in the scantable.
1237
1238 Parameters:
1239
1240 row: row no of integration. Default -1 return all rows.
1241
1242 """
1243 return self._get_column(self._getelevation, row)
1244
1245 def get_azimuth(self, row=-1):
1246 """\
1247 Get a list of azimuths for the observations.
1248 Return a float for each integration in the scantable.
1249
1250 Parameters:
1251 row: row no of integration. Default -1 return all rows.
1252
1253 """
1254 return self._get_column(self._getazimuth, row)
1255
1256 def get_parangle(self, row=-1):
1257 """\
1258 Get a list of parallactic angles for the observations.
1259 Return a float for each integration in the scantable.
1260
1261 Parameters:
1262
1263 row: row no of integration. Default -1 return all rows.
1264
1265 """
1266 return self._get_column(self._getparangle, row)
1267
1268 def get_direction(self, row=-1):
1269 """
1270 Get a list of Positions on the sky (direction) for the observations.
1271 Return a string for each integration in the scantable.
1272
1273 Parameters:
1274
1275 row: row no of integration. Default -1 return all rows
1276
1277 """
1278 return self._get_column(self._getdirection, row)
1279
1280 def get_directionval(self, row=-1):
1281 """\
1282 Get a list of Positions on the sky (direction) for the observations.
1283 Return a float for each integration in the scantable.
1284
1285 Parameters:
1286
1287 row: row no of integration. Default -1 return all rows
1288
1289 """
1290 return self._get_column(self._getdirectionvec, row)
1291
1292 @asaplog_post_dec
1293 def set_unit(self, unit='channel'):
1294 """\
1295 Set the unit for all following operations on this scantable
1296
1297 Parameters:
1298
1299 unit: optional unit, default is 'channel'. Use one of '*Hz',
1300 'km/s', 'channel' or equivalent ''
1301
1302 """
1303 varlist = vars()
1304 if unit in ['', 'pixel', 'channel']:
1305 unit = ''
1306 inf = list(self._getcoordinfo())
1307 inf[0] = unit
1308 self._setcoordinfo(inf)
1309 self._add_history("set_unit", varlist)
1310
1311 @asaplog_post_dec
1312 def set_instrument(self, instr):
1313 """\
1314 Set the instrument for subsequent processing.
1315
1316 Parameters:
1317
1318 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1319 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1320
1321 """
1322 self._setInstrument(instr)
1323 self._add_history("set_instument", vars())
1324
1325 @asaplog_post_dec
1326 def set_feedtype(self, feedtype):
1327 """\
1328 Overwrite the feed type, which might not be set correctly.
1329
1330 Parameters:
1331
1332 feedtype: 'linear' or 'circular'
1333
1334 """
1335 self._setfeedtype(feedtype)
1336 self._add_history("set_feedtype", vars())
1337
1338 @asaplog_post_dec
1339 def get_doppler(self):
1340 """\
1341 Get the doppler.
1342 """
1343 return self._getcoordinfo()[2]
1344
1345 @asaplog_post_dec
1346 def set_doppler(self, doppler='RADIO'):
1347 """\
1348 Set the doppler for all following operations on this scantable.
1349
1350 Parameters:
1351
1352 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1353
1354 """
1355 varlist = vars()
1356 inf = list(self._getcoordinfo())
1357 inf[2] = doppler
1358 self._setcoordinfo(inf)
1359 self._add_history("set_doppler", vars())
1360
1361 @asaplog_post_dec
1362 def set_freqframe(self, frame=None):
1363 """\
1364 Set the frame type of the Spectral Axis.
1365
1366 Parameters:
1367
1368 frame: an optional frame type, default 'LSRK'. Valid frames are:
1369 'TOPO', 'LSRD', 'LSRK', 'BARY',
1370 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1371
1372 Example::
1373
1374 scan.set_freqframe('BARY')
1375
1376 """
1377 frame = frame or rcParams['scantable.freqframe']
1378 varlist = vars()
1379 # "REST" is not implemented in casacore
1380 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1381 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1382 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1383 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1384
1385 if frame in valid:
1386 inf = list(self._getcoordinfo())
1387 inf[1] = frame
1388 self._setcoordinfo(inf)
1389 self._add_history("set_freqframe", varlist)
1390 else:
1391 msg = "Please specify a valid freq type. Valid types are:\n", valid
1392 raise TypeError(msg)
1393
1394 @asaplog_post_dec
1395 def set_dirframe(self, frame=""):
1396 """\
1397 Set the frame type of the Direction on the sky.
1398
1399 Parameters:
1400
1401 frame: an optional frame type, default ''. Valid frames are:
1402 'J2000', 'B1950', 'GALACTIC'
1403
1404 Example:
1405
1406 scan.set_dirframe('GALACTIC')
1407
1408 """
1409 varlist = vars()
1410 Scantable.set_dirframe(self, frame)
1411 self._add_history("set_dirframe", varlist)
1412
1413 def get_unit(self):
1414 """\
1415 Get the default unit set in this scantable
1416
1417 Returns:
1418
1419 A unit string
1420
1421 """
1422 inf = self._getcoordinfo()
1423 unit = inf[0]
1424 if unit == '': unit = 'channel'
1425 return unit
1426
1427 @asaplog_post_dec
1428 def get_abcissa(self, rowno=0):
1429 """\
1430 Get the abcissa in the current coordinate setup for the currently
1431 selected Beam/IF/Pol
1432
1433 Parameters:
1434
1435 rowno: an optional row number in the scantable. Default is the
1436 first row, i.e. rowno=0
1437
1438 Returns:
1439
1440 The abcissa values and the format string (as a dictionary)
1441
1442 """
1443 abc = self._getabcissa(rowno)
1444 lbl = self._getabcissalabel(rowno)
1445 return abc, lbl
1446
1447 @asaplog_post_dec
1448 def flag(self, mask=None, unflag=False, row=-1):
1449 """\
1450 Flag the selected data using an optional channel mask.
1451
1452 Parameters:
1453
1454 mask: an optional channel mask, created with create_mask. Default
1455 (no mask) is all channels.
1456
1457 unflag: if True, unflag the data
1458
1459 row: an optional row number in the scantable.
1460 Default -1 flags all rows
1461
1462 """
1463 varlist = vars()
1464 mask = mask or []
1465 self._flag(row, mask, unflag)
1466 self._add_history("flag", varlist)
1467
1468 @asaplog_post_dec
1469 def flag_row(self, rows=None, unflag=False):
1470 """\
1471 Flag the selected data in row-based manner.
1472
1473 Parameters:
1474
1475 rows: list of row numbers to be flagged. Default is no row
1476 (must be explicitly specified to execute row-based
1477 flagging).
1478
1479 unflag: if True, unflag the data.
1480
1481 """
1482 varlist = vars()
1483 if rows is None:
1484 rows = []
1485 self._flag_row(rows, unflag)
1486 self._add_history("flag_row", varlist)
1487
1488 @asaplog_post_dec
1489 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1490 """\
1491 Flag the selected data outside a specified range (in channel-base)
1492
1493 Parameters:
1494
1495 uthres: upper threshold.
1496
1497 dthres: lower threshold
1498
1499 clipoutside: True for flagging data outside the range
1500 [dthres:uthres].
1501 False for flagging data inside the range.
1502
1503 unflag: if True, unflag the data.
1504
1505 """
1506 varlist = vars()
1507 self._clip(uthres, dthres, clipoutside, unflag)
1508 self._add_history("clip", varlist)
1509
1510 @asaplog_post_dec
1511 def lag_flag(self, start, end, unit="MHz", insitu=None):
1512 """\
1513 Flag the data in 'lag' space by providing a frequency to remove.
1514 Flagged data in the scantable get interpolated over the region.
1515 No taper is applied.
1516
1517 Parameters:
1518
1519 start: the start frequency (really a period within the
1520 bandwidth) or period to remove
1521
1522 end: the end frequency or period to remove
1523
1524 unit: the frequency unit (default 'MHz') or '' for
1525 explicit lag channels
1526
1527 *Notes*:
1528
1529 It is recommended to flag edges of the band or strong
1530 signals beforehand.
1531
1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
1536 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1537 if not (unit == "" or base.has_key(unit)):
1538 raise ValueError("%s is not a valid unit." % unit)
1539 if unit == "":
1540 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1541 else:
1542 s = scantable(self._math._lag_flag(self, start*base[unit],
1543 end*base[unit], "frequency"))
1544 s._add_history("lag_flag", varlist)
1545 if insitu:
1546 self._assign(s)
1547 else:
1548 return s
1549
1550 @asaplog_post_dec
1551 def fft(self, rowno=None, mask=None, getrealimag=False):
1552 """\
1553 Apply FFT to the spectra.
1554 Flagged data in the scantable get interpolated over the region.
1555
1556 Parameters:
1557
1558 rowno: The row number(s) to be processed. int, list
1559 and tuple are accepted. By default (None), FFT
1560 is applied to the whole data.
1561
1562 mask: Auxiliary channel mask(s). Given as a boolean
1563 list, it is applied to all specified rows.
1564 A list of boolean lists can also be used to
1565 apply different masks. In the latter case, the
1566 length of 'mask' must be the same as that of
1567 'rowno'. The default is None.
1568
1569 getrealimag: If True, returns the real and imaginary part
1570 values of the complex results.
1571 If False (the default), returns the amplitude
1572 (absolute value) normalised with Ndata/2 and
1573 phase (argument, in unit of radian).
1574
1575 Returns:
1576
1577 A list of dictionaries containing the results for each spectrum.
1578 Each dictionary contains two values, the real and the imaginary
1579 parts when getrealimag = True, or the amplitude(absolute value)
1580 and the phase(argument) when getrealimag = False. The key for
1581 these values are 'real' and 'imag', or 'ampl' and 'phase',
1582 respectively.
1583 """
1584 if rowno is None:
1585 rowno = []
1586 if isinstance(rowno, int):
1587 rowno = [rowno]
1588 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1589 raise TypeError("The row number(s) must be int, list or tuple.")
1590 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1591
1592 usecommonmask = True
1593
1594 if mask is None:
1595 mask = []
1596 if isinstance(mask, list) or isinstance(mask, tuple):
1597 if len(mask) == 0:
1598 mask = [[]]
1599 else:
1600 if isinstance(mask[0], bool):
1601 if len(mask) != self.nchan(self.getif(rowno[0])):
1602 raise ValueError("The spectra and the mask have "
1603 "different length.")
1604 mask = [mask]
1605 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1606 usecommonmask = False
1607 if len(mask) != len(rowno):
1608 raise ValueError("When specifying masks for each "
1609 "spectrum, the numbers of them "
1610 "must be identical.")
1611 for i in xrange(mask):
1612 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1613 raise ValueError("The spectra and the mask have "
1614 "different length.")
1615 else:
1616 raise TypeError("The mask must be a boolean list or "
1617 "a list of boolean list.")
1618 else:
1619 raise TypeError("The mask must be a boolean list or a list of "
1620 "boolean list.")
1621
1622 res = []
1623
1624 imask = 0
1625 for whichrow in rowno:
1626 fspec = self._fft(whichrow, mask[imask], getrealimag)
1627 nspec = len(fspec)
1628
1629 i = 0
1630 v1 = []
1631 v2 = []
1632 reselem = {"real":[],"imag":[]} if getrealimag \
1633 else {"ampl":[],"phase":[]}
1634
1635 while (i < nspec):
1636 v1.append(fspec[i])
1637 v2.append(fspec[i+1])
1638 i += 2
1639
1640 if getrealimag:
1641 reselem["real"] += v1
1642 reselem["imag"] += v2
1643 else:
1644 reselem["ampl"] += v1
1645 reselem["phase"] += v2
1646
1647 res.append(reselem)
1648
1649 if not usecommonmask:
1650 imask += 1
1651
1652 return res
1653
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk =  scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abcissa of 'row' maps window boundaries to channel indices
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        # start from an all-False mask and switch selected windows on
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # Heuristic: if the last element of the last arg is a number, the
        # caller passed windows directly (*args); otherwise args[0] is
        # already a list of windows.
        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value means a one-channel window [v, v]
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or  len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1726
1727 def get_masklist(self, mask=None, row=0, silent=False):
1728 """\
1729 Compute and return a list of mask windows, [min, max].
1730
1731 Parameters:
1732
1733 mask: channel mask, created with create_mask.
1734
1735 row: calcutate the masklist using the specified row
1736 for unit conversions, default is row=0
1737 only necessary if frequency varies over rows.
1738
1739 Returns:
1740
1741 [min, max], [min2, max2], ...
1742 Pairs of start/end points (inclusive)specifying
1743 the masked regions
1744
1745 """
1746 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1747 raise TypeError("The mask should be list or tuple.")
1748 if len(mask) <= 0:
1749 raise TypeError("The mask elements should be > 0")
1750 data = self._getabcissa(row)
1751 if len(data) != len(mask):
1752 msg = "Number of channels in scantable != number of mask elements"
1753 raise TypeError(msg)
1754 u = self._getcoordinfo()[0]
1755 if u == "":
1756 u = "channel"
1757 msg = "The current mask window unit is %s" % u
1758 i = self._check_ifs()
1759 if not i:
1760 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1761 if not silent:
1762 asaplog.push(msg)
1763 masklist = []
1764 ist, ien = None, None
1765 ist, ien=self.get_mask_indices(mask)
1766 if ist is not None and ien is not None:
1767 for i in xrange(len(ist)):
1768 range=[data[ist[i]],data[ien[i]]]
1769 range.sort()
1770 masklist.append([range[0],range[1]])
1771 return masklist
1772
1773 def get_mask_indices(self, mask=None):
1774 """\
1775 Compute and Return lists of mask start indices and mask end indices.
1776
1777 Parameters:
1778
1779 mask: channel mask, created with create_mask.
1780
1781 Returns:
1782
1783 List of mask start indices and that of mask end indices,
1784 i.e., [istart1,istart2,....], [iend1,iend2,....].
1785
1786 """
1787 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1788 raise TypeError("The mask should be list or tuple.")
1789 if len(mask) <= 0:
1790 raise TypeError("The mask elements should be > 0")
1791 istart = []
1792 iend = []
1793 if mask[0]:
1794 istart.append(0)
1795 for i in range(len(mask)-1):
1796 if not mask[i] and mask[i+1]:
1797 istart.append(i+1)
1798 elif mask[i] and not mask[i+1]:
1799 iend.append(i)
1800 if mask[len(mask)-1]:
1801 iend.append(len(mask)-1)
1802 if len(istart) != len(iend):
1803 raise RuntimeError("Numbers of mask start != mask end.")
1804 for i in range(len(istart)):
1805 if istart[i] > iend[i]:
1806 raise RuntimeError("Mask start index > mask end index")
1807 break
1808 return istart,iend
1809
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
            restfreq: the rest frequency.
                     examples: '115.2712GHz', 115271201800.0
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
            doppler: one of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # All parsing below is done in channel units; the current unit is
        # remembered here and restored before returning.
        orig_unit = self.get_unit()
        self.set_unit('channel')

        if restfreq is not None:
            orig_molids = self._getmolidcol_list()
            set_restfreq(self, restfreq)

        orig_coord = self._getcoordinfo()

        if frame is not None:
            orig_frame = orig_coord[1]
            self.set_freqframe(frame)

        if doppler is not None:
            orig_doppler = orig_coord[2]
            self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        # Each comma-separated element is an independent 'spw:channel'
        # selection; results are accumulated in 'res'.
        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")
            
            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")
            
            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.
            
            spw_list = []

            semicolon_sep = colon_sep[0].split(";")
            
            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()
                
                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")
                
                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)
                
                # each split length is 1 (separator absent) or 2 (present
                # once); the product tells which single separator was used.
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression
                    
                    raise RuntimeError("Invalid spw selection.")
                
                elif (len_product == 1):
                    # '', '*', or single spw number.
                    
                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy
                    
                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")
                    
                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.
                    
                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")
                        
                        else:
                            raise RuntimeError("Invalid spw selection.")
                    
                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                    
                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")
                        
                        else:
                            raise RuntimeError("Invalid spw selection.")
                    
                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True
                            
                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")
                            
                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True
                            
                            # keep spws whose channel range overlaps the
                            # requested frequency range (the sign test
                            # below is an interval-overlap check).
                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)
                                
                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
                                
                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True
                            
                            # keep spws whose velocity coverage overlaps
                            # the requested velocity range.
                            for coord in self._get_coordinate_list():
                                spw = coord['if']
                                
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)
                                
                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)
                                
                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")
                            
                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.
            
            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                molid = self._getmolidcol_list()[self.get_first_rowno_by_if(spw)]
                
                if (len(colon_sep) == 1):
                    # no expression for channel selection, 
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]
                
                else: # (len(colon_sep) == 2)
                    crange_list = []
                    
                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")
                        
                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    # velocities are converted to frequency
                                    # (via the spw's rest frequency and
                                    # doppler) and then to pixels.
                                    restf = self.get_restfreqs()[molid][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    dppl = self.get_doppler()
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, dppl)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, dppl)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)
                                    
                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])
                    
                    if (len(crange_list) == 0):
                        crange_list.append([])

                if (len(crange_list[0]) > 0):
                    if res.has_key(spw):
                        res[spw].extend(crange_list)
                    else:
                        res[spw] = crange_list

        for spw in res.keys():
            if spw in valid_ifs:
                # remove duplicated channel ranges
                # (ranges that overlap or are adjacent within one channel
                # are merged into one, with a warning to the user)
                for i in reversed(xrange(len(res[spw]))):
                    for j in xrange(i):
                        if ((res[spw][i][0]-res[spw][j][1])*(res[spw][i][1]-res[spw][j][0]) <= 0) or \
                            (min(abs(res[spw][i][0]-res[spw][j][1]),abs(res[spw][j][0]-res[spw][i][1])) == 1):
                            asaplog.post()
                            merge_warn_mesg = "Spw " + str(spw) + ": overwrapping channel ranges are merged."
                            asaplog.push(merge_warn_mesg)
                            asaplog.post('WARN')
                            res[spw][j][0] = min(res[spw][i][0], res[spw][j][0])
                            res[spw][j][1] = max(res[spw][i][1], res[spw][j][1])
                            res[spw].pop(i)
                            break
            else:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")
        
        # restore original values
        self.set_unit(orig_unit)
        if restfreq is not None:
            self._setmolidcol_list(orig_molids)
        if frame is not None:
            self.set_freqframe(orig_frame)
        if doppler is not None:
            self.set_doppler(orig_doppler)
        
        return res
2177
2178 @asaplog_post_dec
2179 def get_first_rowno_by_if(self, ifno):
2180 found = False
2181 for irow in xrange(self.nrow()):
2182 if (self.getif(irow) == ifno):
2183 res = irow
2184 found = True
2185 break
2186
2187 if not found: raise RuntimeError("No valid spw.")
2188
2189 return res
2190
2191 @asaplog_post_dec
2192 def _get_coordinate_list(self):
2193 res = []
2194 spws = self.getifnos()
2195 for spw in spws:
2196 elem = {}
2197 elem['if'] = spw
2198 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2199 res.append(elem)
2200
2201 return res
2202
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # locate the first row of this IF to obtain its
                        # coordinate object and abcissa values
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # offset = half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # offset = half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq == None means 'whole spectrum' -> full range
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2340
2341 @asaplog_post_dec
2342 def parse_idx_selection(self, mode, selexpr):
2343 """
2344 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2345 BEAMNO, and row number
2346
2347 Parameters:
2348 mode : which column to select.
2349 ['scan',|'if'|'pol'|'beam'|'row']
2350 selexpr : A comma separated selection expression.
2351 examples:
2352 '' = all (returns [])
2353 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2354 (returns [0,1,4,5,6,9])
2355 Returns:
2356 A List of selected indices
2357 """
2358 if selexpr == "":
2359 return []
2360 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2361 'b': 'beam', 'r': 'row'}
2362 smode = mode.lower()[0]
2363 if not (smode in valid_modes.keys()):
2364 msg = "Invalid mode '%s'. Valid modes are %s" %\
2365 (mode, str(valid_modes.values()))
2366 asaplog.post()
2367 asaplog.push(msg)
2368 asaplog.post("ERROR")
2369 mode = valid_modes[smode]
2370 minidx = None
2371 maxidx = None
2372 if smode == 'r':
2373 minidx = 0
2374 maxidx = self.nrow()
2375 else:
2376 idx = getattr(self,"get"+mode+"nos")()
2377 minidx = min(idx)
2378 maxidx = max(idx)
2379 del idx
2380 # split selexpr by "<spaces>,<spaces>"
2381 comma_sep = re.compile('\s*,\s*')
2382 sellist = comma_sep.split(selexpr)
2383 idxlist = []
2384 for currselstr in sellist:
2385 # single range (may include ~, < or >)
2386 currlist = self._parse_selection(currselstr, typestr='integer',
2387 minval=minidx,maxval=maxidx)
2388 for thelist in currlist:
2389 idxlist += range(thelist[0],thelist[1]+1)
2390 # remove duplicated elements after first ones
2391 for i in reversed(xrange(len(idxlist))):
2392 if idxlist.index(idxlist[i]) < i:
2393 idxlist.pop(i)
2394 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2395 asaplog.push(msg)
2396 return idxlist
2397
2398 def _parse_selection(self, selstr, typestr='float', offset=0.,
2399 minval=None, maxval=None):
2400 """
2401 Parameters:
2402 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2403 typestr : The type of the values in returned list
2404 ('integer' or 'float')
2405 offset : The offset value to subtract from or add to
2406 the boundary value if the selection string
2407 includes '<' or '>' [Valid only for typestr='float']
2408 minval, maxval : The minimum/maximum values to set if the
2409 selection string includes '<' or '>'.
2410 The list element is filled with None by default.
2411 Returns:
2412 A list of min/max pair of selections.
2413 Example:
2414 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2415 --> returns [[0,2],[5,7],[9,9]]
2416 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2417 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2418 """
2419 # split selstr by '<spaces>;<spaces>'
2420 semi_sep = re.compile('\s*;\s*')
2421 selgroups = semi_sep.split(selstr)
2422 sellists = []
2423 if typestr.lower().startswith('int'):
2424 formatfunc = int
2425 offset = 1
2426 else:
2427 formatfunc = float
2428
2429 for currsel in selgroups:
2430 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2431 minsel = minval
2432 maxsel = maxval
2433 if currsel.find('~') > 0:
2434 # val0 <= x <= val1
2435 minsel = formatfunc(currsel.split('~')[0].strip())
2436 maxsel = formatfunc(currsel.split('~')[1].strip())
2437 elif currsel.strip().find('<=') > -1:
2438 bound = currsel.split('<=')
2439 try: # try "x <= val"
2440 minsel = minval
2441 maxsel = formatfunc(bound[1].strip())
2442 except ValueError: # now "val <= x"
2443 minsel = formatfunc(bound[0].strip())
2444 maxsel = maxval
2445 elif currsel.strip().find('>=') > -1:
2446 bound = currsel.split('>=')
2447 try: # try "x >= val"
2448 minsel = formatfunc(bound[1].strip())
2449 maxsel = maxval
2450 except ValueError: # now "val >= x"
2451 minsel = minval
2452 maxsel = formatfunc(bound[0].strip())
2453 elif currsel.strip().find('<') > -1:
2454 bound = currsel.split('<')
2455 try: # try "x < val"
2456 minsel = minval
2457 maxsel = formatfunc(bound[1].strip()) \
2458 - formatfunc(offset)
2459 except ValueError: # now "val < x"
2460 minsel = formatfunc(bound[0].strip()) \
2461 + formatfunc(offset)
2462 maxsel = maxval
2463 elif currsel.strip().find('>') > -1:
2464 bound = currsel.split('>')
2465 try: # try "x > val"
2466 minsel = formatfunc(bound[1].strip()) \
2467 + formatfunc(offset)
2468 maxsel = maxval
2469 except ValueError: # now "val > x"
2470 minsel = minval
2471 maxsel = formatfunc(bound[0].strip()) \
2472 - formatfunc(offset)
2473 else:
2474 minsel = formatfunc(currsel)
2475 maxsel = formatfunc(currsel)
2476 sellists.append([minsel,maxsel])
2477 return sellists
2478
2479# def get_restfreqs(self):
2480# """
2481# Get the restfrequency(s) stored in this scantable.
2482# The return value(s) are always of unit 'Hz'
2483# Parameters:
2484# none
2485# Returns:
2486# a list of doubles
2487# """
2488# return list(self._getrestfreqs())
2489
2490 def get_restfreqs(self, ids=None):
2491 """\
2492 Get the restfrequency(s) stored in this scantable.
2493 The return value(s) are always of unit 'Hz'
2494
2495 Parameters:
2496
2497 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2498 be retrieved
2499
2500 Returns:
2501
2502 dictionary containing ids and a list of doubles for each id
2503
2504 """
2505 if ids is None:
2506 rfreqs = {}
2507 idlist = self.getmolnos()
2508 for i in idlist:
2509 rfreqs[i] = list(self._getrestfreqs(i))
2510 return rfreqs
2511 else:
2512 if type(ids) == list or type(ids) == tuple:
2513 rfreqs = {}
2514 for i in ids:
2515 rfreqs[i] = list(self._getrestfreqs(i))
2516 return rfreqs
2517 else:
2518 return list(self._getrestfreqs(ids))
2519
2520 @asaplog_post_dec
2521 def set_restfreqs(self, freqs=None, unit='Hz'):
2522 """\
2523 Set or replace the restfrequency specified and
2524 if the 'freqs' argument holds a scalar,
2525 then that rest frequency will be applied to all the selected
2526 data. If the 'freqs' argument holds
2527 a vector, then it MUST be of equal or smaller length than
2528 the number of IFs (and the available restfrequencies will be
2529 replaced by this vector). In this case, *all* data have
2530 the restfrequency set per IF according
2531 to the corresponding value you give in the 'freqs' vector.
2532 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2533 IF 1 gets restfreq 2e9.
2534
2535 You can also specify the frequencies via a linecatalog.
2536
2537 Parameters:
2538
2539 freqs: list of rest frequency values or string idenitfiers
2540
2541 unit: unit for rest frequency (default 'Hz')
2542
2543
2544 Example::
2545
2546 # set the given restfrequency for the all currently selected IFs
2547 scan.set_restfreqs(freqs=1.4e9)
2548 # set restfrequencies for the n IFs (n > 1) in the order of the
2549 # list, i.e
2550 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2551 # len(list_of_restfreqs) == nIF
2552 # for nIF == 1 the following will set multiple restfrequency for
2553 # that IF
2554 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2555 # set multiple restfrequencies per IF. as a list of lists where
2556 # the outer list has nIF elements, the inner s arbitrary
2557 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2558
2559 *Note*:
2560
2561 To do more sophisticate Restfrequency setting, e.g. on a
2562 source and IF basis, use scantable.set_selection() before using
2563 this function::
2564
2565 # provided your scantable is called scan
2566 selection = selector()
2567 selection.set_name('ORION*')
2568 selection.set_ifs([1])
2569 scan.set_selection(selection)
2570 scan.set_restfreqs(freqs=86.6e9)
2571
2572 """
2573 varlist = vars()
2574 from asap import linecatalog
2575 # simple value
2576 if isinstance(freqs, int) or isinstance(freqs, float):
2577 self._setrestfreqs([freqs], [""], unit)
2578 # list of values
2579 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2580 # list values are scalars
2581 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2582 if len(freqs) == 1:
2583 self._setrestfreqs(freqs, [""], unit)
2584 else:
2585 # allow the 'old' mode of setting mulitple IFs
2586 savesel = self._getselection()
2587 sel = self.get_selection()
2588 iflist = self.getifnos()
2589 if len(freqs)>len(iflist):
2590 raise ValueError("number of elements in list of list "
2591 "exeeds the current IF selections")
2592 iflist = self.getifnos()
2593 for i, fval in enumerate(freqs):
2594 sel.set_ifs(iflist[i])
2595 self._setselection(sel)
2596 self._setrestfreqs([fval], [""], unit)
2597 self._setselection(savesel)
2598
2599 # list values are dict, {'value'=, 'name'=)
2600 elif isinstance(freqs[-1], dict):
2601 values = []
2602 names = []
2603 for d in freqs:
2604 values.append(d["value"])
2605 names.append(d["name"])
2606 self._setrestfreqs(values, names, unit)
2607 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2608 savesel = self._getselection()
2609 sel = self.get_selection()
2610 iflist = self.getifnos()
2611 if len(freqs)>len(iflist):
2612 raise ValueError("number of elements in list of list exeeds"
2613 " the current IF selections")
2614 for i, fval in enumerate(freqs):
2615 sel.set_ifs(iflist[i])
2616 self._setselection(sel)
2617 self._setrestfreqs(fval, [""], unit)
2618 self._setselection(savesel)
2619 # freqs are to be taken from a linecatalog
2620 elif isinstance(freqs, linecatalog):
2621 savesel = self._getselection()
2622 sel = self.get_selection()
2623 for i in xrange(freqs.nrow()):
2624 sel.set_ifs(iflist[i])
2625 self._setselection(sel)
2626 self._setrestfreqs([freqs.get_frequency(i)],
2627 [freqs.get_name(i)], "MHz")
2628 # ensure that we are not iterating past nIF
2629 if i == self.nif()-1: break
2630 self._setselection(savesel)
2631 else:
2632 return
2633 self._add_history("set_restfreqs", varlist)
2634
2635 @asaplog_post_dec
2636 def shift_refpix(self, delta):
2637 """\
2638 Shift the reference pixel of the Spectra Coordinate by an
2639 integer amount.
2640
2641 Parameters:
2642
2643 delta: the amount to shift by
2644
2645 *Note*:
2646
2647 Be careful using this with broadband data.
2648
2649 """
2650 varlist = vars()
2651 Scantable.shift_refpix(self, delta)
2652 s._add_history("shift_refpix", varlist)
2653
2654 @asaplog_post_dec
2655 def history(self, filename=None, nrows=-1, start=0):
2656 """\
2657 Print the history. Optionally to a file.
2658
2659 Parameters:
2660
2661 filename: The name of the file to save the history to.
2662
2663 """
2664 n = self._historylength()
2665 if nrows == -1:
2666 nrows = n
2667 if start+nrows > n:
2668 nrows = nrows-start
2669 if n > 1000 and nrows == n:
2670 nrows = 1000
2671 start = n-1000
2672 asaplog.push("Warning: History has {0} entries. Displaying last "
2673 "1000".format(n))
2674 hist = list(self._gethistory(nrows, start))
2675 out = "-"*80
2676 for h in hist:
2677 if not h.strip():
2678 continue
2679 if h.find("---") >-1:
2680 continue
2681 else:
2682 items = h.split("##")
2683 date = items[0]
2684 func = items[1]
2685 items = items[2:]
2686 out += "\n"+date+"\n"
2687 out += "Function: %s\n Parameters:" % (func)
2688 for i in items:
2689 if i == '':
2690 continue
2691 s = i.split("=")
2692 out += "\n %s = %s" % (s[0], s[1])
2693 out = "\n".join([out, "*"*80])
2694 if filename is not None:
2695 if filename is "":
2696 filename = 'scantable_history.txt'
2697 filename = os.path.expandvars(os.path.expanduser(filename))
2698 if not os.path.isdir(filename):
2699 data = open(filename, 'w')
2700 data.write(out)
2701 data.close()
2702 else:
2703 msg = "Illegal file name '%s'." % (filename)
2704 raise IOError(msg)
2705 return page(out)
2706
2707 #
2708 # Maths business
2709 #
2710 @asaplog_post_dec
2711 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2712 avmode="NONE"):
2713 """\
2714 Return the (time) weighted average of a scan. Scans will be averaged
2715 only if the source direction (RA/DEC) is within 1' otherwise
2716
2717 *Note*:
2718
2719 in channels only - align if necessary
2720
2721 Parameters:
2722
2723 mask: an optional mask (only used for 'var' and 'tsys'
2724 weighting)
2725
2726 scanav: True averages each scan separately
2727 False (default) averages all scans together,
2728
2729 weight: Weighting scheme.
2730 'none' (mean no weight)
2731 'var' (1/var(spec) weighted)
2732 'tsys' (1/Tsys**2 weighted)
2733 'tint' (integration time weighted)
2734 'tintsys' (Tint/Tsys**2)
2735 'median' ( median averaging)
2736 The default is 'tint'
2737
2738 align: align the spectra in velocity before averaging. It takes
2739 the time of the first spectrum as reference time.
2740 avmode: 'SOURCE' - also select by source name - or
2741 'NONE' (default). Not applicable for scanav=True or
2742 weight=median
2743
2744 Example::
2745
2746 # time average the scantable without using a mask
2747 newscan = scan.average_time()
2748
2749 """
2750 varlist = vars()
2751 weight = weight or 'TINT'
2752 mask = mask or ()
2753 scanav = (scanav and 'SCAN') or avmode.upper()
2754 scan = (self, )
2755
2756 if align:
2757 scan = (self.freq_align(insitu=False), )
2758 asaplog.push("Note: Alignment is don on a source-by-source basis")
2759 asaplog.push("Note: Averaging (by default) is not")
2760 # we need to set it to SOURCE averaging here
2761 s = None
2762 if weight.upper() == 'MEDIAN':
2763 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2764 scanav))
2765 else:
2766 s = scantable(self._math._average(scan, mask, weight.upper(),
2767 scanav))
2768 s._add_history("average_time", varlist)
2769 return s
2770
2771 @asaplog_post_dec
2772 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2773 """\
2774 Return a scan where all spectra are converted to either
2775 Jansky or Kelvin depending upon the flux units of the scan table.
2776 By default the function tries to look the values up internally.
2777 If it can't find them (or if you want to over-ride), you must
2778 specify EITHER jyperk OR eta (and D which it will try to look up
2779 also if you don't set it). jyperk takes precedence if you set both.
2780
2781 Parameters:
2782
2783 jyperk: the Jy / K conversion factor
2784
2785 eta: the aperture efficiency
2786
2787 d: the geometric diameter (metres)
2788
2789 insitu: if False a new scantable is returned.
2790 Otherwise, the scaling is done in-situ
2791 The default is taken from .asaprc (False)
2792
2793 """
2794 if insitu is None: insitu = rcParams['insitu']
2795 self._math._setinsitu(insitu)
2796 varlist = vars()
2797 jyperk = jyperk or -1.0
2798 d = d or -1.0
2799 eta = eta or -1.0
2800 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2801 s._add_history("convert_flux", varlist)
2802 if insitu: self._assign(s)
2803 else: return s
2804
2805 @asaplog_post_dec
2806 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2807 """\
2808 Return a scan after applying a gain-elevation correction.
2809 The correction can be made via either a polynomial or a
2810 table-based interpolation (and extrapolation if necessary).
2811 You specify polynomial coefficients, an ascii table or neither.
2812 If you specify neither, then a polynomial correction will be made
2813 with built in coefficients known for certain telescopes (an error
2814 will occur if the instrument is not known).
2815 The data and Tsys are *divided* by the scaling factors.
2816
2817 Parameters:
2818
2819 poly: Polynomial coefficients (default None) to compute a
2820 gain-elevation correction as a function of
2821 elevation (in degrees).
2822
2823 filename: The name of an ascii file holding correction factors.
2824 The first row of the ascii file must give the column
2825 names and these MUST include columns
2826 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2827 by this) somewhere.
2828 The second row must give the data type of the
2829 column. Use 'R' for Real and 'I' for Integer.
2830 An example file would be
2831 (actual factors are arbitrary) :
2832
2833 TIME ELEVATION FACTOR
2834 R R R
2835 0.1 0 0.8
2836 0.2 20 0.85
2837 0.3 40 0.9
2838 0.4 60 0.85
2839 0.5 80 0.8
2840 0.6 90 0.75
2841
2842 method: Interpolation method when correcting from a table.
2843 Values are 'nearest', 'linear' (default), 'cubic'
2844 and 'spline'
2845
2846 insitu: if False a new scantable is returned.
2847 Otherwise, the scaling is done in-situ
2848 The default is taken from .asaprc (False)
2849
2850 """
2851
2852 if insitu is None: insitu = rcParams['insitu']
2853 self._math._setinsitu(insitu)
2854 varlist = vars()
2855 poly = poly or ()
2856 from os.path import expandvars
2857 filename = expandvars(filename)
2858 s = scantable(self._math._gainel(self, poly, filename, method))
2859 s._add_history("gain_el", varlist)
2860 if insitu:
2861 self._assign(s)
2862 else:
2863 return s
2864
2865 @asaplog_post_dec
2866 def freq_align(self, reftime=None, method='cubic', insitu=None):
2867 """\
2868 Return a scan where all rows have been aligned in frequency/velocity.
2869 The alignment frequency frame (e.g. LSRK) is that set by function
2870 set_freqframe.
2871
2872 Parameters:
2873
2874 reftime: reference time to align at. By default, the time of
2875 the first row of data is used.
2876
2877 method: Interpolation method for regridding the spectra.
2878 Choose from 'nearest', 'linear', 'cubic' (default)
2879 and 'spline'
2880
2881 insitu: if False a new scantable is returned.
2882 Otherwise, the scaling is done in-situ
2883 The default is taken from .asaprc (False)
2884
2885 """
2886 if insitu is None: insitu = rcParams["insitu"]
2887 oldInsitu = self._math._insitu()
2888 self._math._setinsitu(insitu)
2889 varlist = vars()
2890 reftime = reftime or ""
2891 s = scantable(self._math._freq_align(self, reftime, method))
2892 s._add_history("freq_align", varlist)
2893 self._math._setinsitu(oldInsitu)
2894 if insitu:
2895 self._assign(s)
2896 else:
2897 return s
2898
2899 @asaplog_post_dec
2900 def opacity(self, tau=None, insitu=None):
2901 """\
2902 Apply an opacity correction. The data
2903 and Tsys are multiplied by the correction factor.
2904
2905 Parameters:
2906
2907 tau: (list of) opacity from which the correction factor is
2908 exp(tau*ZD)
2909 where ZD is the zenith-distance.
2910 If a list is provided, it has to be of length nIF,
2911 nIF*nPol or 1 and in order of IF/POL, e.g.
2912 [opif0pol0, opif0pol1, opif1pol0 ...]
2913 if tau is `None` the opacities are determined from a
2914 model.
2915
2916 insitu: if False a new scantable is returned.
2917 Otherwise, the scaling is done in-situ
2918 The default is taken from .asaprc (False)
2919
2920 """
2921 if insitu is None:
2922 insitu = rcParams['insitu']
2923 self._math._setinsitu(insitu)
2924 varlist = vars()
2925 if not hasattr(tau, "__len__"):
2926 tau = [tau]
2927 s = scantable(self._math._opacity(self, tau))
2928 s._add_history("opacity", varlist)
2929 if insitu:
2930 self._assign(s)
2931 else:
2932 return s
2933
2934 @asaplog_post_dec
2935 def bin(self, width=5, insitu=None):
2936 """\
2937 Return a scan where all spectra have been binned up.
2938
2939 Parameters:
2940
2941 width: The bin width (default=5) in pixels
2942
2943 insitu: if False a new scantable is returned.
2944 Otherwise, the scaling is done in-situ
2945 The default is taken from .asaprc (False)
2946
2947 """
2948 if insitu is None:
2949 insitu = rcParams['insitu']
2950 self._math._setinsitu(insitu)
2951 varlist = vars()
2952 s = scantable(self._math._bin(self, width))
2953 s._add_history("bin", varlist)
2954 if insitu:
2955 self._assign(s)
2956 else:
2957 return s
2958
2959 @asaplog_post_dec
2960 def reshape(self, first, last, insitu=None):
2961 """Resize the band by providing first and last channel.
2962 This will cut off all channels outside [first, last].
2963 """
2964 if insitu is None:
2965 insitu = rcParams['insitu']
2966 varlist = vars()
2967 if last < 0:
2968 last = self.nchan()-1 + last
2969 s = None
2970 if insitu:
2971 s = self
2972 else:
2973 s = self.copy()
2974 s._reshape(first,last)
2975 s._add_history("reshape", varlist)
2976 if not insitu:
2977 return s
2978
2979 @asaplog_post_dec
2980 def resample(self, width=5, method='cubic', insitu=None):
2981 """\
2982 Return a scan where all spectra have been binned up.
2983
2984 Parameters:
2985
2986 width: The bin width (default=5) in pixels
2987
2988 method: Interpolation method when correcting from a table.
2989 Values are 'nearest', 'linear', 'cubic' (default)
2990 and 'spline'
2991
2992 insitu: if False a new scantable is returned.
2993 Otherwise, the scaling is done in-situ
2994 The default is taken from .asaprc (False)
2995
2996 """
2997 if insitu is None:
2998 insitu = rcParams['insitu']
2999 self._math._setinsitu(insitu)
3000 varlist = vars()
3001 s = scantable(self._math._resample(self, method, width))
3002 s._add_history("resample", varlist)
3003 if insitu:
3004 self._assign(s)
3005 else:
3006 return s
3007
3008 @asaplog_post_dec
3009 def average_pol(self, mask=None, weight='none'):
3010 """\
3011 Average the Polarisations together.
3012
3013 Parameters:
3014
3015 mask: An optional mask defining the region, where the
3016 averaging will be applied. The output will have all
3017 specified points masked.
3018
3019 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3020 weighted), or 'tsys' (1/Tsys**2 weighted)
3021
3022 """
3023 varlist = vars()
3024 mask = mask or ()
3025 s = scantable(self._math._averagepol(self, mask, weight.upper()))
3026 s._add_history("average_pol", varlist)
3027 return s
3028
3029 @asaplog_post_dec
3030 def average_beam(self, mask=None, weight='none'):
3031 """\
3032 Average the Beams together.
3033
3034 Parameters:
3035 mask: An optional mask defining the region, where the
3036 averaging will be applied. The output will have all
3037 specified points masked.
3038
3039 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
3040 weighted), or 'tsys' (1/Tsys**2 weighted)
3041
3042 """
3043 varlist = vars()
3044 mask = mask or ()
3045 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
3046 s._add_history("average_beam", varlist)
3047 return s
3048
3049 def parallactify(self, pflag):
3050 """\
3051 Set a flag to indicate whether this data should be treated as having
3052 been 'parallactified' (total phase == 0.0)
3053
3054 Parameters:
3055
3056 pflag: Bool indicating whether to turn this on (True) or
3057 off (False)
3058
3059 """
3060 varlist = vars()
3061 self._parallactify(pflag)
3062 self._add_history("parallactify", varlist)
3063
3064 @asaplog_post_dec
3065 def convert_pol(self, poltype=None):
3066 """\
3067 Convert the data to a different polarisation type.
3068 Note that you will need cross-polarisation terms for most conversions.
3069
3070 Parameters:
3071
3072 poltype: The new polarisation type. Valid types are:
3073 'linear', 'circular', 'stokes' and 'linpol'
3074
3075 """
3076 varlist = vars()
3077 s = scantable(self._math._convertpol(self, poltype))
3078 s._add_history("convert_pol", varlist)
3079 return s
3080
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive verify loop below:
        #   'H' initial, None after first row, 'N' = reject all remaining rows
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user to accept/reject this row's smoothing
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3173
3174 @asaplog_post_dec
3175 def regrid_channel(self, width=5, plot=False, insitu=None):
3176 """\
3177 Regrid the spectra by the specified channel width
3178
3179 Parameters:
3180
3181 width: The channel width (float) of regridded spectra
3182 in the current spectral unit.
3183
3184 plot: [NOT IMPLEMENTED YET]
3185 plot the original and the regridded spectra.
3186 In this each indivual fit has to be approved, by
3187 typing 'y' or 'n'
3188
3189 insitu: if False a new scantable is returned.
3190 Otherwise, the scaling is done in-situ
3191 The default is taken from .asaprc (False)
3192
3193 """
3194 if insitu is None: insitu = rcParams['insitu']
3195 varlist = vars()
3196
3197 if plot:
3198 asaplog.post()
3199 asaplog.push("Verification plot is not implemtnetd yet.")
3200 asaplog.post("WARN")
3201
3202 s = self.copy()
3203 s._regrid_specchan(width)
3204
3205 s._add_history("regrid_channel", varlist)
3206
3207# if plot:
3208# from asap.asapplotter import new_asaplot
3209# theplot = new_asaplot(rcParams['plotter.gui'])
3210# from matplotlib import rc as rcp
3211# rcp('lines', linewidth=1)
3212# theplot.set_panels()
3213# ylab=s._get_ordinate_label()
3214# #theplot.palette(0,["#777777","red"])
3215# for r in xrange(s.nrow()):
3216# xsm=s._getabcissa(r)
3217# ysm=s._getspectrum(r)
3218# xorg=orgscan._getabcissa(r)
3219# yorg=orgscan._getspectrum(r)
3220# theplot.clear()
3221# theplot.hold()
3222# theplot.set_axes('ylabel',ylab)
3223# theplot.set_axes('xlabel',s._getabcissalabel(r))
3224# theplot.set_axes('title',s._getsourcename(r))
3225# theplot.set_line(label='Original',color="#777777")
3226# theplot.plot(xorg,yorg)
3227# theplot.set_line(label='Smoothed',color="red")
3228# theplot.plot(xsm,ysm)
3229# ### Ugly part for legend
3230# for i in [0,1]:
3231# theplot.subplots[0]['lines'].append(
3232# [theplot.subplots[0]['axes'].lines[i]]
3233# )
3234# theplot.release()
3235# ### Ugly part for legend
3236# theplot.subplots[0]['lines']=[]
3237# res = raw_input("Accept smoothing ([y]/n): ")
3238# if res.upper() == 'N':
3239# s._setspectrum(yorg, r)
3240# theplot.quit()
3241# del theplot
3242# del orgscan
3243
3244 if insitu: self._assign(s)
3245 else: return s
3246
3247 @asaplog_post_dec
3248 def _parse_wn(self, wn):
3249 if isinstance(wn, list) or isinstance(wn, tuple):
3250 return wn
3251 elif isinstance(wn, int):
3252 return [ wn ]
3253 elif isinstance(wn, str):
3254 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3255 val = wn.split('-')
3256 val = [int(val[0]), int(val[1])]
3257 val.sort()
3258 res = [i for i in xrange(val[0], val[1]+1)]
3259 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3260 val = int(wn[2:])+1
3261 res = [i for i in xrange(val)]
3262 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3263 val = int(wn[:-2])+1
3264 res = [i for i in xrange(val)]
3265 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3266 val = int(wn[1:])
3267 res = [i for i in xrange(val)]
3268 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3269 val = int(wn[:-1])
3270 res = [i for i in xrange(val)]
3271 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3272 # then interpreted in C++
3273 # side as [a,a+1,...,a_nyq]
3274 # (CAS-3759)
3275 val = int(wn[2:])
3276 res = [val, -999]
3277 #res = [i for i in xrange(val, self.nchan()/2+1)]
3278 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3279 # then interpreted in C++
3280 # side as [a,a+1,...,a_nyq]
3281 # (CAS-3759)
3282 val = int(wn[:-2])
3283 res = [val, -999]
3284 #res = [i for i in xrange(val, self.nchan()/2+1)]
3285 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3286 # then interpreted in C++
3287 # side as [a+1,a+2,...,a_nyq]
3288 # (CAS-3759)
3289 val = int(wn[1:])+1
3290 res = [val, -999]
3291 #res = [i for i in xrange(val, self.nchan()/2+1)]
3292 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3293 # then interpreted in C++
3294 # side as [a+1,a+2,...,a_nyq]
3295 # (CAS-3759)
3296 val = int(wn[:-1])+1
3297 res = [val, -999]
3298 #res = [i for i in xrange(val, self.nchan()/2+1)]
3299
3300 return res
3301 else:
3302 msg = 'wrong value given for addwn/rejwn'
3303 raise RuntimeError(msg)
3304
3305 @asaplog_post_dec
3306 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3307 """\
3308 Subtract baseline based on parameters written in Baseline Table.
3309
3310 Parameters:
3311 insitu: if True, baseline fitting/subtraction is done
3312 in-situ. If False, a new scantable with
3313 baseline subtracted is returned. Actually,
3314 format of the returned value depends on both
3315 insitu and retfitres (see below).
3316 The default is taken from .asaprc (False)
3317 retfitres: if True, the results of baseline fitting (i.e.,
3318 coefficients and rms) are returned.
3319 default is False.
3320 The format of the returned value of this
3321 function varies as follows:
3322 (1) in case insitu=True and retfitres=True:
3323 fitting result.
3324 (2) in case insitu=True and retfitres=False:
3325 None.
3326 (3) in case insitu=False and retfitres=True:
3327 a dictionary containing a new scantable
3328 (with baseline subtracted) and the fitting
3329 results.
3330 (4) in case insitu=False and retfitres=False:
3331 a new scantable (with baseline subtracted).
3332 inbltable: name of input baseline table. The row number of
3333 scantable and that of inbltable must be
3334 identical.
3335 outbltable: name of output baseline table where baseline
3336 parameters and fitting results recorded.
3337 default is ''(no output).
3338 overwrite: if True when an existing baseline table is
3339 specified for outbltable, overwrites it.
3340 Otherwise there is no harm.
3341 default is False.
3342 """
3343
3344 try:
3345 varlist = vars()
3346 if retfitres is None: retfitres = False
3347 if inbltable is None: raise ValueError("bltable missing.")
3348 if outbltable is None: outbltable = ''
3349 if overwrite is None: overwrite = False
3350
3351 if insitu is None: insitu = rcParams['insitu']
3352 if insitu:
3353 workscan = self
3354 else:
3355 workscan = self.copy()
3356
3357 sres = workscan._apply_bltable(inbltable,
3358 retfitres,
3359 outbltable,
3360 os.path.exists(outbltable),
3361 overwrite)
3362 if retfitres: res = parse_fitresult(sres)
3363
3364 workscan._add_history('apply_bltable', varlist)
3365
3366 if insitu:
3367 self._assign(workscan)
3368 if retfitres:
3369 return res
3370 else:
3371 return None
3372 else:
3373 if retfitres:
3374 return {'scantable': workscan, 'fitresults': res}
3375 else:
3376 return workscan
3377
3378 except RuntimeError, e:
3379 raise_fitting_failure_exception(e)
3380
3381 @asaplog_post_dec
3382 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3383 """\
3384 Subtract baseline based on parameters written in the input list.
3385
3386 Parameters:
3387 insitu: if True, baseline fitting/subtraction is done
3388 in-situ. If False, a new scantable with
3389 baseline subtracted is returned. Actually,
3390 format of the returned value depends on both
3391 insitu and retfitres (see below).
3392 The default is taken from .asaprc (False)
3393 retfitres: if True, the results of baseline fitting (i.e.,
3394 coefficients and rms) are returned.
3395 default is False.
3396 The format of the returned value of this
3397 function varies as follows:
3398 (1) in case insitu=True and retfitres=True:
3399 fitting result.
3400 (2) in case insitu=True and retfitres=False:
3401 None.
3402 (3) in case insitu=False and retfitres=True:
3403 a dictionary containing a new scantable
3404 (with baseline subtracted) and the fitting
3405 results.
3406 (4) in case insitu=False and retfitres=False:
3407 a new scantable (with baseline subtracted).
3408 blinfo: baseline parameter set stored in a dictionary
3409 or a list of dictionary. Each dictionary
3410 corresponds to each spectrum and must contain
3411 the following keys and values:
3412 'row': row number,
3413 'blfunc': function name. available ones include
3414 'poly', 'chebyshev', 'cspline' and
3415 'sinusoid',
3416 'order': maximum order of polynomial. needed
3417 if blfunc='poly' or 'chebyshev',
3418 'npiece': number or piecewise polynomial.
3419 needed if blfunc='cspline',
3420 'nwave': a list of sinusoidal wave numbers.
3421 needed if blfunc='sinusoid', and
3422 'masklist': min-max windows for channel mask.
3423 the specified ranges will be used
3424 for fitting.
3425 bltable: name of output baseline table where baseline
3426 parameters and fitting results recorded.
3427 default is ''(no output).
3428 overwrite: if True when an existing baseline table is
3429 specified for bltable, overwrites it.
3430 Otherwise there is no harm.
3431 default is False.
3432
3433 Example:
3434 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3435 'masklist':[[10,350],[352,510]]},
3436 {'row':1, 'blfunc':'cspline', 'npiece':3,
3437 'masklist':[[3,16],[19,404],[407,511]]}
3438 ])
3439
3440 the first spectrum (row=0) will be fitted with polynomial
3441 of order=5 and the next one (row=1) will be fitted with cubic
3442 spline consisting of 3 pieces.
3443 """
3444
3445 try:
3446 varlist = vars()
3447 if retfitres is None: retfitres = False
3448 if blinfo is None: blinfo = []
3449 if bltable is None: bltable = ''
3450 if overwrite is None: overwrite = False
3451
3452 if insitu is None: insitu = rcParams['insitu']
3453 if insitu:
3454 workscan = self
3455 else:
3456 workscan = self.copy()
3457
3458 nrow = workscan.nrow()
3459
3460 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3461
3462 print "in_blinfo=< "+ str(in_blinfo)+" >"
3463
3464 sres = workscan._sub_baseline(in_blinfo,
3465 retfitres,
3466 bltable,
3467 os.path.exists(bltable),
3468 overwrite)
3469 if retfitres: res = parse_fitresult(sres)
3470
3471 workscan._add_history('sub_baseline', varlist)
3472
3473 if insitu:
3474 self._assign(workscan)
3475 if retfitres:
3476 return res
3477 else:
3478 return None
3479 else:
3480 if retfitres:
3481 return {'scantable': workscan, 'fitresults': res}
3482 else:
3483 return workscan
3484
3485 except RuntimeError, e:
3486 raise_fitting_failure_exception(e)
3487
3488 @asaplog_post_dec
3489 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3490 whichrow=None, uselinefinder=None, edge=None,
3491 threshold=None, chan_avg_limit=None):
3492 """\
3493 Calculates and returns model selection criteria for a specified
3494 baseline model and a given spectrum data.
3495 Available values include Akaike Information Criterion (AIC), the
3496 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3497 Bayesian Information Criterion (BIC) and the Generalised Cross
3498 Validation (GCV).
3499
3500 Parameters:
3501 value: name of model selection criteria to calculate.
3502 available ones include 'aic', 'aicc', 'bic' and
3503 'gcv'. default is 'aicc'.
3504 blfunc: baseline function name. available ones include
3505 'chebyshev', 'cspline' and 'sinusoid'.
3506 default is 'chebyshev'.
3507 order: parameter for basline function. actually stands for
3508 order of polynomial (order) for 'chebyshev',
3509 number of spline pieces (npiece) for 'cspline' and
3510 maximum wave number for 'sinusoid', respectively.
3511 default is 5 (which is also the default order value
3512 for [auto_]chebyshev_baseline()).
3513 mask: an optional mask. default is [].
3514 whichrow: row number. default is 0 (the first row)
3515 uselinefinder: use sd.linefinder() to flag out line regions
3516 default is True.
3517 edge: an optional number of channel to drop at
3518 the edge of spectrum. If only one value is
3519 specified, the same number will be dropped
3520 from both sides of the spectrum. Default
3521 is to keep all channels. Nested tuples
3522 represent individual edge selection for
3523 different IFs (a number of spectral channels
3524 can be different)
3525 default is (0, 0).
3526 threshold: the threshold used by line finder. It is
3527 better to keep it large as only strong lines
3528 affect the baseline solution.
3529 default is 3.
3530 chan_avg_limit: a maximum number of consequtive spectral
3531 channels to average during the search of
3532 weak and broad lines. The default is no
3533 averaging (and no search for weak lines).
3534 If such lines can affect the fitted baseline
3535 (e.g. a high order polynomial is fitted),
3536 increase this parameter (usually values up
3537 to 8 are reasonable). Most users of this
3538 method should find the default value sufficient.
3539 default is 1.
3540
3541 Example:
3542 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3543 """
3544
3545 try:
3546 varlist = vars()
3547
3548 if value is None: value = 'aicc'
3549 if blfunc is None: blfunc = 'chebyshev'
3550 if order is None: order = 5
3551 if mask is None: mask = []
3552 if whichrow is None: whichrow = 0
3553 if uselinefinder is None: uselinefinder = True
3554 if edge is None: edge = (0, 0)
3555 if threshold is None: threshold = 3
3556 if chan_avg_limit is None: chan_avg_limit = 1
3557
3558 return self._calc_aic(value, blfunc, order, mask,
3559 whichrow, uselinefinder, edge,
3560 threshold, chan_avg_limit)
3561
3562 except RuntimeError, e:
3563 raise_fitting_failure_exception(e)
3564
3565 @asaplog_post_dec
3566 def sinusoid_baseline(self, mask=None, applyfft=None,
3567 fftmethod=None, fftthresh=None,
3568 addwn=None, rejwn=None,
3569 insitu=None,
3570 clipthresh=None, clipniter=None,
3571 plot=None,
3572 getresidual=None,
3573 showprogress=None, minnrow=None,
3574 outlog=None,
3575 blfile=None, csvformat=None,
3576 bltable=None):
3577 """\
3578 Return a scan which has been baselined (all rows) with sinusoidal
3579 functions.
3580
3581 Parameters:
3582 mask: an optional mask
3583 applyfft: if True use some method, such as FFT, to find
3584 strongest sinusoidal components in the wavenumber
3585 domain to be used for baseline fitting.
3586 default is True.
3587 fftmethod: method to find the strong sinusoidal components.
3588 now only 'fft' is available and it is the default.
3589 fftthresh: the threshold to select wave numbers to be used for
3590 fitting from the distribution of amplitudes in the
3591 wavenumber domain.
3592 both float and string values accepted.
3593 given a float value, the unit is set to sigma.
3594 for string values, allowed formats include:
3595 'xsigma' or 'x' (= x-sigma level. e.g.,
3596 '3sigma'), or
3597 'topx' (= the x strongest ones, e.g. 'top5').
3598 default is 3.0 (unit: sigma).
3599 addwn: the additional wave numbers to be used for fitting.
3600 list or integer value is accepted to specify every
3601 wave numbers. also string value can be used in case
3602 you need to specify wave numbers in a certain range,
3603 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3604 '<a' (= 0,1,...,a-2,a-1),
3605 '>=a' (= a, a+1, ... up to the maximum wave
3606 number corresponding to the Nyquist
3607 frequency for the case of FFT).
3608 default is [0].
3609 rejwn: the wave numbers NOT to be used for fitting.
3610 can be set just as addwn but has higher priority:
3611 wave numbers which are specified both in addwn
3612 and rejwn will NOT be used. default is [].
3613 insitu: if False a new scantable is returned.
3614 Otherwise, the scaling is done in-situ
3615 The default is taken from .asaprc (False)
3616 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3617 clipniter: maximum number of iteration of 'clipthresh'-sigma
3618 clipping (default is 0)
3619 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3620 plot the fit and the residual. In this each
3621 indivual fit has to be approved, by typing 'y'
3622 or 'n'
3623 getresidual: if False, returns best-fit values instead of
3624 residual. (default is True)
3625 showprogress: show progress status for large data.
3626 default is True.
3627 minnrow: minimum number of input spectra to show.
3628 default is 1000.
3629 outlog: Output the coefficients of the best-fit
3630 function to logger (default is False)
3631 blfile: Name of a text file in which the best-fit
3632 parameter values to be written
3633 (default is '': no file/logger output)
3634 csvformat: if True blfile is csv-formatted, default is False.
3635 bltable: name of a baseline table where fitting results
3636 (coefficients, rms, etc.) are to be written.
3637 if given, fitting results will NOT be output to
3638 scantable (insitu=True) or None will be
3639 returned (insitu=False).
3640 (default is "": no table output)
3641
3642 Example:
3643 # return a scan baselined by a combination of sinusoidal curves
3644 # having wave numbers in spectral window up to 10,
3645 # also with 3-sigma clipping, iteration up to 4 times
3646 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3647
3648 Note:
3649 The best-fit parameter values output in logger and/or blfile are now
3650 based on specunit of 'channel'.
3651 """
3652
3653 try:
3654 varlist = vars()
3655
3656 if insitu is None: insitu = rcParams['insitu']
3657 if insitu:
3658 workscan = self
3659 else:
3660 workscan = self.copy()
3661
3662 if mask is None: mask = []
3663 if applyfft is None: applyfft = True
3664 if fftmethod is None: fftmethod = 'fft'
3665 if fftthresh is None: fftthresh = 3.0
3666 if addwn is None: addwn = [0]
3667 if rejwn is None: rejwn = []
3668 if clipthresh is None: clipthresh = 3.0
3669 if clipniter is None: clipniter = 0
3670 if plot is None: plot = False
3671 if getresidual is None: getresidual = True
3672 if showprogress is None: showprogress = True
3673 if minnrow is None: minnrow = 1000
3674 if outlog is None: outlog = False
3675 if blfile is None: blfile = ''
3676 if csvformat is None: csvformat = False
3677 if bltable is None: bltable = ''
3678
3679 sapplyfft = 'true' if applyfft else 'false'
3680 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3681
3682 scsvformat = 'T' if csvformat else 'F'
3683
3684 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3685 workscan._sinusoid_baseline(mask,
3686 fftinfo,
3687 #applyfft, fftmethod.lower(),
3688 #str(fftthresh).lower(),
3689 workscan._parse_wn(addwn),
3690 workscan._parse_wn(rejwn),
3691 clipthresh, clipniter,
3692 getresidual,
3693 pack_progress_params(showprogress,
3694 minnrow),
3695 outlog, scsvformat+blfile,
3696 bltable)
3697 workscan._add_history('sinusoid_baseline', varlist)
3698
3699 if bltable == '':
3700 if insitu:
3701 self._assign(workscan)
3702 else:
3703 return workscan
3704 else:
3705 if not insitu:
3706 return None
3707
3708 except RuntimeError, e:
3709 raise_fitting_failure_exception(e)
3710
3711
3712 @asaplog_post_dec
3713 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3714 fftmethod=None, fftthresh=None,
3715 addwn=None, rejwn=None,
3716 insitu=None,
3717 clipthresh=None, clipniter=None,
3718 edge=None, threshold=None, chan_avg_limit=None,
3719 plot=None,
3720 getresidual=None,
3721 showprogress=None, minnrow=None,
3722 outlog=None,
3723 blfile=None, csvformat=None,
3724 bltable=None):
3725 """\
3726 Return a scan which has been baselined (all rows) with sinusoidal
3727 functions.
3728 Spectral lines are detected first using linefinder and masked out
3729 to avoid them affecting the baseline solution.
3730
3731 Parameters:
3732 mask: an optional mask retreived from scantable
3733 applyfft: if True use some method, such as FFT, to find
3734 strongest sinusoidal components in the wavenumber
3735 domain to be used for baseline fitting.
3736 default is True.
3737 fftmethod: method to find the strong sinusoidal components.
3738 now only 'fft' is available and it is the default.
3739 fftthresh: the threshold to select wave numbers to be used for
3740 fitting from the distribution of amplitudes in the
3741 wavenumber domain.
3742 both float and string values accepted.
3743 given a float value, the unit is set to sigma.
3744 for string values, allowed formats include:
3745 'xsigma' or 'x' (= x-sigma level. e.g.,
3746 '3sigma'), or
3747 'topx' (= the x strongest ones, e.g. 'top5').
3748 default is 3.0 (unit: sigma).
3749 addwn: the additional wave numbers to be used for fitting.
3750 list or integer value is accepted to specify every
3751 wave numbers. also string value can be used in case
3752 you need to specify wave numbers in a certain range,
3753 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3754 '<a' (= 0,1,...,a-2,a-1),
3755 '>=a' (= a, a+1, ... up to the maximum wave
3756 number corresponding to the Nyquist
3757 frequency for the case of FFT).
3758 default is [0].
3759 rejwn: the wave numbers NOT to be used for fitting.
3760 can be set just as addwn but has higher priority:
3761 wave numbers which are specified both in addwn
3762 and rejwn will NOT be used. default is [].
3763 insitu: if False a new scantable is returned.
3764 Otherwise, the scaling is done in-situ
3765 The default is taken from .asaprc (False)
3766 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3767 clipniter: maximum number of iteration of 'clipthresh'-sigma
3768 clipping (default is 0)
3769 edge: an optional number of channel to drop at
3770 the edge of spectrum. If only one value is
3771 specified, the same number will be dropped
3772 from both sides of the spectrum. Default
3773 is to keep all channels. Nested tuples
3774 represent individual edge selection for
3775 different IFs (a number of spectral channels
3776 can be different)
3777 threshold: the threshold used by line finder. It is
3778 better to keep it large as only strong lines
3779 affect the baseline solution.
3780 chan_avg_limit: a maximum number of consequtive spectral
3781 channels to average during the search of
3782 weak and broad lines. The default is no
3783 averaging (and no search for weak lines).
3784 If such lines can affect the fitted baseline
3785 (e.g. a high order polynomial is fitted),
3786 increase this parameter (usually values up
3787 to 8 are reasonable). Most users of this
3788 method should find the default value sufficient.
3789 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3790 plot the fit and the residual. In this each
3791 indivual fit has to be approved, by typing 'y'
3792 or 'n'
3793 getresidual: if False, returns best-fit values instead of
3794 residual. (default is True)
3795 showprogress: show progress status for large data.
3796 default is True.
3797 minnrow: minimum number of input spectra to show.
3798 default is 1000.
3799 outlog: Output the coefficients of the best-fit
3800 function to logger (default is False)
3801 blfile: Name of a text file in which the best-fit
3802 parameter values to be written
3803 (default is "": no file/logger output)
3804 csvformat: if True blfile is csv-formatted, default is False.
3805 bltable: name of a baseline table where fitting results
3806 (coefficients, rms, etc.) are to be written.
3807 if given, fitting results will NOT be output to
3808 scantable (insitu=True) or None will be
3809 returned (insitu=False).
3810 (default is "": no table output)
3811
3812 Example:
3813 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3814
3815 Note:
3816 The best-fit parameter values output in logger and/or blfile are now
3817 based on specunit of 'channel'.
3818 """
3819
3820 try:
3821 varlist = vars()
3822
3823 if insitu is None: insitu = rcParams['insitu']
3824 if insitu:
3825 workscan = self
3826 else:
3827 workscan = self.copy()
3828
3829 if mask is None: mask = []
3830 if applyfft is None: applyfft = True
3831 if fftmethod is None: fftmethod = 'fft'
3832 if fftthresh is None: fftthresh = 3.0
3833 if addwn is None: addwn = [0]
3834 if rejwn is None: rejwn = []
3835 if clipthresh is None: clipthresh = 3.0
3836 if clipniter is None: clipniter = 0
3837 if edge is None: edge = (0,0)
3838 if threshold is None: threshold = 3
3839 if chan_avg_limit is None: chan_avg_limit = 1
3840 if plot is None: plot = False
3841 if getresidual is None: getresidual = True
3842 if showprogress is None: showprogress = True
3843 if minnrow is None: minnrow = 1000
3844 if outlog is None: outlog = False
3845 if blfile is None: blfile = ''
3846 if csvformat is None: csvformat = False
3847 if bltable is None: bltable = ''
3848
3849 sapplyfft = 'true' if applyfft else 'false'
3850 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3851
3852 scsvformat = 'T' if csvformat else 'F'
3853
3854 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3855 workscan._auto_sinusoid_baseline(mask,
3856 fftinfo,
3857 workscan._parse_wn(addwn),
3858 workscan._parse_wn(rejwn),
3859 clipthresh, clipniter,
3860 normalise_edge_param(edge),
3861 threshold, chan_avg_limit,
3862 getresidual,
3863 pack_progress_params(showprogress,
3864 minnrow),
3865 outlog, scsvformat+blfile, bltable)
3866 workscan._add_history("auto_sinusoid_baseline", varlist)
3867
3868 if bltable == '':
3869 if insitu:
3870 self._assign(workscan)
3871 else:
3872 return workscan
3873 else:
3874 if not insitu:
3875 return None
3876
3877 except RuntimeError, e:
3878 raise_fitting_failure_exception(e)
3879
3880 @asaplog_post_dec
3881 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3882 clipthresh=None, clipniter=None, plot=None,
3883 getresidual=None, showprogress=None, minnrow=None,
3884 outlog=None, blfile=None, csvformat=None,
3885 bltable=None):
3886 """\
3887 Return a scan which has been baselined (all rows) by cubic spline
3888 function (piecewise cubic polynomial).
3889
3890 Parameters:
3891 mask: An optional mask
3892 npiece: Number of pieces. (default is 2)
3893 insitu: If False a new scantable is returned.
3894 Otherwise, the scaling is done in-situ
3895 The default is taken from .asaprc (False)
3896 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3897 clipniter: maximum number of iteration of 'clipthresh'-sigma
3898 clipping (default is 0)
3899 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3900 plot the fit and the residual. In this each
3901 indivual fit has to be approved, by typing 'y'
3902 or 'n'
3903 getresidual: if False, returns best-fit values instead of
3904 residual. (default is True)
3905 showprogress: show progress status for large data.
3906 default is True.
3907 minnrow: minimum number of input spectra to show.
3908 default is 1000.
3909 outlog: Output the coefficients of the best-fit
3910 function to logger (default is False)
3911 blfile: Name of a text file in which the best-fit
3912 parameter values to be written
3913 (default is "": no file/logger output)
3914 csvformat: if True blfile is csv-formatted, default is False.
3915 bltable: name of a baseline table where fitting results
3916 (coefficients, rms, etc.) are to be written.
3917 if given, fitting results will NOT be output to
3918 scantable (insitu=True) or None will be
3919 returned (insitu=False).
3920 (default is "": no table output)
3921
3922 Example:
3923 # return a scan baselined by a cubic spline consisting of 2 pieces
3924 # (i.e., 1 internal knot),
3925 # also with 3-sigma clipping, iteration up to 4 times
3926 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3927
3928 Note:
3929 The best-fit parameter values output in logger and/or blfile are now
3930 based on specunit of 'channel'.
3931 """
3932
3933 try:
3934 varlist = vars()
3935
3936 if insitu is None: insitu = rcParams['insitu']
3937 if insitu:
3938 workscan = self
3939 else:
3940 workscan = self.copy()
3941
3942 if mask is None: mask = []
3943 if npiece is None: npiece = 2
3944 if clipthresh is None: clipthresh = 3.0
3945 if clipniter is None: clipniter = 0
3946 if plot is None: plot = False
3947 if getresidual is None: getresidual = True
3948 if showprogress is None: showprogress = True
3949 if minnrow is None: minnrow = 1000
3950 if outlog is None: outlog = False
3951 if blfile is None: blfile = ''
3952 if csvformat is None: csvformat = False
3953 if bltable is None: bltable = ''
3954
3955 scsvformat = 'T' if csvformat else 'F'
3956
3957 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3958 workscan._cspline_baseline(mask, npiece,
3959 clipthresh, clipniter,
3960 getresidual,
3961 pack_progress_params(showprogress,
3962 minnrow),
3963 outlog, scsvformat+blfile,
3964 bltable)
3965 workscan._add_history("cspline_baseline", varlist)
3966
3967 if bltable == '':
3968 if insitu:
3969 self._assign(workscan)
3970 else:
3971 return workscan
3972 else:
3973 if not insitu:
3974 return None
3975
3976 except RuntimeError, e:
3977 raise_fitting_failure_exception(e)
3978
3979 @asaplog_post_dec
3980 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3981 clipthresh=None, clipniter=None,
3982 edge=None, threshold=None, chan_avg_limit=None,
3983 getresidual=None, plot=None,
3984 showprogress=None, minnrow=None, outlog=None,
3985 blfile=None, csvformat=None, bltable=None):
3986 """\
3987 Return a scan which has been baselined (all rows) by cubic spline
3988 function (piecewise cubic polynomial).
3989 Spectral lines are detected first using linefinder and masked out
3990 to avoid them affecting the baseline solution.
3991
3992 Parameters:
3993 mask: an optional mask retreived from scantable
3994 npiece: Number of pieces. (default is 2)
3995 insitu: if False a new scantable is returned.
3996 Otherwise, the scaling is done in-situ
3997 The default is taken from .asaprc (False)
3998 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3999 clipniter: maximum number of iteration of 'clipthresh'-sigma
4000 clipping (default is 0)
4001 edge: an optional number of channel to drop at
4002 the edge of spectrum. If only one value is
4003 specified, the same number will be dropped
4004 from both sides of the spectrum. Default
4005 is to keep all channels. Nested tuples
4006 represent individual edge selection for
4007 different IFs (a number of spectral channels
4008 can be different)
4009 threshold: the threshold used by line finder. It is
4010 better to keep it large as only strong lines
4011 affect the baseline solution.
4012 chan_avg_limit: a maximum number of consequtive spectral
4013 channels to average during the search of
4014 weak and broad lines. The default is no
4015 averaging (and no search for weak lines).
4016 If such lines can affect the fitted baseline
4017 (e.g. a high order polynomial is fitted),
4018 increase this parameter (usually values up
4019 to 8 are reasonable). Most users of this
4020 method should find the default value sufficient.
4021 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4022 plot the fit and the residual. In this each
4023 indivual fit has to be approved, by typing 'y'
4024 or 'n'
4025 getresidual: if False, returns best-fit values instead of
4026 residual. (default is True)
4027 showprogress: show progress status for large data.
4028 default is True.
4029 minnrow: minimum number of input spectra to show.
4030 default is 1000.
4031 outlog: Output the coefficients of the best-fit
4032 function to logger (default is False)
4033 blfile: Name of a text file in which the best-fit
4034 parameter values to be written
4035 (default is "": no file/logger output)
4036 csvformat: if True blfile is csv-formatted, default is False.
4037 bltable: name of a baseline table where fitting results
4038 (coefficients, rms, etc.) are to be written.
4039 if given, fitting results will NOT be output to
4040 scantable (insitu=True) or None will be
4041 returned (insitu=False).
4042 (default is "": no table output)
4043
4044 Example:
4045 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4046
4047 Note:
4048 The best-fit parameter values output in logger and/or blfile are now
4049 based on specunit of 'channel'.
4050 """
4051
4052 try:
4053 varlist = vars()
4054
4055 if insitu is None: insitu = rcParams['insitu']
4056 if insitu:
4057 workscan = self
4058 else:
4059 workscan = self.copy()
4060
4061 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
4062 if mask is None: mask = []
4063 if npiece is None: npiece = 2
4064 if clipthresh is None: clipthresh = 3.0
4065 if clipniter is None: clipniter = 0
4066 if edge is None: edge = (0, 0)
4067 if threshold is None: threshold = 3
4068 if chan_avg_limit is None: chan_avg_limit = 1
4069 if plot is None: plot = False
4070 if getresidual is None: getresidual = True
4071 if showprogress is None: showprogress = True
4072 if minnrow is None: minnrow = 1000
4073 if outlog is None: outlog = False
4074 if blfile is None: blfile = ''
4075 if csvformat is None: csvformat = False
4076 if bltable is None: bltable = ''
4077
4078 scsvformat = 'T' if csvformat else 'F'
4079
4080 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4081 workscan._auto_cspline_baseline(mask, npiece,
4082 clipthresh, clipniter,
4083 normalise_edge_param(edge),
4084 threshold,
4085 chan_avg_limit, getresidual,
4086 pack_progress_params(showprogress,
4087 minnrow),
4088 outlog,
4089 scsvformat+blfile,
4090 bltable)
4091 workscan._add_history("auto_cspline_baseline", varlist)
4092
4093 if bltable == '':
4094 if insitu:
4095 self._assign(workscan)
4096 else:
4097 return workscan
4098 else:
4099 if not insitu:
4100 return None
4101
4102 except RuntimeError, e:
4103 raise_fitting_failure_exception(e)
4104
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order up to 5,
            # with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for any unspecified parameter
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes csv flag and file name as one string
            scsvformat = 'T' if csvformat else 'F'

            # NOTE: plot=True is currently unavailable until Chebyshev fitting
            # is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table instead of the scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4202
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for any unspecified parameter
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes csv flag and file name as one string
            scsvformat = 'T' if csvformat else 'F'

            # NOTE: plot=True is currently unavailable until Chebyshev fitting
            # is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table instead of the scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4325
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for any unspecified parameter
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes csv flag and file name as one string
            scsvformat = 'T' if csvformat else 'F'

            # Interactive path: fit row by row with the Python-level fitter
            # so the user can approve or reject each fit.
            # NOTE(review): 'bltable' and the clipping parameters are ignored
            # on this path -- confirm whether that is intended.
            if plot:
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    # append to an existing best-fit parameter file
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' tracks the interactive choice; "Y" means the user
                # chose to accept all remaining fits without plotting
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            # rejected: keep this row's spectrum unchanged
                            continue
                        elif accept_fit.upper() == "R":
                            # abort the remaining rows
                            break
                        elif accept_fit.upper() == "A":
                            # accept this and all following fits silently
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole fit to the C++ layer
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable) #

            workscan._add_history("poly_baseline", varlist)

            # NOTE(review): unlike the sibling *_baseline() methods, this
            # returns workscan even when bltable is given and insitu=False,
            # although the docstring promises None in that case -- confirm.
            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4475
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for any unspecified parameter
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes csv flag and file name as one string
            scsvformat = 'T' if csvformat else 'F'

            # flatten edge spec into a per-IF [left, right] pair list
            edge = normalise_edge_param(edge)

            # Interactive path: run the line finder and fit row by row with
            # the Python-level fitter so the user can approve each fit.
            # NOTE(review): 'bltable' and the clipping parameters are ignored
            # on this path -- confirm whether that is intended.
            if plot:
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' tracks the interactive choice; "Y" means the user
                # chose to accept all remaining fits without plotting
                action = "H"
                for r in rows:
                    # normalise_edge_param() yields one [left, right] pair
                    # per IF, flattened: pick the pair for this row's IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only channels the line finder left unmasked
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole fit to the C++ layer
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table instead of the scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4660
4661 def _init_blinfo(self):
4662 """\
4663 Initialise the following three auxiliary members:
4664 blpars : parameters of the best-fit baseline,
4665 masklists : mask data (edge positions of masked channels) and
4666 actualmask : mask data (in boolean list),
4667 to keep for use later (including output to logger/text files).
4668 Used by poly_baseline() and auto_poly_baseline() in case of
4669 'plot=True'.
4670 """
4671 self.blpars = []
4672 self.masklists = []
4673 self.actualmask = []
4674 return
4675
4676 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4677 """\
4678 Append baseline-fitting related info to blpars, masklist and
4679 actualmask.
4680 """
4681 self.blpars.append(data_blpars)
4682 self.masklists.append(data_masklists)
4683 self.actualmask.append(data_actualmask)
4684 return
4685
4686 @asaplog_post_dec
4687 def rotate_linpolphase(self, angle):
4688 """\
4689 Rotate the phase of the complex polarization O=Q+iU correlation.
4690 This is always done in situ in the raw data. So if you call this
4691 function more than once then each call rotates the phase further.
4692
4693 Parameters:
4694
4695 angle: The angle (degrees) to rotate (add) by.
4696
4697 Example::
4698
4699 scan.rotate_linpolphase(2.3)
4700
4701 """
4702 varlist = vars()
4703 self._math._rotate_linpolphase(self, angle)
4704 self._add_history("rotate_linpolphase", varlist)
4705 return
4706
4707 @asaplog_post_dec
4708 def rotate_xyphase(self, angle):
4709 """\
4710 Rotate the phase of the XY correlation. This is always done in situ
4711 in the data. So if you call this function more than once
4712 then each call rotates the phase further.
4713
4714 Parameters:
4715
4716 angle: The angle (degrees) to rotate (add) by.
4717
4718 Example::
4719
4720 scan.rotate_xyphase(2.3)
4721
4722 """
4723 varlist = vars()
4724 self._math._rotate_xyphase(self, angle)
4725 self._add_history("rotate_xyphase", varlist)
4726 return
4727
4728 @asaplog_post_dec
4729 def swap_linears(self):
4730 """\
4731 Swap the linear polarisations XX and YY, or better the first two
4732 polarisations as this also works for ciculars.
4733 """
4734 varlist = vars()
4735 self._math._swap_linears(self)
4736 self._add_history("swap_linears", varlist)
4737 return
4738
4739 @asaplog_post_dec
4740 def invert_phase(self):
4741 """\
4742 Invert the phase of the complex polarisation
4743 """
4744 varlist = vars()
4745 self._math._invert_phase(self)
4746 self._add_history("invert_phase", varlist)
4747 return
4748
4749 @asaplog_post_dec
4750 def add(self, offset, insitu=None):
4751 """\
4752 Return a scan where all spectra have the offset added
4753
4754 Parameters:
4755
4756 offset: the offset
4757
4758 insitu: if False a new scantable is returned.
4759 Otherwise, the scaling is done in-situ
4760 The default is taken from .asaprc (False)
4761
4762 """
4763 if insitu is None: insitu = rcParams['insitu']
4764 self._math._setinsitu(insitu)
4765 varlist = vars()
4766 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4767 s._add_history("add", varlist)
4768 if insitu:
4769 self._assign(s)
4770 else:
4771 return s
4772
4773 @asaplog_post_dec
4774 def scale(self, factor, tsys=True, insitu=None):
4775 """\
4776
4777 Return a scan where all spectra are scaled by the given 'factor'
4778
4779 Parameters:
4780
4781 factor: the scaling factor (float or 1D float list)
4782
4783 insitu: if False a new scantable is returned.
4784 Otherwise, the scaling is done in-situ
4785 The default is taken from .asaprc (False)
4786
4787 tsys: if True (default) then apply the operation to Tsys
4788 as well as the data
4789
4790 """
4791 if insitu is None: insitu = rcParams['insitu']
4792 self._math._setinsitu(insitu)
4793 varlist = vars()
4794 s = None
4795 import numpy
4796 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4797 if isinstance(factor[0], list) or isinstance(factor[0],
4798 numpy.ndarray):
4799 from asapmath import _array2dOp
4800 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4801 else:
4802 s = scantable( self._math._arrayop( self, factor,
4803 "MUL", tsys ) )
4804 else:
4805 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4806 s._add_history("scale", varlist)
4807 if insitu:
4808 self._assign(s)
4809 else:
4810 return s
4811
4812 @preserve_selection
4813 def set_sourcetype(self, match, matchtype="pattern",
4814 sourcetype="reference"):
4815 """\
4816 Set the type of the source to be an source or reference scan
4817 using the provided pattern.
4818
4819 Parameters:
4820
4821 match: a Unix style pattern, regular expression or selector
4822
4823 matchtype: 'pattern' (default) UNIX style pattern or
4824 'regex' regular expression
4825
4826 sourcetype: the type of the source to use (source/reference)
4827
4828 """
4829 varlist = vars()
4830 stype = -1
4831 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4832 stype = 1
4833 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4834 stype = 0
4835 else:
4836 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4837 if matchtype.lower().startswith("p"):
4838 matchtype = "pattern"
4839 elif matchtype.lower().startswith("r"):
4840 matchtype = "regex"
4841 else:
4842 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4843 sel = selector()
4844 if isinstance(match, selector):
4845 sel = match
4846 else:
4847 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4848 self.set_selection(sel)
4849 self._setsourcetype(stype)
4850 self._add_history("set_sourcetype", varlist)
4851
4852
4853 def set_sourcename(self, name):
4854 varlist = vars()
4855 self._setsourcename(name)
4856 self._add_history("set_sourcename", varlist)
4857
4858 @asaplog_post_dec
4859 @preserve_selection
4860 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4861 """\
4862 This function allows to build quotients automatically.
4863 It assumes the observation to have the same number of
4864 "ons" and "offs"
4865
4866 Parameters:
4867
4868 preserve: you can preserve (default) the continuum or
4869 remove it. The equations used are
4870
4871 preserve: Output = Toff * (on/off) - Toff
4872
4873 remove: Output = Toff * (on/off) - Ton
4874
4875 mode: the on/off detection mode
4876 'paired' (default)
4877 identifies 'off' scans by the
4878 trailing '_R' (Mopra/Parkes) or
4879 '_e'/'_w' (Tid) and matches
4880 on/off pairs from the observing pattern
4881 'time'
4882 finds the closest off in time
4883
4884 .. todo:: verify argument is not implemented
4885
4886 """
4887 varlist = vars()
4888 modes = ["time", "paired"]
4889 if not mode in modes:
4890 msg = "please provide valid mode. Valid modes are %s" % (modes)
4891 raise ValueError(msg)
4892 s = None
4893 if mode.lower() == "paired":
4894 from asap._asap import srctype
4895 sel = self.get_selection()
4896 #sel.set_query("SRCTYPE==psoff")
4897 sel.set_types(srctype.psoff)
4898 self.set_selection(sel)
4899 offs = self.copy()
4900 #sel.set_query("SRCTYPE==pson")
4901 sel.set_types(srctype.pson)
4902 self.set_selection(sel)
4903 ons = self.copy()
4904 s = scantable(self._math._quotient(ons, offs, preserve))
4905 elif mode.lower() == "time":
4906 s = scantable(self._math._auto_quotient(self, mode, preserve))
4907 s._add_history("auto_quotient", varlist)
4908 return s
4909
4910 @asaplog_post_dec
4911 def mx_quotient(self, mask = None, weight='median', preserve=True):
4912 """\
4913 Form a quotient using "off" beams when observing in "MX" mode.
4914
4915 Parameters:
4916
4917 mask: an optional mask to be used when weight == 'stddev'
4918
4919 weight: How to average the off beams. Default is 'median'.
4920
4921 preserve: you can preserve (default) the continuum or
4922 remove it. The equations used are:
4923
4924 preserve: Output = Toff * (on/off) - Toff
4925
4926 remove: Output = Toff * (on/off) - Ton
4927
4928 """
4929 mask = mask or ()
4930 varlist = vars()
4931 on = scantable(self._math._mx_extract(self, 'on'))
4932 preoff = scantable(self._math._mx_extract(self, 'off'))
4933 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4934 from asapmath import quotient
4935 q = quotient(on, off, preserve)
4936 q._add_history("mx_quotient", varlist)
4937 return q
4938
4939 @asaplog_post_dec
4940 def freq_switch(self, insitu=None):
4941 """\
4942 Apply frequency switching to the data.
4943
4944 Parameters:
4945
4946 insitu: if False a new scantable is returned.
4947 Otherwise, the swictching is done in-situ
4948 The default is taken from .asaprc (False)
4949
4950 """
4951 if insitu is None: insitu = rcParams['insitu']
4952 self._math._setinsitu(insitu)
4953 varlist = vars()
4954 s = scantable(self._math._freqswitch(self))
4955 s._add_history("freq_switch", varlist)
4956 if insitu:
4957 self._assign(s)
4958 else:
4959 return s
4960
4961 @asaplog_post_dec
4962 def recalc_azel(self):
4963 """Recalculate the azimuth and elevation for each position."""
4964 varlist = vars()
4965 self._recalcazel()
4966 self._add_history("recalc_azel", varlist)
4967 return
4968
4969 @asaplog_post_dec
4970 def __add__(self, other):
4971 """
4972 implicit on all axes and on Tsys
4973 """
4974 varlist = vars()
4975 s = self.__op( other, "ADD" )
4976 s._add_history("operator +", varlist)
4977 return s
4978
4979 @asaplog_post_dec
4980 def __sub__(self, other):
4981 """
4982 implicit on all axes and on Tsys
4983 """
4984 varlist = vars()
4985 s = self.__op( other, "SUB" )
4986 s._add_history("operator -", varlist)
4987 return s
4988
4989 @asaplog_post_dec
4990 def __mul__(self, other):
4991 """
4992 implicit on all axes and on Tsys
4993 """
4994 varlist = vars()
4995 s = self.__op( other, "MUL" ) ;
4996 s._add_history("operator *", varlist)
4997 return s
4998
4999
5000 @asaplog_post_dec
5001 def __div__(self, other):
5002 """
5003 implicit on all axes and on Tsys
5004 """
5005 varlist = vars()
5006 s = self.__op( other, "DIV" )
5007 s._add_history("operator /", varlist)
5008 return s
5009
5010 @asaplog_post_dec
5011 def __op( self, other, mode ):
5012 s = None
5013 if isinstance(other, scantable):
5014 s = scantable(self._math._binaryop(self, other, mode))
5015 elif isinstance(other, float):
5016 if other == 0.0:
5017 raise ZeroDivisionError("Dividing by zero is not recommended")
5018 s = scantable(self._math._unaryop(self, other, mode, False))
5019 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
5020 if isinstance(other[0], list) \
5021 or isinstance(other[0], numpy.ndarray):
5022 from asapmath import _array2dOp
5023 s = _array2dOp( self, other, mode, False )
5024 else:
5025 s = scantable( self._math._arrayop( self, other,
5026 mode, False ) )
5027 else:
5028 raise TypeError("Other input is not a scantable or float value")
5029 return s
5030
5031 @asaplog_post_dec
5032 def get_fit(self, row=0):
5033 """\
5034 Print or return the stored fits for a row in the scantable
5035
5036 Parameters:
5037
5038 row: the row which the fit has been applied to.
5039
5040 """
5041 if row > self.nrow():
5042 return
5043 from asap.asapfit import asapfit
5044 fit = asapfit(self._getfit(row))
5045 asaplog.push( '%s' %(fit) )
5046 return fit.as_dict()
5047
5048 @preserve_selection
5049 def flag_nans(self):
5050 """\
5051 Utility function to flag NaN values in the scantable.
5052 """
5053 import numpy
5054 basesel = self.get_selection()
5055 for i in range(self.nrow()):
5056 sel = self.get_row_selector(i)
5057 self.set_selection(basesel+sel)
5058 nans = numpy.isnan(self._getspectrum(0))
5059 if numpy.any(nans):
5060 bnans = [ bool(v) for v in nans]
5061 self.flag(bnans)
5062
5063 self.set_selection(basesel)
5064
    def get_row_selector(self, rowno):
        """Return a selector object that selects only row number 'rowno'."""
        return selector(rows=[rowno])
5067
5068 def _add_history(self, funcname, parameters):
5069 if not rcParams['scantable.history']:
5070 return
5071 # create date
5072 sep = "##"
5073 from datetime import datetime
5074 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
5075 hist = dstr+sep
5076 hist += funcname+sep#cdate+sep
5077 if parameters.has_key('self'):
5078 del parameters['self']
5079 for k, v in parameters.iteritems():
5080 if type(v) is dict:
5081 for k2, v2 in v.iteritems():
5082 hist += k2
5083 hist += "="
5084 if isinstance(v2, scantable):
5085 hist += 'scantable'
5086 elif k2 == 'mask':
5087 if isinstance(v2, list) or isinstance(v2, tuple):
5088 hist += str(self._zip_mask(v2))
5089 else:
5090 hist += str(v2)
5091 else:
5092 hist += str(v2)
5093 else:
5094 hist += k
5095 hist += "="
5096 if isinstance(v, scantable):
5097 hist += 'scantable'
5098 elif k == 'mask':
5099 if isinstance(v, list) or isinstance(v, tuple):
5100 hist += str(self._zip_mask(v))
5101 else:
5102 hist += str(v)
5103 else:
5104 hist += str(v)
5105 hist += sep
5106 hist = hist[:-2] # remove trailing '##'
5107 self._addhistory(hist)
5108
5109
5110 def _zip_mask(self, mask):
5111 mask = list(mask)
5112 i = 0
5113 segments = []
5114 while mask[i:].count(1):
5115 i += mask[i:].index(1)
5116 if mask[i:].count(0):
5117 j = i + mask[i:].index(0)
5118 else:
5119 j = len(mask)
5120 segments.append([i, j])
5121 i = j
5122 return segments
5123
5124 def _get_ordinate_label(self):
5125 fu = "("+self.get_fluxunit()+")"
5126 import re
5127 lbl = "Intensity"
5128 if re.match(".K.", fu):
5129 lbl = "Brightness Temperature "+ fu
5130 elif re.match(".Jy.", fu):
5131 lbl = "Flux density "+ fu
5132 return lbl
5133
5134 def _check_ifs(self):
5135# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5136 nchans = [self.nchan(i) for i in self.getifnos()]
5137 nchans = filter(lambda t: t > 0, nchans)
5138 return (sum(nchans)/len(nchans) == nchans[0])
5139
5140 @asaplog_post_dec
5141 def _fill(self, names, unit, average, opts={}):
5142 first = True
5143 fullnames = []
5144 for name in names:
5145 name = os.path.expandvars(name)
5146 name = os.path.expanduser(name)
5147 if not os.path.exists(name):
5148 msg = "File '%s' does not exists" % (name)
5149 raise IOError(msg)
5150 fullnames.append(name)
5151 if average:
5152 asaplog.push('Auto averaging integrations')
5153 stype = int(rcParams['scantable.storage'].lower() == 'disk')
5154 for name in fullnames:
5155 tbl = Scantable(stype)
5156 if is_ms( name ):
5157 r = msfiller( tbl )
5158 else:
5159 r = filler( tbl )
5160 msg = "Importing %s..." % (name)
5161 asaplog.push(msg, False)
5162 r.open(name, opts)
5163 rx = rcParams['scantable.reference']
5164 r.setreferenceexpr(rx)
5165 r.fill()
5166 if average:
5167 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
5168 if not first:
5169 tbl = self._math._merge([self, tbl])
5170 Scantable.__init__(self, tbl)
5171 r.close()
5172 del r, tbl
5173 first = False
5174 #flush log
5175 asaplog.post()
5176 if unit is not None:
5177 self.set_fluxunit(unit)
5178 if not is_casapy():
5179 self.set_freqframe(rcParams['scantable.freqframe'])
5180
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve a verification action to one of 'Y', 'N', 'A' or 'R'.

        Parameters:

            msg: prompt text shown when the user is asked interactively

            action: pre-selected action string; when empty or not a
                    string the user is prompted via raw_input instead

        Only the first character of the reply is significant; 'H' or
        '?' prints a help text and re-prompts. Anything unrecognised
        silently falls back to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        # an empty reply means accept the default
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # no 'action' passed on, so the user is prompted again
            return self._get_verify_action(msg)
        else:
            return 'Y'
5199
5200 def __getitem__(self, key):
5201 if key < 0:
5202 key += self.nrow()
5203 if key >= self.nrow():
5204 raise IndexError("Row index out of range.")
5205 return self._getspectrum(key)
5206
5207 def __setitem__(self, key, value):
5208 if key < 0:
5209 key += self.nrow()
5210 if key >= self.nrow():
5211 raise IndexError("Row index out of range.")
5212 if not hasattr(value, "__len__") or \
5213 len(value) > self.nchan(self.getif(key)):
5214 raise ValueError("Spectrum length doesn't match.")
5215 return self._setspectrum(value, key)
5216
5217 def __len__(self):
5218 return self.nrow()
5219
5220 def __iter__(self):
5221 for i in range(len(self)):
5222 yield self[i]
Note: See TracBrowser for help on using the repository browser.