source: trunk/python/scantable.py@ 2890

Last change on this file since 2890 was 2890, checked in by WataruKawasaki, 12 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes:

Module(s): sd

Description: (1) fixed Scantable::do{CubicSpline}LeastSquareFitting() to correctly avoid using NaN/Inf data (in calculating sigma of residual spectrum). (2) clean-up parse_spw_selection() code in scantable.py.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 201.8 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after 'func' runs.

    The active selection is remembered before the call and re-applied in a
    finally block, so it is restored even when 'func' raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
    return wrap
33
34def is_scantable(filename):
35 """Is the given file a scantable?
36
37 Parameters:
38
39 filename: the name of the file/directory to test
40
41 """
42 if ( os.path.isdir(filename)
43 and os.path.exists(filename+'/table.info')
44 and os.path.exists(filename+'/table.dat') ):
45 f=open(filename+'/table.info')
46 l=f.readline()
47 f.close()
48 match_pattern = '^Type = (Scantable)? *$'
49 if re.match(match_pattern,l):
50 return True
51 else:
52 return False
53 else:
54 return False
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
59def is_ms(filename):
60 """Is the given file a MeasurementSet?
61
62 Parameters:
63
64 filename: the name of the file/directory to test
65
66 """
67 if ( os.path.isdir(filename)
68 and os.path.exists(filename+'/table.info')
69 and os.path.exists(filename+'/table.dat') ):
70 f=open(filename+'/table.info')
71 l=f.readline()
72 f.close()
73 if ( l.find('Measurement Set') != -1 ):
74 return True
75 else:
76 return False
77 else:
78 return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # ---- validation -------------------------------------------------------
    # (raise X(msg) form works in both Python 2 and 3; the old
    #  'raise X, msg' statement is Python-2-only syntax)
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has \
                                 to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has \
                                     to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a \
                             pair of integers specified as a tuple. \
                             Nested tuples are allowed \
                             to make individual selection for different IFs.")

    # ---- normalisation ----------------------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # 'commonedge' means a single [left,right] pair shared by all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten mixed int/pair elements into [l0,r0,l1,r1,...]
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a failed fit: log it in verbose mode, otherwise re-raise."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-display options into one comma-separated string."""
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.

    Entries whose row number is >= maxirow are dropped (do_pack_blinfo
    returns '' for them).  Returns None for any other input type.
    """
    if isinstance(blinfo, dict):
        res = do_pack_blinfo(blinfo, maxirow)
        return [res] if res != '' else []
    elif isinstance(blinfo, (list, tuple)):
        # iterate directly instead of indexing with xrange (py2-only name)
        res = []
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' when the entry's row number is >= maxirow.
    Raises ValueError when a mandatory key is missing, or when the
    clipping / linefinder parameter sets are only partially given.
    """
    def tostr_csv(val, deep):
        # stringify a scalar or list/tuple as 'a,b,...'; when 'deep',
        # one level of nested lists/tuples is flattened as well
        if not isinstance(val, (list, tuple)):
            return str(val)
        parts = []
        for item in val:
            if deep and isinstance(item, (list, tuple)):
                parts.extend([str(sub) for sub in item])
            else:
                parts.append(str(item))
        return ",".join(parts)

    dinfo = {}
    # mandatory keys ('key in dict' replaces py2-only has_key())
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        # masklist may be a list of [start,end] pairs -> flatten deeply
        dinfo[key] = tostr_csv(val, True)

    # rows beyond the table are silently skipped
    if irow >= maxirow:
        return ''

    # optional function-order parameters (one flattening level only,
    # matching the original behaviour)
    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = tostr_csv(blinfo[key], False)

    blfunc = dinfo['blfunc']
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both or neither of the two keys must be present
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three or none of the keys must be present
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = tostr_csv(blinfo[key], False)
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    # iterate the items directly instead of xrange-indexing (py2-only name)
    res = []
    for item in sres:
        (srow, scoeff, srms) = item.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if the stripped string 's' parses as a float.

    The old version used a bare 'except' plus a 'return' inside 'finally',
    which silently swallowed every exception type; float() on a str can
    only raise ValueError, so catch exactly that.
    """
    try:
        float(s.strip())
    except ValueError:
        return False
    return True
313
def is_frequency(s):
    """Return True if the stripped string ends in 'Hz' (case-insensitive)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a (value, frequency-with-unit) string pair into Hz values.

    's1' is a bare number, 's2' a number followed by an optionally
    SI-prefixed 'Hz' unit; both results are scaled by the same factor.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    # SI prefixes; '.' covers a decimal point sitting right before 'Hz'
    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0,
                   1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before the 2-letter unit is the SI prefix, if any
    prefix = s2[-3:-2]
    if is_number(prefix):
        # no prefix: plain 'Hz'
        return (float(s1), float(s2[:-2]))
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-3]) * factor)
338
def is_velocity(s):
    """Return True if the stripped string ends in 'm/s' (case-insensitive)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a (value, velocity-with-unit) string pair into km/s values.

    's1' is a bare number, 's2' a number followed by 'm/s' or 'km/s';
    both results are scaled by the same factor.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before the 3-letter unit is the prefix, if any
    prefix = s2[-4:-3]
    if is_number(prefix):
        # plain 'm/s': scale down to km/s
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    factor = factor_list[prefix_list.index(prefix)]
    return (float(s1) * factor, float(s2[:-4]) * factor)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity to a sky frequency for the given convention.

    Parameters:

        restfreq:    rest frequency (any unit; output is in the same unit)
        vel:         velocity in km/s
        doppler:     'radio', 'optical', or anything else for the
                     relativistic formula

    """
    # speed of light [km/s]
    vel_c = 299792.458

    import math
    r = vel / vel_c

    # hoist the repeated .lower() and use a single if/elif/else chain
    # (the original mixed a dangling 'if' with an 'if/else' pair)
    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - r)
    elif convention == 'optical':
        return restfreq / (1.0 + r)
    else:
        return restfreq * math.sqrt((1.0 - r) / (1.0 + r))
380
381
382class scantable(Scantable):
383 """\
384 The ASAP container for scans (single-dish data).
385 """
386
387 @asaplog_post_dec
388 def __init__(self, filename, average=None, unit=None, parallactify=None,
389 **args):
390 """\
391 Create a scantable from a saved one or make a reference
392
393 Parameters:
394
395 filename: the name of an asap table on disk
396 or
397 the name of a rpfits/sdfits/ms file
398 (integrations within scans are auto averaged
399 and the whole file is read) or
400 [advanced] a reference to an existing scantable
401
402 average: average all integrations withinb a scan on read.
403 The default (True) is taken from .asaprc.
404
405 unit: brightness unit; must be consistent with K or Jy.
406 Over-rides the default selected by the filler
407 (input rpfits/sdfits/ms) or replaces the value
408 in existing scantables
409
410 antenna: for MeasurementSet input data only:
411 Antenna selection. integer (id) or string
412 (name or id).
413
414 parallactify: Indicate that the data had been parallactified.
415 Default (false) is taken from rc file.
416
417 getpt: Whether to import direction from MS/POINTING
418 table properly or not.
419 This is effective only when filename is MS.
420 The default (True) is to import direction
421 from MS/POINTING.
422 """
423 if average is None:
424 average = rcParams['scantable.autoaverage']
425 parallactify = parallactify or rcParams['scantable.parallactify']
426 varlist = vars()
427 from asap._asap import stmath
428 self._math = stmath( rcParams['insitu'] )
429 if isinstance(filename, Scantable):
430 Scantable.__init__(self, filename)
431 else:
432 if isinstance(filename, str):
433 filename = os.path.expandvars(filename)
434 filename = os.path.expanduser(filename)
435 if not os.path.exists(filename):
436 s = "File '%s' not found." % (filename)
437 raise IOError(s)
438 if is_scantable(filename):
439 ondisk = rcParams['scantable.storage'] == 'disk'
440 Scantable.__init__(self, filename, ondisk)
441 if unit is not None:
442 self.set_fluxunit(unit)
443 if average:
444 self._assign( self.average_time( scanav=True ) )
445 # do not reset to the default freqframe
446 #self.set_freqframe(rcParams['scantable.freqframe'])
447 elif is_ms(filename):
448 # Measurement Set
449 opts={'ms': {}}
450 mskeys=['getpt','antenna']
451 for key in mskeys:
452 if key in args.keys():
453 opts['ms'][key] = args[key]
454 self._fill([filename], unit, average, opts)
455 elif os.path.isfile(filename):
456 opts={'nro': {}}
457 nrokeys=['freqref']
458 for key in nrokeys:
459 if key in args.keys():
460 opts['nro'][key] = args[key]
461 self._fill([filename], unit, average, opts)
462 # only apply to new data not "copy constructor"
463 self.parallactify(parallactify)
464 else:
465 msg = "The given file '%s'is not a valid " \
466 "asap table." % (filename)
467 raise IOError(msg)
468 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
469 and isinstance(filename[-1], str):
470 self._fill(filename, unit, average)
471 self.parallactify(parallactify)
472 self._add_history("scantable", varlist)
473
474 @asaplog_post_dec
475 def save(self, name=None, format=None, overwrite=False):
476 """\
477 Store the scantable on disk. This can be an asap (aips++) Table,
478 SDFITS or MS2 format.
479
480 Parameters:
481
482 name: the name of the outputfile. For format 'ASCII'
483 this is the root file name (data in 'name'.txt
484 and header in 'name'_header.txt)
485
486 format: an optional file format. Default is ASAP.
487 Allowed are:
488
489 * 'ASAP' (save as ASAP [aips++] Table),
490 * 'SDFITS' (save as SDFITS file)
491 * 'ASCII' (saves as ascii text file)
492 * 'MS2' (saves as an casacore MeasurementSet V2)
493 * 'FITS' (save as image FITS - not readable by
494 class)
495 * 'CLASS' (save as FITS readable by CLASS)
496
497 overwrite: If the file should be overwritten if it exists.
498 The default False is to return with warning
499 without writing the output. USE WITH CARE.
500
501 Example::
502
503 scan.save('myscan.asap')
504 scan.save('myscan.sdfits', 'SDFITS')
505
506 """
507 from os import path
508 format = format or rcParams['scantable.save']
509 suffix = '.'+format.lower()
510 if name is None or name == "":
511 name = 'scantable'+suffix
512 msg = "No filename given. Using default name %s..." % name
513 asaplog.push(msg)
514 name = path.expandvars(name)
515 if path.isfile(name) or path.isdir(name):
516 if not overwrite:
517 msg = "File %s exists." % name
518 raise IOError(msg)
519 format2 = format.upper()
520 if format2 == 'ASAP':
521 self._save(name)
522 elif format2 == 'MS2':
523 msopt = {'ms': {'overwrite': overwrite } }
524 from asap._asap import mswriter
525 writer = mswriter( self )
526 writer.write( name, msopt )
527 else:
528 from asap._asap import stwriter as stw
529 writer = stw(format2)
530 writer.write(self, name)
531 return
532
533 def copy(self):
534 """Return a copy of this scantable.
535
536 *Note*:
537
538 This makes a full (deep) copy. scan2 = scan1 makes a reference.
539
540 Example::
541
542 copiedscan = scan.copy()
543
544 """
545 sd = scantable(Scantable._copy(self))
546 return sd
547
548 def drop_scan(self, scanid=None):
549 """\
550 Return a new scantable where the specified scan number(s) has(have)
551 been dropped.
552
553 Parameters:
554
555 scanid: a (list of) scan number(s)
556
557 """
558 from asap import _is_sequence_or_number as _is_valid
559 from asap import _to_list
560 from asap import unique
561 if not _is_valid(scanid):
562 raise RuntimeError( 'Please specify a scanno to drop from the'
563 ' scantable' )
564 scanid = _to_list(scanid)
565 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
566 for sid in scanid: allscans.remove(sid)
567 if len(allscans) == 0:
568 raise ValueError("Can't remove all scans")
569 sel = selector(scans=allscans)
570 return self._select_copy(sel)
571
572 def _select_copy(self, selection):
573 orig = self.get_selection()
574 self.set_selection(orig+selection)
575 cp = self.copy()
576 self.set_selection(orig)
577 return cp
578
579 def get_scan(self, scanid=None):
580 """\
581 Return a specific scan (by scanno) or collection of scans (by
582 source name) in a new scantable.
583
584 *Note*:
585
586 See scantable.drop_scan() for the inverse operation.
587
588 Parameters:
589
590 scanid: a (list of) scanno or a source name, unix-style
591 patterns are accepted for source name matching, e.g.
592 '*_R' gets all 'ref scans
593
594 Example::
595
596 # get all scans containing the source '323p459'
597 newscan = scan.get_scan('323p459')
598 # get all 'off' scans
599 refscans = scan.get_scan('*_R')
600 # get a susbset of scans by scanno (as listed in scan.summary())
601 newscan = scan.get_scan([0, 2, 7, 10])
602
603 """
604 if scanid is None:
605 raise RuntimeError( 'Please specify a scan no or name to '
606 'retrieve from the scantable' )
607 try:
608 bsel = self.get_selection()
609 sel = selector()
610 if type(scanid) is str:
611 sel.set_name(scanid)
612 return self._select_copy(sel)
613 elif type(scanid) is int:
614 sel.set_scans([scanid])
615 return self._select_copy(sel)
616 elif type(scanid) is list:
617 sel.set_scans(scanid)
618 return self._select_copy(sel)
619 else:
620 msg = "Illegal scanid type, use 'int' or 'list' if ints."
621 raise TypeError(msg)
622 except RuntimeError:
623 raise
624
625 def __str__(self):
626 tempFile = tempfile.NamedTemporaryFile()
627 Scantable._summary(self, tempFile.name)
628 tempFile.seek(0)
629 asaplog.clear()
630 return tempFile.file.read()
631
632 @asaplog_post_dec
633 def summary(self, filename=None):
634 """\
635 Print a summary of the contents of this scantable.
636
637 Parameters:
638
639 filename: the name of a file to write the putput to
640 Default - no file output
641
642 """
643 if filename is not None:
644 if filename is "":
645 filename = 'scantable_summary.txt'
646 from os.path import expandvars, isdir
647 filename = expandvars(filename)
648 if isdir(filename):
649 msg = "Illegal file name '%s'." % (filename)
650 raise IOError(msg)
651 else:
652 filename = ""
653 Scantable._summary(self, filename)
654
655 def get_spectrum(self, rowno):
656 """Return the spectrum for the current row in the scantable as a list.
657
658 Parameters:
659
660 rowno: the row number to retrieve the spectrum from
661
662 """
663 return self._getspectrum(rowno)
664
665 def get_mask(self, rowno):
666 """Return the mask for the current row in the scantable as a list.
667
668 Parameters:
669
670 rowno: the row number to retrieve the mask from
671
672 """
673 return self._getmask(rowno)
674
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the current row in the scantable.

        Parameters:

            spec:    the new spectrum; its length must equal the number
                     of channels of the row's IF

            rowno:    the row number to set the spectrum for

        """
        # NOTE(review): length validation via 'assert' is stripped when
        # Python runs with -O; raising ValueError would be safer but
        # would change the exception type callers may rely on.
        assert(len(spec) == self.nchan(self.getif(rowno)))
        return self._setspectrum(spec, rowno)
687
688 def get_coordinate(self, rowno):
689 """Return the (spectral) coordinate for a a given 'rowno'.
690
691 *Note*:
692
693 * This coordinate is only valid until a scantable method modifies
694 the frequency axis.
695 * This coordinate does contain the original frequency set-up
696 NOT the new frame. The conversions however are done using the user
697 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
698 use scantable.freq_align first. Without it there is no closure,
699 i.e.::
700
701 c = myscan.get_coordinate(0)
702 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
703
704 Parameters:
705
706 rowno: the row number for the spectral coordinate
707
708 """
709 return coordinate(Scantable.get_coordinate(self, rowno))
710
711 def get_selection(self):
712 """\
713 Get the selection object currently set on this scantable.
714
715 Example::
716
717 sel = scan.get_selection()
718 sel.set_ifs(0) # select IF 0
719 scan.set_selection(sel) # apply modified selection
720
721 """
722 return selector(self._getselection())
723
724 def set_selection(self, selection=None, **kw):
725 """\
726 Select a subset of the data. All following operations on this scantable
727 are only applied to thi selection.
728
729 Parameters:
730
731 selection: a selector object (default unset the selection), or
732 any combination of 'pols', 'ifs', 'beams', 'scans',
733 'cycles', 'name', 'query'
734
735 Examples::
736
737 sel = selector() # create a selection object
738 self.set_scans([0, 3]) # select SCANNO 0 and 3
739 scan.set_selection(sel) # set the selection
740 scan.summary() # will only print summary of scanno 0 an 3
741 scan.set_selection() # unset the selection
742 # or the equivalent
743 scan.set_selection(scans=[0,3])
744 scan.summary() # will only print summary of scanno 0 an 3
745 scan.set_selection() # unset the selection
746
747 """
748 if selection is None:
749 # reset
750 if len(kw) == 0:
751 selection = selector()
752 else:
753 # try keywords
754 for k in kw:
755 if k not in selector.fields:
756 raise KeyError("Invalid selection key '%s', "
757 "valid keys are %s" % (k,
758 selector.fields))
759 selection = selector(**kw)
760 self._setselection(selection)
761
762 def get_row(self, row=0, insitu=None):
763 """\
764 Select a row in the scantable.
765 Return a scantable with single row.
766
767 Parameters:
768
769 row: row no of integration, default is 0.
770 insitu: if False a new scantable is returned. Otherwise, the
771 scaling is done in-situ. The default is taken from .asaprc
772 (False)
773
774 """
775 if insitu is None:
776 insitu = rcParams['insitu']
777 if not insitu:
778 workscan = self.copy()
779 else:
780 workscan = self
781 # Select a row
782 sel = selector()
783 sel.set_rows([row])
784 workscan.set_selection(sel)
785 if not workscan.nrow() == 1:
786 msg = "Could not identify single row. %d rows selected." \
787 % (workscan.nrow())
788 raise RuntimeError(msg)
789 if insitu:
790 self._assign(workscan)
791 else:
792 return workscan
793
794 @asaplog_post_dec
795 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
796 """\
797 Determine the specified statistic of the current beam/if/pol
798 Takes a 'mask' as an optional parameter to specify which
799 channels should be excluded.
800
801 Parameters:
802
803 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
804 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
805
806 mask: an optional mask specifying where the statistic
807 should be determined.
808
809 form: format string to print statistic values
810
811 row: row number of spectrum to process.
812 (default is None: for all rows)
813
814 Example:
815 scan.set_unit('channel')
816 msk = scan.create_mask([100, 200], [500, 600])
817 scan.stats(stat='mean', mask=m)
818
819 """
820 mask = mask or []
821 if not self._check_ifs():
822 raise ValueError("Cannot apply mask as the IFs have different "
823 "number of channels. Please use setselection() "
824 "to select individual IFs")
825 rtnabc = False
826 if stat.lower().endswith('_abc'): rtnabc = True
827 getchan = False
828 if stat.lower().startswith('min') or stat.lower().startswith('max'):
829 chan = self._math._minmaxchan(self, mask, stat)
830 getchan = True
831 statvals = []
832 if not rtnabc:
833 if row == None:
834 statvals = self._math._stats(self, mask, stat)
835 else:
836 statvals = self._math._statsrow(self, mask, stat, int(row))
837
838 #def cb(i):
839 # return statvals[i]
840
841 #return self._row_callback(cb, stat)
842
843 label=stat
844 #callback=cb
845 out = ""
846 #outvec = []
847 sep = '-'*50
848
849 if row == None:
850 rows = xrange(self.nrow())
851 elif isinstance(row, int):
852 rows = [ row ]
853
854 for i in rows:
855 refstr = ''
856 statunit= ''
857 if getchan:
858 qx, qy = self.chan2data(rowno=i, chan=chan[i])
859 if rtnabc:
860 statvals.append(qx['value'])
861 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
862 statunit= '['+qx['unit']+']'
863 else:
864 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
865
866 tm = self._gettime(i)
867 src = self._getsourcename(i)
868 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
869 out += 'Time[%s]:\n' % (tm)
870 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
871 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
872 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
873 #outvec.append(callback(i))
874 if len(rows) > 1:
875 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
876 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
877 else:
878 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
879 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
880 out += sep+"\n"
881
882 import os
883 if os.environ.has_key( 'USER' ):
884 usr = os.environ['USER']
885 else:
886 import commands
887 usr = commands.getoutput( 'whoami' )
888 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
889 f = open(tmpfile,'w')
890 print >> f, sep
891 print >> f, ' %s %s' % (label, statunit)
892 print >> f, sep
893 print >> f, out
894 f.close()
895 f = open(tmpfile,'r')
896 x = f.readlines()
897 f.close()
898 asaplog.push(''.join(x), False)
899
900 return statvals
901
902 def chan2data(self, rowno=0, chan=0):
903 """\
904 Returns channel/frequency/velocity and spectral value
905 at an arbitrary row and channel in the scantable.
906
907 Parameters:
908
909 rowno: a row number in the scantable. Default is the
910 first row, i.e. rowno=0
911
912 chan: a channel in the scantable. Default is the first
913 channel, i.e. pos=0
914
915 """
916 if isinstance(rowno, int) and isinstance(chan, int):
917 qx = {'unit': self.get_unit(),
918 'value': self._getabcissa(rowno)[chan]}
919 qy = {'unit': self.get_fluxunit(),
920 'value': self._getspectrum(rowno)[chan]}
921 return qx, qy
922
923 def stddev(self, mask=None):
924 """\
925 Determine the standard deviation of the current beam/if/pol
926 Takes a 'mask' as an optional parameter to specify which
927 channels should be excluded.
928
929 Parameters:
930
931 mask: an optional mask specifying where the standard
932 deviation should be determined.
933
934 Example::
935
936 scan.set_unit('channel')
937 msk = scan.create_mask([100, 200], [500, 600])
938 scan.stddev(mask=m)
939
940 """
941 return self.stats(stat='stddev', mask=mask);
942
943
944 def get_column_names(self):
945 """\
946 Return a list of column names, which can be used for selection.
947 """
948 return list(Scantable.get_column_names(self))
949
950 def get_tsys(self, row=-1):
951 """\
952 Return the System temperatures.
953
954 Parameters:
955
956 row: the rowno to get the information for. (default all rows)
957
958 Returns:
959
960 a list of Tsys values for the current selection
961
962 """
963 if row > -1:
964 return self._get_column(self._gettsys, row)
965 return self._row_callback(self._gettsys, "Tsys")
966
967 def get_tsysspectrum(self, row=-1):
968 """\
969 Return the channel dependent system temperatures.
970
971 Parameters:
972
973 row: the rowno to get the information for. (default all rows)
974
975 Returns:
976
977 a list of Tsys values for the current selection
978
979 """
980 return self._get_column( self._gettsysspectrum, row )
981
982 def set_tsys(self, values, row=-1):
983 """\
984 Set the Tsys value(s) of the given 'row' or the whole scantable
985 (selection).
986
987 Parameters:
988
989 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
990 row: the row number to apply Tsys values to.
991 (default all rows)
992
993 """
994
995 if not hasattr(values, "__len__"):
996 values = [values]
997 self._settsys(values, row)
998
999 def get_weather(self, row=-1):
1000 """\
1001 Return the weather informations.
1002
1003 Parameters:
1004
1005 row: the rowno to get the information for. (default all rows)
1006
1007 Returns:
1008
1009 a dict or list of of dicts of values for the current selection
1010
1011 """
1012
1013 values = self._get_column(self._get_weather, row)
1014 if row > -1:
1015 return {'temperature': values[0],
1016 'pressure': values[1], 'humidity' : values[2],
1017 'windspeed' : values[3], 'windaz' : values[4]
1018 }
1019 else:
1020 out = []
1021 for r in values:
1022
1023 out.append({'temperature': r[0],
1024 'pressure': r[1], 'humidity' : r[2],
1025 'windspeed' : r[3], 'windaz' : r[4]
1026 })
1027 return out
1028
1029 def _row_callback(self, callback, label):
1030 out = ""
1031 outvec = []
1032 sep = '-'*50
1033 for i in range(self.nrow()):
1034 tm = self._gettime(i)
1035 src = self._getsourcename(i)
1036 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1037 out += 'Time[%s]:\n' % (tm)
1038 if self.nbeam(-1) > 1:
1039 out += ' Beam[%d] ' % (self.getbeam(i))
1040 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1041 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1042 outvec.append(callback(i))
1043 out += '= %3.3f\n' % (outvec[i])
1044 out += sep+'\n'
1045
1046 asaplog.push(sep)
1047 asaplog.push(" %s" % (label))
1048 asaplog.push(sep)
1049 asaplog.push(out)
1050 asaplog.post()
1051 return outvec
1052
1053 def _get_column(self, callback, row=-1, *args):
1054 """
1055 """
1056 if row == -1:
1057 return [callback(i, *args) for i in range(self.nrow())]
1058 else:
1059 if 0 <= row < self.nrow():
1060 return callback(row, *args)
1061
1062
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time in the selection)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # strip the sub-second/second/minute fields from the parse format
        # to match the precision of the strings returned by _gettime
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1108
1109
1110 def get_inttime(self, row=-1):
1111 """\
1112 Get a list of integration times for the observations.
1113 Return a time in seconds for each integration in the scantable.
1114
1115 Parameters:
1116
1117 row: row no of integration. Default -1 return all rows.
1118
1119 """
1120 return self._get_column(self._getinttime, row)
1121
1122
1123 def get_sourcename(self, row=-1):
1124 """\
1125 Get a list source names for the observations.
1126 Return a string for each integration in the scantable.
1127 Parameters:
1128
1129 row: row no of integration. Default -1 return all rows.
1130
1131 """
1132 return self._get_column(self._getsourcename, row)
1133
1134 def get_elevation(self, row=-1):
1135 """\
1136 Get a list of elevations for the observations.
1137 Return a float for each integration in the scantable.
1138
1139 Parameters:
1140
1141 row: row no of integration. Default -1 return all rows.
1142
1143 """
1144 return self._get_column(self._getelevation, row)
1145
1146 def get_azimuth(self, row=-1):
1147 """\
1148 Get a list of azimuths for the observations.
1149 Return a float for each integration in the scantable.
1150
1151 Parameters:
1152 row: row no of integration. Default -1 return all rows.
1153
1154 """
1155 return self._get_column(self._getazimuth, row)
1156
1157 def get_parangle(self, row=-1):
1158 """\
1159 Get a list of parallactic angles for the observations.
1160 Return a float for each integration in the scantable.
1161
1162 Parameters:
1163
1164 row: row no of integration. Default -1 return all rows.
1165
1166 """
1167 return self._get_column(self._getparangle, row)
1168
1169 def get_direction(self, row=-1):
1170 """
1171 Get a list of Positions on the sky (direction) for the observations.
1172 Return a string for each integration in the scantable.
1173
1174 Parameters:
1175
1176 row: row no of integration. Default -1 return all rows
1177
1178 """
1179 return self._get_column(self._getdirection, row)
1180
1181 def get_directionval(self, row=-1):
1182 """\
1183 Get a list of Positions on the sky (direction) for the observations.
1184 Return a float for each integration in the scantable.
1185
1186 Parameters:
1187
1188 row: row no of integration. Default -1 return all rows
1189
1190 """
1191 return self._get_column(self._getdirectionvec, row)
1192
1193 @asaplog_post_dec
1194 def set_unit(self, unit='channel'):
1195 """\
1196 Set the unit for all following operations on this scantable
1197
1198 Parameters:
1199
1200 unit: optional unit, default is 'channel'. Use one of '*Hz',
1201 'km/s', 'channel' or equivalent ''
1202
1203 """
1204 varlist = vars()
1205 if unit in ['', 'pixel', 'channel']:
1206 unit = ''
1207 inf = list(self._getcoordinfo())
1208 inf[0] = unit
1209 self._setcoordinfo(inf)
1210 self._add_history("set_unit", varlist)
1211
1212 @asaplog_post_dec
1213 def set_instrument(self, instr):
1214 """\
1215 Set the instrument for subsequent processing.
1216
1217 Parameters:
1218
1219 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1220 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1221
1222 """
1223 self._setInstrument(instr)
1224 self._add_history("set_instument", vars())
1225
1226 @asaplog_post_dec
1227 def set_feedtype(self, feedtype):
1228 """\
1229 Overwrite the feed type, which might not be set correctly.
1230
1231 Parameters:
1232
1233 feedtype: 'linear' or 'circular'
1234
1235 """
1236 self._setfeedtype(feedtype)
1237 self._add_history("set_feedtype", vars())
1238
1239 @asaplog_post_dec
1240 def set_doppler(self, doppler='RADIO'):
1241 """\
1242 Set the doppler for all following operations on this scantable.
1243
1244 Parameters:
1245
1246 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1247
1248 """
1249 varlist = vars()
1250 inf = list(self._getcoordinfo())
1251 inf[2] = doppler
1252 self._setcoordinfo(inf)
1253 self._add_history("set_doppler", vars())
1254
1255 @asaplog_post_dec
1256 def set_freqframe(self, frame=None):
1257 """\
1258 Set the frame type of the Spectral Axis.
1259
1260 Parameters:
1261
1262 frame: an optional frame type, default 'LSRK'. Valid frames are:
1263 'TOPO', 'LSRD', 'LSRK', 'BARY',
1264 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1265
1266 Example::
1267
1268 scan.set_freqframe('BARY')
1269
1270 """
1271 frame = frame or rcParams['scantable.freqframe']
1272 varlist = vars()
1273 # "REST" is not implemented in casacore
1274 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1275 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1276 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1277 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1278
1279 if frame in valid:
1280 inf = list(self._getcoordinfo())
1281 inf[1] = frame
1282 self._setcoordinfo(inf)
1283 self._add_history("set_freqframe", varlist)
1284 else:
1285 msg = "Please specify a valid freq type. Valid types are:\n", valid
1286 raise TypeError(msg)
1287
1288 @asaplog_post_dec
1289 def set_dirframe(self, frame=""):
1290 """\
1291 Set the frame type of the Direction on the sky.
1292
1293 Parameters:
1294
1295 frame: an optional frame type, default ''. Valid frames are:
1296 'J2000', 'B1950', 'GALACTIC'
1297
1298 Example:
1299
1300 scan.set_dirframe('GALACTIC')
1301
1302 """
1303 varlist = vars()
1304 Scantable.set_dirframe(self, frame)
1305 self._add_history("set_dirframe", varlist)
1306
1307 def get_unit(self):
1308 """\
1309 Get the default unit set in this scantable
1310
1311 Returns:
1312
1313 A unit string
1314
1315 """
1316 inf = self._getcoordinfo()
1317 unit = inf[0]
1318 if unit == '': unit = 'channel'
1319 return unit
1320
1321 @asaplog_post_dec
1322 def get_abcissa(self, rowno=0):
1323 """\
1324 Get the abcissa in the current coordinate setup for the currently
1325 selected Beam/IF/Pol
1326
1327 Parameters:
1328
1329 rowno: an optional row number in the scantable. Default is the
1330 first row, i.e. rowno=0
1331
1332 Returns:
1333
1334 The abcissa values and the format string (as a dictionary)
1335
1336 """
1337 abc = self._getabcissa(rowno)
1338 lbl = self._getabcissalabel(rowno)
1339 return abc, lbl
1340
1341 @asaplog_post_dec
1342 def flag(self, mask=None, unflag=False, row=-1):
1343 """\
1344 Flag the selected data using an optional channel mask.
1345
1346 Parameters:
1347
1348 mask: an optional channel mask, created with create_mask. Default
1349 (no mask) is all channels.
1350
1351 unflag: if True, unflag the data
1352
1353 row: an optional row number in the scantable.
1354 Default -1 flags all rows
1355
1356 """
1357 varlist = vars()
1358 mask = mask or []
1359 self._flag(row, mask, unflag)
1360 self._add_history("flag", varlist)
1361
1362 @asaplog_post_dec
1363 def flag_row(self, rows=None, unflag=False):
1364 """\
1365 Flag the selected data in row-based manner.
1366
1367 Parameters:
1368
1369 rows: list of row numbers to be flagged. Default is no row
1370 (must be explicitly specified to execute row-based
1371 flagging).
1372
1373 unflag: if True, unflag the data.
1374
1375 """
1376 varlist = vars()
1377 if rows is None:
1378 rows = []
1379 self._flag_row(rows, unflag)
1380 self._add_history("flag_row", varlist)
1381
1382 @asaplog_post_dec
1383 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1384 """\
1385 Flag the selected data outside a specified range (in channel-base)
1386
1387 Parameters:
1388
1389 uthres: upper threshold.
1390
1391 dthres: lower threshold
1392
1393 clipoutside: True for flagging data outside the range
1394 [dthres:uthres].
1395 False for flagging data inside the range.
1396
1397 unflag: if True, unflag the data.
1398
1399 """
1400 varlist = vars()
1401 self._clip(uthres, dthres, clipoutside, unflag)
1402 self._add_history("clip", varlist)
1403
1404 @asaplog_post_dec
1405 def lag_flag(self, start, end, unit="MHz", insitu=None):
1406 """\
1407 Flag the data in 'lag' space by providing a frequency to remove.
1408 Flagged data in the scantable get interpolated over the region.
1409 No taper is applied.
1410
1411 Parameters:
1412
1413 start: the start frequency (really a period within the
1414 bandwidth) or period to remove
1415
1416 end: the end frequency or period to remove
1417
1418 unit: the frequency unit (default 'MHz') or '' for
1419 explicit lag channels
1420
1421 *Notes*:
1422
1423 It is recommended to flag edges of the band or strong
1424 signals beforehand.
1425
1426 """
1427 if insitu is None: insitu = rcParams['insitu']
1428 self._math._setinsitu(insitu)
1429 varlist = vars()
1430 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1431 if not (unit == "" or base.has_key(unit)):
1432 raise ValueError("%s is not a valid unit." % unit)
1433 if unit == "":
1434 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1435 else:
1436 s = scantable(self._math._lag_flag(self, start*base[unit],
1437 end*base[unit], "frequency"))
1438 s._add_history("lag_flag", varlist)
1439 if insitu:
1440 self._assign(s)
1441 else:
1442 return s
1443
1444 @asaplog_post_dec
1445 def fft(self, rowno=None, mask=None, getrealimag=False):
1446 """\
1447 Apply FFT to the spectra.
1448 Flagged data in the scantable get interpolated over the region.
1449
1450 Parameters:
1451
1452 rowno: The row number(s) to be processed. int, list
1453 and tuple are accepted. By default (None), FFT
1454 is applied to the whole data.
1455
1456 mask: Auxiliary channel mask(s). Given as a boolean
1457 list, it is applied to all specified rows.
1458 A list of boolean lists can also be used to
1459 apply different masks. In the latter case, the
1460 length of 'mask' must be the same as that of
1461 'rowno'. The default is None.
1462
1463 getrealimag: If True, returns the real and imaginary part
1464 values of the complex results.
1465 If False (the default), returns the amplitude
1466 (absolute value) normalised with Ndata/2 and
1467 phase (argument, in unit of radian).
1468
1469 Returns:
1470
1471 A list of dictionaries containing the results for each spectrum.
1472 Each dictionary contains two values, the real and the imaginary
1473 parts when getrealimag = True, or the amplitude(absolute value)
1474 and the phase(argument) when getrealimag = False. The key for
1475 these values are 'real' and 'imag', or 'ampl' and 'phase',
1476 respectively.
1477 """
1478 if rowno is None:
1479 rowno = []
1480 if isinstance(rowno, int):
1481 rowno = [rowno]
1482 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1483 raise TypeError("The row number(s) must be int, list or tuple.")
1484 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1485
1486 usecommonmask = True
1487
1488 if mask is None:
1489 mask = []
1490 if isinstance(mask, list) or isinstance(mask, tuple):
1491 if len(mask) == 0:
1492 mask = [[]]
1493 else:
1494 if isinstance(mask[0], bool):
1495 if len(mask) != self.nchan(self.getif(rowno[0])):
1496 raise ValueError("The spectra and the mask have "
1497 "different length.")
1498 mask = [mask]
1499 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1500 usecommonmask = False
1501 if len(mask) != len(rowno):
1502 raise ValueError("When specifying masks for each "
1503 "spectrum, the numbers of them "
1504 "must be identical.")
1505 for i in xrange(mask):
1506 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1507 raise ValueError("The spectra and the mask have "
1508 "different length.")
1509 else:
1510 raise TypeError("The mask must be a boolean list or "
1511 "a list of boolean list.")
1512 else:
1513 raise TypeError("The mask must be a boolean list or a list of "
1514 "boolean list.")
1515
1516 res = []
1517
1518 imask = 0
1519 for whichrow in rowno:
1520 fspec = self._fft(whichrow, mask[imask], getrealimag)
1521 nspec = len(fspec)
1522
1523 i = 0
1524 v1 = []
1525 v2 = []
1526 reselem = {"real":[],"imag":[]} if getrealimag \
1527 else {"ampl":[],"phase":[]}
1528
1529 while (i < nspec):
1530 v1.append(fspec[i])
1531 v2.append(fspec[i+1])
1532 i += 2
1533
1534 if getrealimag:
1535 reselem["real"] += v1
1536 reselem["imag"] += v2
1537 else:
1538 reselem["ampl"] += v1
1539 reselem["phase"] += v2
1540
1541 res.append(reselem)
1542
1543 if not usecommonmask:
1544 imask += 1
1545
1546 return res
1547
1548 @asaplog_post_dec
1549 def create_mask(self, *args, **kwargs):
1550 """\
1551 Compute and return a mask based on [min, max] windows.
1552 The specified windows are to be INCLUDED, when the mask is
1553 applied.
1554
1555 Parameters:
1556
1557 [min, max], [min2, max2], ...
1558 Pairs of start/end points (inclusive)specifying the regions
1559 to be masked
1560
1561 invert: optional argument. If specified as True,
1562 return an inverted mask, i.e. the regions
1563 specified are EXCLUDED
1564
1565 row: create the mask using the specified row for
1566 unit conversions, default is row=0
1567 only necessary if frequency varies over rows.
1568
1569 Examples::
1570
1571 scan.set_unit('channel')
1572 # a)
1573 msk = scan.create_mask([400, 500], [800, 900])
1574 # masks everything outside 400 and 500
1575 # and 800 and 900 in the unit 'channel'
1576
1577 # b)
1578 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1579 # masks the regions between 400 and 500
1580 # and 800 and 900 in the unit 'channel'
1581
1582 # c)
1583 #mask only channel 400
1584 msk = scan.create_mask([400])
1585
1586 """
1587 row = kwargs.get("row", 0)
1588 data = self._getabcissa(row)
1589 u = self._getcoordinfo()[0]
1590 if u == "":
1591 u = "channel"
1592 msg = "The current mask window unit is %s" % u
1593 i = self._check_ifs()
1594 if not i:
1595 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1596 asaplog.push(msg)
1597 n = len(data)
1598 msk = _n_bools(n, False)
1599 # test if args is a 'list' or a 'normal *args - UGLY!!!
1600
1601 ws = (isinstance(args[-1][-1], int)
1602 or isinstance(args[-1][-1], float)) and args or args[0]
1603 for window in ws:
1604 if len(window) == 1:
1605 window = [window[0], window[0]]
1606 if len(window) == 0 or len(window) > 2:
1607 raise ValueError("A window needs to be defined as "
1608 "[start(, end)]")
1609 if window[0] > window[1]:
1610 tmp = window[0]
1611 window[0] = window[1]
1612 window[1] = tmp
1613 for i in range(n):
1614 if data[i] >= window[0] and data[i] <= window[1]:
1615 msk[i] = True
1616 if kwargs.has_key('invert'):
1617 if kwargs.get('invert'):
1618 msk = mask_not(msk)
1619 return msk
1620
1621 def get_masklist(self, mask=None, row=0, silent=False):
1622 """\
1623 Compute and return a list of mask windows, [min, max].
1624
1625 Parameters:
1626
1627 mask: channel mask, created with create_mask.
1628
1629 row: calcutate the masklist using the specified row
1630 for unit conversions, default is row=0
1631 only necessary if frequency varies over rows.
1632
1633 Returns:
1634
1635 [min, max], [min2, max2], ...
1636 Pairs of start/end points (inclusive)specifying
1637 the masked regions
1638
1639 """
1640 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1641 raise TypeError("The mask should be list or tuple.")
1642 if len(mask) <= 0:
1643 raise TypeError("The mask elements should be > 0")
1644 data = self._getabcissa(row)
1645 if len(data) != len(mask):
1646 msg = "Number of channels in scantable != number of mask elements"
1647 raise TypeError(msg)
1648 u = self._getcoordinfo()[0]
1649 if u == "":
1650 u = "channel"
1651 msg = "The current mask window unit is %s" % u
1652 i = self._check_ifs()
1653 if not i:
1654 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1655 if not silent:
1656 asaplog.push(msg)
1657 masklist = []
1658 ist, ien = None, None
1659 ist, ien=self.get_mask_indices(mask)
1660 if ist is not None and ien is not None:
1661 for i in xrange(len(ist)):
1662 range=[data[ist[i]],data[ien[i]]]
1663 range.sort()
1664 masklist.append([range[0],range[1]])
1665 return masklist
1666
1667 def get_mask_indices(self, mask=None):
1668 """\
1669 Compute and Return lists of mask start indices and mask end indices.
1670
1671 Parameters:
1672
1673 mask: channel mask, created with create_mask.
1674
1675 Returns:
1676
1677 List of mask start indices and that of mask end indices,
1678 i.e., [istart1,istart2,....], [iend1,iend2,....].
1679
1680 """
1681 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1682 raise TypeError("The mask should be list or tuple.")
1683 if len(mask) <= 0:
1684 raise TypeError("The mask elements should be > 0")
1685 istart = []
1686 iend = []
1687 if mask[0]:
1688 istart.append(0)
1689 for i in range(len(mask)-1):
1690 if not mask[i] and mask[i+1]:
1691 istart.append(i+1)
1692 elif mask[i] and not mask[i+1]:
1693 iend.append(i)
1694 if mask[len(mask)-1]:
1695 iend.append(len(mask)-1)
1696 if len(istart) != len(iend):
1697 raise RuntimeError("Numbers of mask start != mask end.")
1698 for i in range(len(istart)):
1699 if istart[i] > iend[i]:
1700 raise RuntimeError("Mask start index > mask end index")
1701 break
1702 return istart,iend
1703
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # work internally in 'channel' units; the original unit is
        # restored at the end of this method
        orig_unit = self.get_unit()
        self.set_unit('channel')

        # remember the current rest frequencies so they can be restored;
        # single-valued entries are flattened to a scalar
        orig_restfreq = self.get_restfreqs()
        orig_restfreq_list = []
        for i in orig_restfreq.keys():
            if len(orig_restfreq[i]) == 1:
                orig_restfreq_list.append(orig_restfreq[i][0])
            else:
                orig_restfreq_list.append(orig_restfreq[i])

        orig_coord = self._getcoordinfo()
        orig_frame = orig_coord[1]
        orig_doppler = orig_coord[2]

        if restfreq is None: restfreq = orig_restfreq_list
        if frame is None: frame = orig_frame
        if doppler is None: doppler = orig_doppler

        # temporarily apply the requested rest frequency / frame / doppler
        # for the frequency- and velocity-based conversions below
        self.set_restfreqs(restfreq)
        self.set_freqframe(frame)
        self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        # each comma-separated element is one 'spw[:channels]' selection
        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # each length is 1 (separator absent) or 2 (present once);
                # the product tells which, if any, separator appeared
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            # NOTE: invalid spws appended here are removed
                            # from 'res' at the end of the method
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            # select spws whose pixel range overlaps the
                            # requested frequency range
                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                # product <= 0 iff the intervals overlap
                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            # select spws whose velocity range overlaps the
                            # requested velocity range
                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                # product <= 0 iff the intervals overlap
                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    # look up the frequency coordinate for this spw
                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    restf = self.get_restfreqs().values()[0][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, doppler)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, doppler)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if res.has_key(spw):
                    res[spw].extend(crange_list)
                else:
                    res[spw] = crange_list

        # drop spws that were accepted syntactically but do not exist
        for spw in res.keys():
            if spw not in valid_ifs:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_restfreqs(orig_restfreq_list)
        self.set_freqframe(orig_frame)
        self.set_doppler(orig_doppler)
        self.set_unit(orig_unit)

        return res
2050
2051 @asaplog_post_dec
2052 def get_first_rowno_by_if(self, ifno):
2053 found = False
2054 for irow in xrange(self.nrow()):
2055 if (self.getif(irow) == ifno):
2056 res = irow
2057 found = True
2058 break
2059
2060 if not found: raise RuntimeError("Invalid IF value.")
2061
2062 return res
2063
2064 @asaplog_post_dec
2065 def _get_coordinate_list(self):
2066 res = []
2067 spws = self.getifnos()
2068 for spw in spws:
2069 elem = {}
2070 elem['if'] = spw
2071 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2072 res.append(elem)
2073
2074 return res
2075
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row of this IF to obtain its coordinate;
                        # 'ifrow' intentionally leaks out of this loop
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset is half a channel width in the current unit
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None entry means the whole spectrum of this spw
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2213
2214 @asaplog_post_dec
2215 def parse_idx_selection(self, mode, selexpr):
2216 """
2217 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2218 BEAMNO, and row number
2219
2220 Parameters:
2221 mode : which column to select.
2222 ['scan',|'if'|'pol'|'beam'|'row']
2223 selexpr : A comma separated selection expression.
2224 examples:
2225 '' = all (returns [])
2226 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2227 (returns [0,1,4,5,6,9])
2228 Returns:
2229 A List of selected indices
2230 """
2231 if selexpr == "":
2232 return []
2233 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2234 'b': 'beam', 'r': 'row'}
2235 smode = mode.lower()[0]
2236 if not (smode in valid_modes.keys()):
2237 msg = "Invalid mode '%s'. Valid modes are %s" %\
2238 (mode, str(valid_modes.values()))
2239 asaplog.post()
2240 asaplog.push(msg)
2241 asaplog.post("ERROR")
2242 mode = valid_modes[smode]
2243 minidx = None
2244 maxidx = None
2245 if smode == 'r':
2246 minidx = 0
2247 maxidx = self.nrow()
2248 else:
2249 idx = getattr(self,"get"+mode+"nos")()
2250 minidx = min(idx)
2251 maxidx = max(idx)
2252 del idx
2253 # split selexpr by "<spaces>,<spaces>"
2254 comma_sep = re.compile('\s*,\s*')
2255 sellist = comma_sep.split(selexpr)
2256 idxlist = []
2257 for currselstr in sellist:
2258 # single range (may include ~, < or >)
2259 currlist = self._parse_selection(currselstr, typestr='integer',
2260 minval=minidx,maxval=maxidx)
2261 for thelist in currlist:
2262 idxlist += range(thelist[0],thelist[1]+1)
2263 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2264 asaplog.push(msg)
2265 return idxlist
2266
2267 def _parse_selection(self, selstr, typestr='float', offset=0.,
2268 minval=None, maxval=None):
2269 """
2270 Parameters:
2271 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2272 typestr : The type of the values in returned list
2273 ('integer' or 'float')
2274 offset : The offset value to subtract from or add to
2275 the boundary value if the selection string
2276 includes '<' or '>' [Valid only for typestr='float']
2277 minval, maxval : The minimum/maximum values to set if the
2278 selection string includes '<' or '>'.
2279 The list element is filled with None by default.
2280 Returns:
2281 A list of min/max pair of selections.
2282 Example:
2283 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2284 --> returns [[0,2],[5,7],[9,9]]
2285 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2286 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2287 """
2288 # split selstr by '<spaces>;<spaces>'
2289 semi_sep = re.compile('\s*;\s*')
2290 selgroups = semi_sep.split(selstr)
2291 sellists = []
2292 if typestr.lower().startswith('int'):
2293 formatfunc = int
2294 offset = 1
2295 else:
2296 formatfunc = float
2297
2298 for currsel in selgroups:
2299 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2300 minsel = minval
2301 maxsel = maxval
2302 if currsel.find('~') > 0:
2303 # val0 <= x <= val1
2304 minsel = formatfunc(currsel.split('~')[0].strip())
2305 maxsel = formatfunc(currsel.split('~')[1].strip())
2306 elif currsel.strip().find('<=') > -1:
2307 bound = currsel.split('<=')
2308 try: # try "x <= val"
2309 minsel = minval
2310 maxsel = formatfunc(bound[1].strip())
2311 except ValueError: # now "val <= x"
2312 minsel = formatfunc(bound[0].strip())
2313 maxsel = maxval
2314 elif currsel.strip().find('>=') > -1:
2315 bound = currsel.split('>=')
2316 try: # try "x >= val"
2317 minsel = formatfunc(bound[1].strip())
2318 maxsel = maxval
2319 except ValueError: # now "val >= x"
2320 minsel = minval
2321 maxsel = formatfunc(bound[0].strip())
2322 elif currsel.strip().find('<') > -1:
2323 bound = currsel.split('<')
2324 try: # try "x < val"
2325 minsel = minval
2326 maxsel = formatfunc(bound[1].strip()) \
2327 - formatfunc(offset)
2328 except ValueError: # now "val < x"
2329 minsel = formatfunc(bound[0].strip()) \
2330 + formatfunc(offset)
2331 maxsel = maxval
2332 elif currsel.strip().find('>') > -1:
2333 bound = currsel.split('>')
2334 try: # try "x > val"
2335 minsel = formatfunc(bound[1].strip()) \
2336 + formatfunc(offset)
2337 maxsel = maxval
2338 except ValueError: # now "val > x"
2339 minsel = minval
2340 maxsel = formatfunc(bound[0].strip()) \
2341 - formatfunc(offset)
2342 else:
2343 minsel = formatfunc(currsel)
2344 maxsel = formatfunc(currsel)
2345 sellists.append([minsel,maxsel])
2346 return sellists
2347
2348# def get_restfreqs(self):
2349# """
2350# Get the restfrequency(s) stored in this scantable.
2351# The return value(s) are always of unit 'Hz'
2352# Parameters:
2353# none
2354# Returns:
2355# a list of doubles
2356# """
2357# return list(self._getrestfreqs())
2358
2359 def get_restfreqs(self, ids=None):
2360 """\
2361 Get the restfrequency(s) stored in this scantable.
2362 The return value(s) are always of unit 'Hz'
2363
2364 Parameters:
2365
2366 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2367 be retrieved
2368
2369 Returns:
2370
2371 dictionary containing ids and a list of doubles for each id
2372
2373 """
2374 if ids is None:
2375 rfreqs = {}
2376 idlist = self.getmolnos()
2377 for i in idlist:
2378 rfreqs[i] = list(self._getrestfreqs(i))
2379 return rfreqs
2380 else:
2381 if type(ids) == list or type(ids) == tuple:
2382 rfreqs = {}
2383 for i in ids:
2384 rfreqs[i] = list(self._getrestfreqs(i))
2385 return rfreqs
2386 else:
2387 return list(self._getrestfreqs(ids))
2388
2389 @asaplog_post_dec
2390 def set_restfreqs(self, freqs=None, unit='Hz'):
2391 """\
2392 Set or replace the restfrequency specified and
2393 if the 'freqs' argument holds a scalar,
2394 then that rest frequency will be applied to all the selected
2395 data. If the 'freqs' argument holds
2396 a vector, then it MUST be of equal or smaller length than
2397 the number of IFs (and the available restfrequencies will be
2398 replaced by this vector). In this case, *all* data have
2399 the restfrequency set per IF according
2400 to the corresponding value you give in the 'freqs' vector.
2401 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2402 IF 1 gets restfreq 2e9.
2403
2404 You can also specify the frequencies via a linecatalog.
2405
2406 Parameters:
2407
2408 freqs: list of rest frequency values or string idenitfiers
2409
2410 unit: unit for rest frequency (default 'Hz')
2411
2412
2413 Example::
2414
2415 # set the given restfrequency for the all currently selected IFs
2416 scan.set_restfreqs(freqs=1.4e9)
2417 # set restfrequencies for the n IFs (n > 1) in the order of the
2418 # list, i.e
2419 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2420 # len(list_of_restfreqs) == nIF
2421 # for nIF == 1 the following will set multiple restfrequency for
2422 # that IF
2423 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2424 # set multiple restfrequencies per IF. as a list of lists where
2425 # the outer list has nIF elements, the inner s arbitrary
2426 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2427
2428 *Note*:
2429
2430 To do more sophisticate Restfrequency setting, e.g. on a
2431 source and IF basis, use scantable.set_selection() before using
2432 this function::
2433
2434 # provided your scantable is called scan
2435 selection = selector()
2436 selection.set_name('ORION*')
2437 selection.set_ifs([1])
2438 scan.set_selection(selection)
2439 scan.set_restfreqs(freqs=86.6e9)
2440
2441 """
2442 varlist = vars()
2443 from asap import linecatalog
2444 # simple value
2445 if isinstance(freqs, int) or isinstance(freqs, float):
2446 self._setrestfreqs([freqs], [""], unit)
2447 # list of values
2448 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2449 # list values are scalars
2450 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2451 if len(freqs) == 1:
2452 self._setrestfreqs(freqs, [""], unit)
2453 else:
2454 # allow the 'old' mode of setting mulitple IFs
2455 savesel = self._getselection()
2456 sel = self.get_selection()
2457 iflist = self.getifnos()
2458 if len(freqs)>len(iflist):
2459 raise ValueError("number of elements in list of list "
2460 "exeeds the current IF selections")
2461 iflist = self.getifnos()
2462 for i, fval in enumerate(freqs):
2463 sel.set_ifs(iflist[i])
2464 self._setselection(sel)
2465 self._setrestfreqs([fval], [""], unit)
2466 self._setselection(savesel)
2467
2468 # list values are dict, {'value'=, 'name'=)
2469 elif isinstance(freqs[-1], dict):
2470 values = []
2471 names = []
2472 for d in freqs:
2473 values.append(d["value"])
2474 names.append(d["name"])
2475 self._setrestfreqs(values, names, unit)
2476 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2477 savesel = self._getselection()
2478 sel = self.get_selection()
2479 iflist = self.getifnos()
2480 if len(freqs)>len(iflist):
2481 raise ValueError("number of elements in list of list exeeds"
2482 " the current IF selections")
2483 for i, fval in enumerate(freqs):
2484 sel.set_ifs(iflist[i])
2485 self._setselection(sel)
2486 self._setrestfreqs(fval, [""], unit)
2487 self._setselection(savesel)
2488 # freqs are to be taken from a linecatalog
2489 elif isinstance(freqs, linecatalog):
2490 savesel = self._getselection()
2491 sel = self.get_selection()
2492 for i in xrange(freqs.nrow()):
2493 sel.set_ifs(iflist[i])
2494 self._setselection(sel)
2495 self._setrestfreqs([freqs.get_frequency(i)],
2496 [freqs.get_name(i)], "MHz")
2497 # ensure that we are not iterating past nIF
2498 if i == self.nif()-1: break
2499 self._setselection(savesel)
2500 else:
2501 return
2502 self._add_history("set_restfreqs", varlist)
2503
2504 @asaplog_post_dec
2505 def shift_refpix(self, delta):
2506 """\
2507 Shift the reference pixel of the Spectra Coordinate by an
2508 integer amount.
2509
2510 Parameters:
2511
2512 delta: the amount to shift by
2513
2514 *Note*:
2515
2516 Be careful using this with broadband data.
2517
2518 """
2519 varlist = vars()
2520 Scantable.shift_refpix(self, delta)
2521 s._add_history("shift_refpix", varlist)
2522
2523 @asaplog_post_dec
2524 def history(self, filename=None, nrows=-1, start=0):
2525 """\
2526 Print the history. Optionally to a file.
2527
2528 Parameters:
2529
2530 filename: The name of the file to save the history to.
2531
2532 """
2533 n = self._historylength()
2534 if nrows == -1:
2535 nrows = n
2536 if start+nrows > n:
2537 nrows = nrows-start
2538 if n > 1000 and nrows == n:
2539 nrows = 1000
2540 start = n-1000
2541 asaplog.push("Warning: History has {0} entries. Displaying last "
2542 "1000".format(n))
2543 hist = list(self._gethistory(nrows, start))
2544 out = "-"*80
2545 for h in hist:
2546 if not h.strip():
2547 continue
2548 if h.find("---") >-1:
2549 continue
2550 else:
2551 items = h.split("##")
2552 date = items[0]
2553 func = items[1]
2554 items = items[2:]
2555 out += "\n"+date+"\n"
2556 out += "Function: %s\n Parameters:" % (func)
2557 for i in items:
2558 if i == '':
2559 continue
2560 s = i.split("=")
2561 out += "\n %s = %s" % (s[0], s[1])
2562 out = "\n".join([out, "*"*80])
2563 if filename is not None:
2564 if filename is "":
2565 filename = 'scantable_history.txt'
2566 filename = os.path.expandvars(os.path.expanduser(filename))
2567 if not os.path.isdir(filename):
2568 data = open(filename, 'w')
2569 data.write(out)
2570 data.close()
2571 else:
2572 msg = "Illegal file name '%s'." % (filename)
2573 raise IOError(msg)
2574 return page(out)
2575
2576 #
2577 # Maths business
2578 #
2579 @asaplog_post_dec
2580 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2581 avmode="NONE"):
2582 """\
2583 Return the (time) weighted average of a scan. Scans will be averaged
2584 only if the source direction (RA/DEC) is within 1' otherwise
2585
2586 *Note*:
2587
2588 in channels only - align if necessary
2589
2590 Parameters:
2591
2592 mask: an optional mask (only used for 'var' and 'tsys'
2593 weighting)
2594
2595 scanav: True averages each scan separately
2596 False (default) averages all scans together,
2597
2598 weight: Weighting scheme.
2599 'none' (mean no weight)
2600 'var' (1/var(spec) weighted)
2601 'tsys' (1/Tsys**2 weighted)
2602 'tint' (integration time weighted)
2603 'tintsys' (Tint/Tsys**2)
2604 'median' ( median averaging)
2605 The default is 'tint'
2606
2607 align: align the spectra in velocity before averaging. It takes
2608 the time of the first spectrum as reference time.
2609 avmode: 'SOURCE' - also select by source name - or
2610 'NONE' (default). Not applicable for scanav=True or
2611 weight=median
2612
2613 Example::
2614
2615 # time average the scantable without using a mask
2616 newscan = scan.average_time()
2617
2618 """
2619 varlist = vars()
2620 weight = weight or 'TINT'
2621 mask = mask or ()
2622 scanav = (scanav and 'SCAN') or avmode.upper()
2623 scan = (self, )
2624
2625 if align:
2626 scan = (self.freq_align(insitu=False), )
2627 asaplog.push("Note: Alignment is don on a source-by-source basis")
2628 asaplog.push("Note: Averaging (by default) is not")
2629 # we need to set it to SOURCE averaging here
2630 s = None
2631 if weight.upper() == 'MEDIAN':
2632 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2633 scanav))
2634 else:
2635 s = scantable(self._math._average(scan, mask, weight.upper(),
2636 scanav))
2637 s._add_history("average_time", varlist)
2638 return s
2639
2640 @asaplog_post_dec
2641 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2642 """\
2643 Return a scan where all spectra are converted to either
2644 Jansky or Kelvin depending upon the flux units of the scan table.
2645 By default the function tries to look the values up internally.
2646 If it can't find them (or if you want to over-ride), you must
2647 specify EITHER jyperk OR eta (and D which it will try to look up
2648 also if you don't set it). jyperk takes precedence if you set both.
2649
2650 Parameters:
2651
2652 jyperk: the Jy / K conversion factor
2653
2654 eta: the aperture efficiency
2655
2656 d: the geometric diameter (metres)
2657
2658 insitu: if False a new scantable is returned.
2659 Otherwise, the scaling is done in-situ
2660 The default is taken from .asaprc (False)
2661
2662 """
2663 if insitu is None: insitu = rcParams['insitu']
2664 self._math._setinsitu(insitu)
2665 varlist = vars()
2666 jyperk = jyperk or -1.0
2667 d = d or -1.0
2668 eta = eta or -1.0
2669 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2670 s._add_history("convert_flux", varlist)
2671 if insitu: self._assign(s)
2672 else: return s
2673
2674 @asaplog_post_dec
2675 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2676 """\
2677 Return a scan after applying a gain-elevation correction.
2678 The correction can be made via either a polynomial or a
2679 table-based interpolation (and extrapolation if necessary).
2680 You specify polynomial coefficients, an ascii table or neither.
2681 If you specify neither, then a polynomial correction will be made
2682 with built in coefficients known for certain telescopes (an error
2683 will occur if the instrument is not known).
2684 The data and Tsys are *divided* by the scaling factors.
2685
2686 Parameters:
2687
2688 poly: Polynomial coefficients (default None) to compute a
2689 gain-elevation correction as a function of
2690 elevation (in degrees).
2691
2692 filename: The name of an ascii file holding correction factors.
2693 The first row of the ascii file must give the column
2694 names and these MUST include columns
2695 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2696 by this) somewhere.
2697 The second row must give the data type of the
2698 column. Use 'R' for Real and 'I' for Integer.
2699 An example file would be
2700 (actual factors are arbitrary) :
2701
2702 TIME ELEVATION FACTOR
2703 R R R
2704 0.1 0 0.8
2705 0.2 20 0.85
2706 0.3 40 0.9
2707 0.4 60 0.85
2708 0.5 80 0.8
2709 0.6 90 0.75
2710
2711 method: Interpolation method when correcting from a table.
2712 Values are 'nearest', 'linear' (default), 'cubic'
2713 and 'spline'
2714
2715 insitu: if False a new scantable is returned.
2716 Otherwise, the scaling is done in-situ
2717 The default is taken from .asaprc (False)
2718
2719 """
2720
2721 if insitu is None: insitu = rcParams['insitu']
2722 self._math._setinsitu(insitu)
2723 varlist = vars()
2724 poly = poly or ()
2725 from os.path import expandvars
2726 filename = expandvars(filename)
2727 s = scantable(self._math._gainel(self, poly, filename, method))
2728 s._add_history("gain_el", varlist)
2729 if insitu:
2730 self._assign(s)
2731 else:
2732 return s
2733
2734 @asaplog_post_dec
2735 def freq_align(self, reftime=None, method='cubic', insitu=None):
2736 """\
2737 Return a scan where all rows have been aligned in frequency/velocity.
2738 The alignment frequency frame (e.g. LSRK) is that set by function
2739 set_freqframe.
2740
2741 Parameters:
2742
2743 reftime: reference time to align at. By default, the time of
2744 the first row of data is used.
2745
2746 method: Interpolation method for regridding the spectra.
2747 Choose from 'nearest', 'linear', 'cubic' (default)
2748 and 'spline'
2749
2750 insitu: if False a new scantable is returned.
2751 Otherwise, the scaling is done in-situ
2752 The default is taken from .asaprc (False)
2753
2754 """
2755 if insitu is None: insitu = rcParams["insitu"]
2756 oldInsitu = self._math._insitu()
2757 self._math._setinsitu(insitu)
2758 varlist = vars()
2759 reftime = reftime or ""
2760 s = scantable(self._math._freq_align(self, reftime, method))
2761 s._add_history("freq_align", varlist)
2762 self._math._setinsitu(oldInsitu)
2763 if insitu:
2764 self._assign(s)
2765 else:
2766 return s
2767
2768 @asaplog_post_dec
2769 def opacity(self, tau=None, insitu=None):
2770 """\
2771 Apply an opacity correction. The data
2772 and Tsys are multiplied by the correction factor.
2773
2774 Parameters:
2775
2776 tau: (list of) opacity from which the correction factor is
2777 exp(tau*ZD)
2778 where ZD is the zenith-distance.
2779 If a list is provided, it has to be of length nIF,
2780 nIF*nPol or 1 and in order of IF/POL, e.g.
2781 [opif0pol0, opif0pol1, opif1pol0 ...]
2782 if tau is `None` the opacities are determined from a
2783 model.
2784
2785 insitu: if False a new scantable is returned.
2786 Otherwise, the scaling is done in-situ
2787 The default is taken from .asaprc (False)
2788
2789 """
2790 if insitu is None:
2791 insitu = rcParams['insitu']
2792 self._math._setinsitu(insitu)
2793 varlist = vars()
2794 if not hasattr(tau, "__len__"):
2795 tau = [tau]
2796 s = scantable(self._math._opacity(self, tau))
2797 s._add_history("opacity", varlist)
2798 if insitu:
2799 self._assign(s)
2800 else:
2801 return s
2802
2803 @asaplog_post_dec
2804 def bin(self, width=5, insitu=None):
2805 """\
2806 Return a scan where all spectra have been binned up.
2807
2808 Parameters:
2809
2810 width: The bin width (default=5) in pixels
2811
2812 insitu: if False a new scantable is returned.
2813 Otherwise, the scaling is done in-situ
2814 The default is taken from .asaprc (False)
2815
2816 """
2817 if insitu is None:
2818 insitu = rcParams['insitu']
2819 self._math._setinsitu(insitu)
2820 varlist = vars()
2821 s = scantable(self._math._bin(self, width))
2822 s._add_history("bin", varlist)
2823 if insitu:
2824 self._assign(s)
2825 else:
2826 return s
2827
2828 @asaplog_post_dec
2829 def reshape(self, first, last, insitu=None):
2830 """Resize the band by providing first and last channel.
2831 This will cut off all channels outside [first, last].
2832 """
2833 if insitu is None:
2834 insitu = rcParams['insitu']
2835 varlist = vars()
2836 if last < 0:
2837 last = self.nchan()-1 + last
2838 s = None
2839 if insitu:
2840 s = self
2841 else:
2842 s = self.copy()
2843 s._reshape(first,last)
2844 s._add_history("reshape", varlist)
2845 if not insitu:
2846 return s
2847
2848 @asaplog_post_dec
2849 def resample(self, width=5, method='cubic', insitu=None):
2850 """\
2851 Return a scan where all spectra have been binned up.
2852
2853 Parameters:
2854
2855 width: The bin width (default=5) in pixels
2856
2857 method: Interpolation method when correcting from a table.
2858 Values are 'nearest', 'linear', 'cubic' (default)
2859 and 'spline'
2860
2861 insitu: if False a new scantable is returned.
2862 Otherwise, the scaling is done in-situ
2863 The default is taken from .asaprc (False)
2864
2865 """
2866 if insitu is None:
2867 insitu = rcParams['insitu']
2868 self._math._setinsitu(insitu)
2869 varlist = vars()
2870 s = scantable(self._math._resample(self, method, width))
2871 s._add_history("resample", varlist)
2872 if insitu:
2873 self._assign(s)
2874 else:
2875 return s
2876
2877 @asaplog_post_dec
2878 def average_pol(self, mask=None, weight='none'):
2879 """\
2880 Average the Polarisations together.
2881
2882 Parameters:
2883
2884 mask: An optional mask defining the region, where the
2885 averaging will be applied. The output will have all
2886 specified points masked.
2887
2888 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2889 weighted), or 'tsys' (1/Tsys**2 weighted)
2890
2891 """
2892 varlist = vars()
2893 mask = mask or ()
2894 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2895 s._add_history("average_pol", varlist)
2896 return s
2897
2898 @asaplog_post_dec
2899 def average_beam(self, mask=None, weight='none'):
2900 """\
2901 Average the Beams together.
2902
2903 Parameters:
2904 mask: An optional mask defining the region, where the
2905 averaging will be applied. The output will have all
2906 specified points masked.
2907
2908 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2909 weighted), or 'tsys' (1/Tsys**2 weighted)
2910
2911 """
2912 varlist = vars()
2913 mask = mask or ()
2914 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2915 s._add_history("average_beam", varlist)
2916 return s
2917
2918 def parallactify(self, pflag):
2919 """\
2920 Set a flag to indicate whether this data should be treated as having
2921 been 'parallactified' (total phase == 0.0)
2922
2923 Parameters:
2924
2925 pflag: Bool indicating whether to turn this on (True) or
2926 off (False)
2927
2928 """
2929 varlist = vars()
2930 self._parallactify(pflag)
2931 self._add_history("parallactify", varlist)
2932
2933 @asaplog_post_dec
2934 def convert_pol(self, poltype=None):
2935 """\
2936 Convert the data to a different polarisation type.
2937 Note that you will need cross-polarisation terms for most conversions.
2938
2939 Parameters:
2940
2941 poltype: The new polarisation type. Valid types are:
2942 'linear', 'circular', 'stokes' and 'linpol'
2943
2944 """
2945 varlist = vars()
2946 s = scantable(self._math._convertpol(self, poltype))
2947 s._add_history("convert_pol", varlist)
2948 return s
2949
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so individual rows can be restored when
        # the user rejects the smoothing in the interactive plot
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive approval state; responses observed below are
        # 'N' (reject row), 'R' (reject rest), 'A' (accept rest).
        # NOTE(review): initial 'H' and the reset to None after row 0 are
        # passed through to _get_verify_action - semantics defined there.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # overlay original (grey) and smoothed (red) spectra per row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3042
3043 @asaplog_post_dec
3044 def regrid_channel(self, width=5, plot=False, insitu=None):
3045 """\
3046 Regrid the spectra by the specified channel width
3047
3048 Parameters:
3049
3050 width: The channel width (float) of regridded spectra
3051 in the current spectral unit.
3052
3053 plot: [NOT IMPLEMENTED YET]
3054 plot the original and the regridded spectra.
3055 In this each indivual fit has to be approved, by
3056 typing 'y' or 'n'
3057
3058 insitu: if False a new scantable is returned.
3059 Otherwise, the scaling is done in-situ
3060 The default is taken from .asaprc (False)
3061
3062 """
3063 if insitu is None: insitu = rcParams['insitu']
3064 varlist = vars()
3065
3066 if plot:
3067 asaplog.post()
3068 asaplog.push("Verification plot is not implemtnetd yet.")
3069 asaplog.post("WARN")
3070
3071 s = self.copy()
3072 s._regrid_specchan(width)
3073
3074 s._add_history("regrid_channel", varlist)
3075
3076# if plot:
3077# from asap.asapplotter import new_asaplot
3078# theplot = new_asaplot(rcParams['plotter.gui'])
3079# from matplotlib import rc as rcp
3080# rcp('lines', linewidth=1)
3081# theplot.set_panels()
3082# ylab=s._get_ordinate_label()
3083# #theplot.palette(0,["#777777","red"])
3084# for r in xrange(s.nrow()):
3085# xsm=s._getabcissa(r)
3086# ysm=s._getspectrum(r)
3087# xorg=orgscan._getabcissa(r)
3088# yorg=orgscan._getspectrum(r)
3089# theplot.clear()
3090# theplot.hold()
3091# theplot.set_axes('ylabel',ylab)
3092# theplot.set_axes('xlabel',s._getabcissalabel(r))
3093# theplot.set_axes('title',s._getsourcename(r))
3094# theplot.set_line(label='Original',color="#777777")
3095# theplot.plot(xorg,yorg)
3096# theplot.set_line(label='Smoothed',color="red")
3097# theplot.plot(xsm,ysm)
3098# ### Ugly part for legend
3099# for i in [0,1]:
3100# theplot.subplots[0]['lines'].append(
3101# [theplot.subplots[0]['axes'].lines[i]]
3102# )
3103# theplot.release()
3104# ### Ugly part for legend
3105# theplot.subplots[0]['lines']=[]
3106# res = raw_input("Accept smoothing ([y]/n): ")
3107# if res.upper() == 'N':
3108# s._setspectrum(yorg, r)
3109# theplot.quit()
3110# del theplot
3111# del orgscan
3112
3113 if insitu: self._assign(s)
3114 else: return s
3115
3116 @asaplog_post_dec
3117 def _parse_wn(self, wn):
3118 if isinstance(wn, list) or isinstance(wn, tuple):
3119 return wn
3120 elif isinstance(wn, int):
3121 return [ wn ]
3122 elif isinstance(wn, str):
3123 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3124 val = wn.split('-')
3125 val = [int(val[0]), int(val[1])]
3126 val.sort()
3127 res = [i for i in xrange(val[0], val[1]+1)]
3128 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3129 val = int(wn[2:])+1
3130 res = [i for i in xrange(val)]
3131 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3132 val = int(wn[:-2])+1
3133 res = [i for i in xrange(val)]
3134 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3135 val = int(wn[1:])
3136 res = [i for i in xrange(val)]
3137 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3138 val = int(wn[:-1])
3139 res = [i for i in xrange(val)]
3140 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3141 # then interpreted in C++
3142 # side as [a,a+1,...,a_nyq]
3143 # (CAS-3759)
3144 val = int(wn[2:])
3145 res = [val, -999]
3146 #res = [i for i in xrange(val, self.nchan()/2+1)]
3147 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3148 # then interpreted in C++
3149 # side as [a,a+1,...,a_nyq]
3150 # (CAS-3759)
3151 val = int(wn[:-2])
3152 res = [val, -999]
3153 #res = [i for i in xrange(val, self.nchan()/2+1)]
3154 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3155 # then interpreted in C++
3156 # side as [a+1,a+2,...,a_nyq]
3157 # (CAS-3759)
3158 val = int(wn[1:])+1
3159 res = [val, -999]
3160 #res = [i for i in xrange(val, self.nchan()/2+1)]
3161 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3162 # then interpreted in C++
3163 # side as [a+1,a+2,...,a_nyq]
3164 # (CAS-3759)
3165 val = int(wn[:-1])+1
3166 res = [val, -999]
3167 #res = [i for i in xrange(val, self.nchan()/2+1)]
3168
3169 return res
3170 else:
3171 msg = 'wrong value given for addwn/rejwn'
3172 raise RuntimeError(msg)
3173
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.
        """

        try:
            varlist = vars()
            # apply defaults for unspecified optional arguments;
            # an input baseline table is mandatory
            if retfitres is None: retfitres = False
            if inbltable is None: raise ValueError("bltable missing.")
            if outbltable is None: outbltable = ''
            if overwrite is None: overwrite = False

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # 4th argument tells the C++ layer whether outbltable already
            # exists on disk (combined with 'overwrite' there)
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # NOTE(review): parse_fitresult is presumably a module-level
            # helper defined elsewhere in this file - confirm
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3249
3250 @asaplog_post_dec
3251 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3252 """\
3253 Subtract baseline based on parameters written in the input list.
3254
3255 Parameters:
3256 insitu: if True, baseline fitting/subtraction is done
3257 in-situ. If False, a new scantable with
3258 baseline subtracted is returned. Actually,
3259 format of the returned value depends on both
3260 insitu and retfitres (see below).
3261 The default is taken from .asaprc (False)
3262 retfitres: if True, the results of baseline fitting (i.e.,
3263 coefficients and rms) are returned.
3264 default is False.
3265 The format of the returned value of this
3266 function varies as follows:
3267 (1) in case insitu=True and retfitres=True:
3268 fitting result.
3269 (2) in case insitu=True and retfitres=False:
3270 None.
3271 (3) in case insitu=False and retfitres=True:
3272 a dictionary containing a new scantable
3273 (with baseline subtracted) and the fitting
3274 results.
3275 (4) in case insitu=False and retfitres=False:
3276 a new scantable (with baseline subtracted).
3277 blinfo: baseline parameter set stored in a dictionary
3278 or a list of dictionary. Each dictionary
3279 corresponds to each spectrum and must contain
3280 the following keys and values:
3281 'row': row number,
3282 'blfunc': function name. available ones include
3283 'poly', 'chebyshev', 'cspline' and
3284 'sinusoid',
3285 'order': maximum order of polynomial. needed
3286 if blfunc='poly' or 'chebyshev',
3287 'npiece': number or piecewise polynomial.
3288 needed if blfunc='cspline',
3289 'nwave': a list of sinusoidal wave numbers.
3290 needed if blfunc='sinusoid', and
3291 'masklist': min-max windows for channel mask.
3292 the specified ranges will be used
3293 for fitting.
3294 bltable: name of output baseline table where baseline
3295 parameters and fitting results recorded.
3296 default is ''(no output).
3297 overwrite: if True when an existing baseline table is
3298 specified for bltable, overwrites it.
3299 Otherwise there is no harm.
3300 default is False.
3301
3302 Example:
3303 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3304 'masklist':[[10,350],[352,510]]},
3305 {'row':1, 'blfunc':'cspline', 'npiece':3,
3306 'masklist':[[3,16],[19,404],[407,511]]}
3307 ])
3308
3309 the first spectrum (row=0) will be fitted with polynomial
3310 of order=5 and the next one (row=1) will be fitted with cubic
3311 spline consisting of 3 pieces.
3312 """
3313
3314 try:
3315 varlist = vars()
3316 if retfitres is None: retfitres = False
3317 if blinfo is None: blinfo = []
3318 if bltable is None: bltable = ''
3319 if overwrite is None: overwrite = False
3320
3321 if insitu is None: insitu = rcParams['insitu']
3322 if insitu:
3323 workscan = self
3324 else:
3325 workscan = self.copy()
3326
3327 nrow = workscan.nrow()
3328
3329 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3330
3331 print "in_blinfo=< "+ str(in_blinfo)+" >"
3332
3333 sres = workscan._sub_baseline(in_blinfo,
3334 retfitres,
3335 bltable,
3336 os.path.exists(bltable),
3337 overwrite)
3338 if retfitres: res = parse_fitresult(sres)
3339
3340 workscan._add_history('sub_baseline', varlist)
3341
3342 if insitu:
3343 self._assign(workscan)
3344 if retfitres:
3345 return res
3346 else:
3347 return None
3348 else:
3349 if retfitres:
3350 return {'scantable': workscan, 'fitresults': res}
3351 else:
3352 return workscan
3353
3354 except RuntimeError, e:
3355 raise_fitting_failure_exception(e)
3356
3357 @asaplog_post_dec
3358 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3359 whichrow=None, uselinefinder=None, edge=None,
3360 threshold=None, chan_avg_limit=None):
3361 """\
3362 Calculates and returns model selection criteria for a specified
3363 baseline model and a given spectrum data.
3364 Available values include Akaike Information Criterion (AIC), the
3365 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3366 Bayesian Information Criterion (BIC) and the Generalised Cross
3367 Validation (GCV).
3368
3369 Parameters:
3370 value: name of model selection criteria to calculate.
3371 available ones include 'aic', 'aicc', 'bic' and
3372 'gcv'. default is 'aicc'.
3373 blfunc: baseline function name. available ones include
3374 'chebyshev', 'cspline' and 'sinusoid'.
3375 default is 'chebyshev'.
3376 order: parameter for basline function. actually stands for
3377 order of polynomial (order) for 'chebyshev',
3378 number of spline pieces (npiece) for 'cspline' and
3379 maximum wave number for 'sinusoid', respectively.
3380 default is 5 (which is also the default order value
3381 for [auto_]chebyshev_baseline()).
3382 mask: an optional mask. default is [].
3383 whichrow: row number. default is 0 (the first row)
3384 uselinefinder: use sd.linefinder() to flag out line regions
3385 default is True.
3386 edge: an optional number of channel to drop at
3387 the edge of spectrum. If only one value is
3388 specified, the same number will be dropped
3389 from both sides of the spectrum. Default
3390 is to keep all channels. Nested tuples
3391 represent individual edge selection for
3392 different IFs (a number of spectral channels
3393 can be different)
3394 default is (0, 0).
3395 threshold: the threshold used by line finder. It is
3396 better to keep it large as only strong lines
3397 affect the baseline solution.
3398 default is 3.
3399 chan_avg_limit: a maximum number of consequtive spectral
3400 channels to average during the search of
3401 weak and broad lines. The default is no
3402 averaging (and no search for weak lines).
3403 If such lines can affect the fitted baseline
3404 (e.g. a high order polynomial is fitted),
3405 increase this parameter (usually values up
3406 to 8 are reasonable). Most users of this
3407 method should find the default value sufficient.
3408 default is 1.
3409
3410 Example:
3411 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3412 """
3413
3414 try:
3415 varlist = vars()
3416
3417 if value is None: value = 'aicc'
3418 if blfunc is None: blfunc = 'chebyshev'
3419 if order is None: order = 5
3420 if mask is None: mask = []
3421 if whichrow is None: whichrow = 0
3422 if uselinefinder is None: uselinefinder = True
3423 if edge is None: edge = (0, 0)
3424 if threshold is None: threshold = 3
3425 if chan_avg_limit is None: chan_avg_limit = 1
3426
3427 return self._calc_aic(value, blfunc, order, mask,
3428 whichrow, uselinefinder, edge,
3429 threshold, chan_avg_limit)
3430
3431 except RuntimeError, e:
3432 raise_fitting_failure_exception(e)
3433
3434 @asaplog_post_dec
3435 def sinusoid_baseline(self, mask=None, applyfft=None,
3436 fftmethod=None, fftthresh=None,
3437 addwn=None, rejwn=None,
3438 insitu=None,
3439 clipthresh=None, clipniter=None,
3440 plot=None,
3441 getresidual=None,
3442 showprogress=None, minnrow=None,
3443 outlog=None,
3444 blfile=None, csvformat=None,
3445 bltable=None):
3446 """\
3447 Return a scan which has been baselined (all rows) with sinusoidal
3448 functions.
3449
3450 Parameters:
3451 mask: an optional mask
3452 applyfft: if True use some method, such as FFT, to find
3453 strongest sinusoidal components in the wavenumber
3454 domain to be used for baseline fitting.
3455 default is True.
3456 fftmethod: method to find the strong sinusoidal components.
3457 now only 'fft' is available and it is the default.
3458 fftthresh: the threshold to select wave numbers to be used for
3459 fitting from the distribution of amplitudes in the
3460 wavenumber domain.
3461 both float and string values accepted.
3462 given a float value, the unit is set to sigma.
3463 for string values, allowed formats include:
3464 'xsigma' or 'x' (= x-sigma level. e.g.,
3465 '3sigma'), or
3466 'topx' (= the x strongest ones, e.g. 'top5').
3467 default is 3.0 (unit: sigma).
3468 addwn: the additional wave numbers to be used for fitting.
3469 list or integer value is accepted to specify every
3470 wave numbers. also string value can be used in case
3471 you need to specify wave numbers in a certain range,
3472 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3473 '<a' (= 0,1,...,a-2,a-1),
3474 '>=a' (= a, a+1, ... up to the maximum wave
3475 number corresponding to the Nyquist
3476 frequency for the case of FFT).
3477 default is [0].
3478 rejwn: the wave numbers NOT to be used for fitting.
3479 can be set just as addwn but has higher priority:
3480 wave numbers which are specified both in addwn
3481 and rejwn will NOT be used. default is [].
3482 insitu: if False a new scantable is returned.
3483 Otherwise, the scaling is done in-situ
3484 The default is taken from .asaprc (False)
3485 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3486 clipniter: maximum number of iteration of 'clipthresh'-sigma
3487 clipping (default is 0)
3488 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3489 plot the fit and the residual. In this each
3490 indivual fit has to be approved, by typing 'y'
3491 or 'n'
3492 getresidual: if False, returns best-fit values instead of
3493 residual. (default is True)
3494 showprogress: show progress status for large data.
3495 default is True.
3496 minnrow: minimum number of input spectra to show.
3497 default is 1000.
3498 outlog: Output the coefficients of the best-fit
3499 function to logger (default is False)
3500 blfile: Name of a text file in which the best-fit
3501 parameter values to be written
3502 (default is '': no file/logger output)
3503 csvformat: if True blfile is csv-formatted, default is False.
3504 bltable: name of a baseline table where fitting results
3505 (coefficients, rms, etc.) are to be written.
3506 if given, fitting results will NOT be output to
3507 scantable (insitu=True) or None will be
3508 returned (insitu=False).
3509 (default is "": no table output)
3510
3511 Example:
3512 # return a scan baselined by a combination of sinusoidal curves
3513 # having wave numbers in spectral window up to 10,
3514 # also with 3-sigma clipping, iteration up to 4 times
3515 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3516
3517 Note:
3518 The best-fit parameter values output in logger and/or blfile are now
3519 based on specunit of 'channel'.
3520 """
3521
3522 try:
3523 varlist = vars()
3524
3525 if insitu is None: insitu = rcParams['insitu']
3526 if insitu:
3527 workscan = self
3528 else:
3529 workscan = self.copy()
3530
3531 if mask is None: mask = []
3532 if applyfft is None: applyfft = True
3533 if fftmethod is None: fftmethod = 'fft'
3534 if fftthresh is None: fftthresh = 3.0
3535 if addwn is None: addwn = [0]
3536 if rejwn is None: rejwn = []
3537 if clipthresh is None: clipthresh = 3.0
3538 if clipniter is None: clipniter = 0
3539 if plot is None: plot = False
3540 if getresidual is None: getresidual = True
3541 if showprogress is None: showprogress = True
3542 if minnrow is None: minnrow = 1000
3543 if outlog is None: outlog = False
3544 if blfile is None: blfile = ''
3545 if csvformat is None: csvformat = False
3546 if bltable is None: bltable = ''
3547
3548 sapplyfft = 'true' if applyfft else 'false'
3549 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3550
3551 scsvformat = 'T' if csvformat else 'F'
3552
3553 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3554 workscan._sinusoid_baseline(mask,
3555 fftinfo,
3556 #applyfft, fftmethod.lower(),
3557 #str(fftthresh).lower(),
3558 workscan._parse_wn(addwn),
3559 workscan._parse_wn(rejwn),
3560 clipthresh, clipniter,
3561 getresidual,
3562 pack_progress_params(showprogress,
3563 minnrow),
3564 outlog, scsvformat+blfile,
3565 bltable)
3566 workscan._add_history('sinusoid_baseline', varlist)
3567
3568 if bltable == '':
3569 if insitu:
3570 self._assign(workscan)
3571 else:
3572 return workscan
3573 else:
3574 if not insitu:
3575 return None
3576
3577 except RuntimeError, e:
3578 raise_fitting_failure_exception(e)
3579
3580
3581 @asaplog_post_dec
3582 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3583 fftmethod=None, fftthresh=None,
3584 addwn=None, rejwn=None,
3585 insitu=None,
3586 clipthresh=None, clipniter=None,
3587 edge=None, threshold=None, chan_avg_limit=None,
3588 plot=None,
3589 getresidual=None,
3590 showprogress=None, minnrow=None,
3591 outlog=None,
3592 blfile=None, csvformat=None,
3593 bltable=None):
3594 """\
3595 Return a scan which has been baselined (all rows) with sinusoidal
3596 functions.
3597 Spectral lines are detected first using linefinder and masked out
3598 to avoid them affecting the baseline solution.
3599
3600 Parameters:
3601 mask: an optional mask retreived from scantable
3602 applyfft: if True use some method, such as FFT, to find
3603 strongest sinusoidal components in the wavenumber
3604 domain to be used for baseline fitting.
3605 default is True.
3606 fftmethod: method to find the strong sinusoidal components.
3607 now only 'fft' is available and it is the default.
3608 fftthresh: the threshold to select wave numbers to be used for
3609 fitting from the distribution of amplitudes in the
3610 wavenumber domain.
3611 both float and string values accepted.
3612 given a float value, the unit is set to sigma.
3613 for string values, allowed formats include:
3614 'xsigma' or 'x' (= x-sigma level. e.g.,
3615 '3sigma'), or
3616 'topx' (= the x strongest ones, e.g. 'top5').
3617 default is 3.0 (unit: sigma).
3618 addwn: the additional wave numbers to be used for fitting.
3619 list or integer value is accepted to specify every
3620 wave numbers. also string value can be used in case
3621 you need to specify wave numbers in a certain range,
3622 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3623 '<a' (= 0,1,...,a-2,a-1),
3624 '>=a' (= a, a+1, ... up to the maximum wave
3625 number corresponding to the Nyquist
3626 frequency for the case of FFT).
3627 default is [0].
3628 rejwn: the wave numbers NOT to be used for fitting.
3629 can be set just as addwn but has higher priority:
3630 wave numbers which are specified both in addwn
3631 and rejwn will NOT be used. default is [].
3632 insitu: if False a new scantable is returned.
3633 Otherwise, the scaling is done in-situ
3634 The default is taken from .asaprc (False)
3635 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3636 clipniter: maximum number of iteration of 'clipthresh'-sigma
3637 clipping (default is 0)
3638 edge: an optional number of channel to drop at
3639 the edge of spectrum. If only one value is
3640 specified, the same number will be dropped
3641 from both sides of the spectrum. Default
3642 is to keep all channels. Nested tuples
3643 represent individual edge selection for
3644 different IFs (a number of spectral channels
3645 can be different)
3646 threshold: the threshold used by line finder. It is
3647 better to keep it large as only strong lines
3648 affect the baseline solution.
3649 chan_avg_limit: a maximum number of consequtive spectral
3650 channels to average during the search of
3651 weak and broad lines. The default is no
3652 averaging (and no search for weak lines).
3653 If such lines can affect the fitted baseline
3654 (e.g. a high order polynomial is fitted),
3655 increase this parameter (usually values up
3656 to 8 are reasonable). Most users of this
3657 method should find the default value sufficient.
3658 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3659 plot the fit and the residual. In this each
3660 indivual fit has to be approved, by typing 'y'
3661 or 'n'
3662 getresidual: if False, returns best-fit values instead of
3663 residual. (default is True)
3664 showprogress: show progress status for large data.
3665 default is True.
3666 minnrow: minimum number of input spectra to show.
3667 default is 1000.
3668 outlog: Output the coefficients of the best-fit
3669 function to logger (default is False)
3670 blfile: Name of a text file in which the best-fit
3671 parameter values to be written
3672 (default is "": no file/logger output)
3673 csvformat: if True blfile is csv-formatted, default is False.
3674 bltable: name of a baseline table where fitting results
3675 (coefficients, rms, etc.) are to be written.
3676 if given, fitting results will NOT be output to
3677 scantable (insitu=True) or None will be
3678 returned (insitu=False).
3679 (default is "": no table output)
3680
3681 Example:
3682 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3683
3684 Note:
3685 The best-fit parameter values output in logger and/or blfile are now
3686 based on specunit of 'channel'.
3687 """
3688
3689 try:
3690 varlist = vars()
3691
3692 if insitu is None: insitu = rcParams['insitu']
3693 if insitu:
3694 workscan = self
3695 else:
3696 workscan = self.copy()
3697
3698 if mask is None: mask = []
3699 if applyfft is None: applyfft = True
3700 if fftmethod is None: fftmethod = 'fft'
3701 if fftthresh is None: fftthresh = 3.0
3702 if addwn is None: addwn = [0]
3703 if rejwn is None: rejwn = []
3704 if clipthresh is None: clipthresh = 3.0
3705 if clipniter is None: clipniter = 0
3706 if edge is None: edge = (0,0)
3707 if threshold is None: threshold = 3
3708 if chan_avg_limit is None: chan_avg_limit = 1
3709 if plot is None: plot = False
3710 if getresidual is None: getresidual = True
3711 if showprogress is None: showprogress = True
3712 if minnrow is None: minnrow = 1000
3713 if outlog is None: outlog = False
3714 if blfile is None: blfile = ''
3715 if csvformat is None: csvformat = False
3716 if bltable is None: bltable = ''
3717
3718 sapplyfft = 'true' if applyfft else 'false'
3719 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3720
3721 scsvformat = 'T' if csvformat else 'F'
3722
3723 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3724 workscan._auto_sinusoid_baseline(mask,
3725 fftinfo,
3726 workscan._parse_wn(addwn),
3727 workscan._parse_wn(rejwn),
3728 clipthresh, clipniter,
3729 normalise_edge_param(edge),
3730 threshold, chan_avg_limit,
3731 getresidual,
3732 pack_progress_params(showprogress,
3733 minnrow),
3734 outlog, scsvformat+blfile, bltable)
3735 workscan._add_history("auto_sinusoid_baseline", varlist)
3736
3737 if bltable == '':
3738 if insitu:
3739 self._assign(workscan)
3740 else:
3741 return workscan
3742 else:
3743 if not insitu:
3744 return None
3745
3746 except RuntimeError, e:
3747 raise_fitting_failure_exception(e)
3748
3749 @asaplog_post_dec
3750 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3751 clipthresh=None, clipniter=None, plot=None,
3752 getresidual=None, showprogress=None, minnrow=None,
3753 outlog=None, blfile=None, csvformat=None,
3754 bltable=None):
3755 """\
3756 Return a scan which has been baselined (all rows) by cubic spline
3757 function (piecewise cubic polynomial).
3758
3759 Parameters:
3760 mask: An optional mask
3761 npiece: Number of pieces. (default is 2)
3762 insitu: If False a new scantable is returned.
3763 Otherwise, the scaling is done in-situ
3764 The default is taken from .asaprc (False)
3765 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3766 clipniter: maximum number of iteration of 'clipthresh'-sigma
3767 clipping (default is 0)
3768 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3769 plot the fit and the residual. In this each
3770 indivual fit has to be approved, by typing 'y'
3771 or 'n'
3772 getresidual: if False, returns best-fit values instead of
3773 residual. (default is True)
3774 showprogress: show progress status for large data.
3775 default is True.
3776 minnrow: minimum number of input spectra to show.
3777 default is 1000.
3778 outlog: Output the coefficients of the best-fit
3779 function to logger (default is False)
3780 blfile: Name of a text file in which the best-fit
3781 parameter values to be written
3782 (default is "": no file/logger output)
3783 csvformat: if True blfile is csv-formatted, default is False.
3784 bltable: name of a baseline table where fitting results
3785 (coefficients, rms, etc.) are to be written.
3786 if given, fitting results will NOT be output to
3787 scantable (insitu=True) or None will be
3788 returned (insitu=False).
3789 (default is "": no table output)
3790
3791 Example:
3792 # return a scan baselined by a cubic spline consisting of 2 pieces
3793 # (i.e., 1 internal knot),
3794 # also with 3-sigma clipping, iteration up to 4 times
3795 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3796
3797 Note:
3798 The best-fit parameter values output in logger and/or blfile are now
3799 based on specunit of 'channel'.
3800 """
3801
3802 try:
3803 varlist = vars()
3804
3805 if insitu is None: insitu = rcParams['insitu']
3806 if insitu:
3807 workscan = self
3808 else:
3809 workscan = self.copy()
3810
3811 if mask is None: mask = []
3812 if npiece is None: npiece = 2
3813 if clipthresh is None: clipthresh = 3.0
3814 if clipniter is None: clipniter = 0
3815 if plot is None: plot = False
3816 if getresidual is None: getresidual = True
3817 if showprogress is None: showprogress = True
3818 if minnrow is None: minnrow = 1000
3819 if outlog is None: outlog = False
3820 if blfile is None: blfile = ''
3821 if csvformat is None: csvformat = False
3822 if bltable is None: bltable = ''
3823
3824 scsvformat = 'T' if csvformat else 'F'
3825
3826 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3827 workscan._cspline_baseline(mask, npiece,
3828 clipthresh, clipniter,
3829 getresidual,
3830 pack_progress_params(showprogress,
3831 minnrow),
3832 outlog, scsvformat+blfile,
3833 bltable)
3834 workscan._add_history("cspline_baseline", varlist)
3835
3836 if bltable == '':
3837 if insitu:
3838 self._assign(workscan)
3839 else:
3840 return workscan
3841 else:
3842 if not insitu:
3843 return None
3844
3845 except RuntimeError, e:
3846 raise_fitting_failure_exception(e)
3847
3848 @asaplog_post_dec
3849 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3850 clipthresh=None, clipniter=None,
3851 edge=None, threshold=None, chan_avg_limit=None,
3852 getresidual=None, plot=None,
3853 showprogress=None, minnrow=None, outlog=None,
3854 blfile=None, csvformat=None, bltable=None):
3855 """\
3856 Return a scan which has been baselined (all rows) by cubic spline
3857 function (piecewise cubic polynomial).
3858 Spectral lines are detected first using linefinder and masked out
3859 to avoid them affecting the baseline solution.
3860
3861 Parameters:
3862 mask: an optional mask retreived from scantable
3863 npiece: Number of pieces. (default is 2)
3864 insitu: if False a new scantable is returned.
3865 Otherwise, the scaling is done in-situ
3866 The default is taken from .asaprc (False)
3867 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3868 clipniter: maximum number of iteration of 'clipthresh'-sigma
3869 clipping (default is 0)
3870 edge: an optional number of channel to drop at
3871 the edge of spectrum. If only one value is
3872 specified, the same number will be dropped
3873 from both sides of the spectrum. Default
3874 is to keep all channels. Nested tuples
3875 represent individual edge selection for
3876 different IFs (a number of spectral channels
3877 can be different)
3878 threshold: the threshold used by line finder. It is
3879 better to keep it large as only strong lines
3880 affect the baseline solution.
3881 chan_avg_limit: a maximum number of consequtive spectral
3882 channels to average during the search of
3883 weak and broad lines. The default is no
3884 averaging (and no search for weak lines).
3885 If such lines can affect the fitted baseline
3886 (e.g. a high order polynomial is fitted),
3887 increase this parameter (usually values up
3888 to 8 are reasonable). Most users of this
3889 method should find the default value sufficient.
3890 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3891 plot the fit and the residual. In this each
3892 indivual fit has to be approved, by typing 'y'
3893 or 'n'
3894 getresidual: if False, returns best-fit values instead of
3895 residual. (default is True)
3896 showprogress: show progress status for large data.
3897 default is True.
3898 minnrow: minimum number of input spectra to show.
3899 default is 1000.
3900 outlog: Output the coefficients of the best-fit
3901 function to logger (default is False)
3902 blfile: Name of a text file in which the best-fit
3903 parameter values to be written
3904 (default is "": no file/logger output)
3905 csvformat: if True blfile is csv-formatted, default is False.
3906 bltable: name of a baseline table where fitting results
3907 (coefficients, rms, etc.) are to be written.
3908 if given, fitting results will NOT be output to
3909 scantable (insitu=True) or None will be
3910 returned (insitu=False).
3911 (default is "": no table output)
3912
3913 Example:
3914 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3915
3916 Note:
3917 The best-fit parameter values output in logger and/or blfile are now
3918 based on specunit of 'channel'.
3919 """
3920
3921 try:
3922 varlist = vars()
3923
3924 if insitu is None: insitu = rcParams['insitu']
3925 if insitu:
3926 workscan = self
3927 else:
3928 workscan = self.copy()
3929
3930 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3931 if mask is None: mask = []
3932 if npiece is None: npiece = 2
3933 if clipthresh is None: clipthresh = 3.0
3934 if clipniter is None: clipniter = 0
3935 if edge is None: edge = (0, 0)
3936 if threshold is None: threshold = 3
3937 if chan_avg_limit is None: chan_avg_limit = 1
3938 if plot is None: plot = False
3939 if getresidual is None: getresidual = True
3940 if showprogress is None: showprogress = True
3941 if minnrow is None: minnrow = 1000
3942 if outlog is None: outlog = False
3943 if blfile is None: blfile = ''
3944 if csvformat is None: csvformat = False
3945 if bltable is None: bltable = ''
3946
3947 scsvformat = 'T' if csvformat else 'F'
3948
3949 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3950 workscan._auto_cspline_baseline(mask, npiece,
3951 clipthresh, clipniter,
3952 normalise_edge_param(edge),
3953 threshold,
3954 chan_avg_limit, getresidual,
3955 pack_progress_params(showprogress,
3956 minnrow),
3957 outlog,
3958 scsvformat+blfile,
3959 bltable)
3960 workscan._add_history("auto_cspline_baseline", varlist)
3961
3962 if bltable == '':
3963 if insitu:
3964 self._assign(workscan)
3965 else:
3966 return workscan
3967 else:
3968 if not insitu:
3969 return None
3970
3971 except RuntimeError, e:
3972 raise_fitting_failure_exception(e)
3973
3974 @asaplog_post_dec
3975 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
3976 clipthresh=None, clipniter=None, plot=None,
3977 getresidual=None, showprogress=None, minnrow=None,
3978 outlog=None, blfile=None, csvformat=None,
3979 bltable=None):
3980 """\
3981 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3982
3983 Parameters:
3984 mask: An optional mask
3985 order: the maximum order of Chebyshev polynomial (default is 5)
3986 insitu: If False a new scantable is returned.
3987 Otherwise, the scaling is done in-situ
3988 The default is taken from .asaprc (False)
3989 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3990 clipniter: maximum number of iteration of 'clipthresh'-sigma
3991 clipping (default is 0)
3992 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3993 plot the fit and the residual. In this each
3994 indivual fit has to be approved, by typing 'y'
3995 or 'n'
3996 getresidual: if False, returns best-fit values instead of
3997 residual. (default is True)
3998 showprogress: show progress status for large data.
3999 default is True.
4000 minnrow: minimum number of input spectra to show.
4001 default is 1000.
4002 outlog: Output the coefficients of the best-fit
4003 function to logger (default is False)
4004 blfile: Name of a text file in which the best-fit
4005 parameter values to be written
4006 (default is "": no file/logger output)
4007 csvformat: if True blfile is csv-formatted, default is False.
4008 bltable: name of a baseline table where fitting results
4009 (coefficients, rms, etc.) are to be written.
4010 if given, fitting results will NOT be output to
4011 scantable (insitu=True) or None will be
4012 returned (insitu=False).
4013 (default is "": no table output)
4014
4015 Example:
4016 # return a scan baselined by a cubic spline consisting of 2 pieces
4017 # (i.e., 1 internal knot),
4018 # also with 3-sigma clipping, iteration up to 4 times
4019 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4020
4021 Note:
4022 The best-fit parameter values output in logger and/or blfile are now
4023 based on specunit of 'channel'.
4024 """
4025
4026 try:
4027 varlist = vars()
4028
4029 if insitu is None: insitu = rcParams['insitu']
4030 if insitu:
4031 workscan = self
4032 else:
4033 workscan = self.copy()
4034
4035 if mask is None: mask = []
4036 if order is None: order = 5
4037 if clipthresh is None: clipthresh = 3.0
4038 if clipniter is None: clipniter = 0
4039 if plot is None: plot = False
4040 if getresidual is None: getresidual = True
4041 if showprogress is None: showprogress = True
4042 if minnrow is None: minnrow = 1000
4043 if outlog is None: outlog = False
4044 if blfile is None: blfile = ''
4045 if csvformat is None: csvformat = False
4046 if bltable is None: bltable = ''
4047
4048 scsvformat = 'T' if csvformat else 'F'
4049
4050 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4051 workscan._chebyshev_baseline(mask, order,
4052 clipthresh, clipniter,
4053 getresidual,
4054 pack_progress_params(showprogress,
4055 minnrow),
4056 outlog, scsvformat+blfile,
4057 bltable)
4058 workscan._add_history("chebyshev_baseline", varlist)
4059
4060 if bltable == '':
4061 if insitu:
4062 self._assign(workscan)
4063 else:
4064 return workscan
4065 else:
4066 if not insitu:
4067 return None
4068
4069 except RuntimeError, e:
4070 raise_fitting_failure_exception(e)
4071
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:       an optional mask retrieved from scantable
            order:      the maximum order of Chebyshev polynomial (default is 5)
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:  maximum number of iteration of 'clipthresh'-sigma
                        clipping (default is 0)
            edge:       an optional number of channel to drop at
                        the edge of spectrum. If only one value is
                        specified, the same number will be dropped
                        from both sides of the spectrum. Default
                        is to keep all channels. Nested tuples
                        represent individual edge selection for
                        different IFs (a number of spectral channels
                        can be different)
            threshold:  the threshold used by line finder. It is
                        better to keep it large as only strong lines
                        affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                        channels to average during the search of
                        weak and broad lines. The default is no
                        averaging (and no search for weak lines).
                        If such lines can affect the fitted baseline
                        (e.g. a high order polynomial is fitted),
                        increase this parameter (usually values up
                        to 8 are reasonable). Most users of this
                        method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                        plot the fit and the residual. In this each
                        individual fit has to be approved, by typing 'y'
                        or 'n'
            getresidual: if False, returns best-fit values instead of
                        residual. (default is True)
            showprogress: show progress status for large data.
                        default is True.
            minnrow:    minimum number of input spectra to show.
                        default is 1000.
            outlog:     Output the coefficients of the best-fit
                        function to logger (default is False)
            blfile:     Name of a text file in which the best-fit
                        parameter values to be written
                        (default is "": no file/logger output)
            csvformat:  if True blfile is csv-formatted, default is False.
            bltable:    name of a baseline table where fitting results
                        (coefficients, rms, etc.) are to be written.
                        if given, fitting results will NOT be output to
                        scantable (insitu=True) or None will be
                        returned (insitu=False).
                        (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()   # snapshot of call parameters for history

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve defaults for all optional parameters
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag is passed to C++ as a 'T'/'F' prefix of blfile
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, PLOT=true IS UNAVAILABLE UNTIL Chebyshev fitting
            # is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table is written, the scantable itself is
            # left untouched (insitu=True) or None is returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4194
4195 @asaplog_post_dec
4196 def poly_baseline(self, mask=None, order=None, insitu=None,
4197 clipthresh=None, clipniter=None, plot=None,
4198 getresidual=None, showprogress=None, minnrow=None,
4199 outlog=None, blfile=None, csvformat=None,
4200 bltable=None):
4201 """\
4202 Return a scan which has been baselined (all rows) by a polynomial.
4203 Parameters:
4204 mask: an optional mask
4205 order: the order of the polynomial (default is 0)
4206 insitu: if False a new scantable is returned.
4207 Otherwise, the scaling is done in-situ
4208 The default is taken from .asaprc (False)
4209 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4210 clipniter: maximum number of iteration of 'clipthresh'-sigma
4211 clipping (default is 0)
4212 plot: plot the fit and the residual. In this each
4213 indivual fit has to be approved, by typing 'y'
4214 or 'n'
4215 getresidual: if False, returns best-fit values instead of
4216 residual. (default is True)
4217 showprogress: show progress status for large data.
4218 default is True.
4219 minnrow: minimum number of input spectra to show.
4220 default is 1000.
4221 outlog: Output the coefficients of the best-fit
4222 function to logger (default is False)
4223 blfile: Name of a text file in which the best-fit
4224 parameter values to be written
4225 (default is "": no file/logger output)
4226 csvformat: if True blfile is csv-formatted, default is False.
4227 bltable: name of a baseline table where fitting results
4228 (coefficients, rms, etc.) are to be written.
4229 if given, fitting results will NOT be output to
4230 scantable (insitu=True) or None will be
4231 returned (insitu=False).
4232 (default is "": no table output)
4233
4234 Example:
4235 # return a scan baselined by a third order polynomial,
4236 # not using a mask
4237 bscan = scan.poly_baseline(order=3)
4238 """
4239
4240 try:
4241 varlist = vars()
4242
4243 if insitu is None:
4244 insitu = rcParams["insitu"]
4245 if insitu:
4246 workscan = self
4247 else:
4248 workscan = self.copy()
4249
4250 if mask is None: mask = []
4251 if order is None: order = 0
4252 if clipthresh is None: clipthresh = 3.0
4253 if clipniter is None: clipniter = 0
4254 if plot is None: plot = False
4255 if getresidual is None: getresidual = True
4256 if showprogress is None: showprogress = True
4257 if minnrow is None: minnrow = 1000
4258 if outlog is None: outlog = False
4259 if blfile is None: blfile = ''
4260 if csvformat is None: csvformat = False
4261 if bltable is None: bltable = ''
4262
4263 scsvformat = 'T' if csvformat else 'F'
4264
4265 if plot:
4266 outblfile = (blfile != "") and \
4267 os.path.exists(os.path.expanduser(
4268 os.path.expandvars(blfile))
4269 )
4270 if outblfile:
4271 blf = open(blfile, "a")
4272
4273 f = fitter()
4274 f.set_function(lpoly=order)
4275
4276 rows = xrange(workscan.nrow())
4277 #if len(rows) > 0: workscan._init_blinfo()
4278
4279 action = "H"
4280 for r in rows:
4281 f.x = workscan._getabcissa(r)
4282 f.y = workscan._getspectrum(r)
4283 if mask:
4284 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4285 else: # mask=None
4286 f.mask = workscan._getmask(r)
4287
4288 f.data = None
4289 f.fit()
4290
4291 if action != "Y": # skip plotting when accepting all
4292 f.plot(residual=True)
4293 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4294 #if accept_fit.upper() == "N":
4295 # #workscan._append_blinfo(None, None, None)
4296 # continue
4297 accept_fit = self._get_verify_action("Accept fit?",action)
4298 if r == 0: action = None
4299 if accept_fit.upper() == "N":
4300 continue
4301 elif accept_fit.upper() == "R":
4302 break
4303 elif accept_fit.upper() == "A":
4304 action = "Y"
4305
4306 blpars = f.get_parameters()
4307 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4308 #workscan._append_blinfo(blpars, masklist, f.mask)
4309 workscan._setspectrum((f.fitter.getresidual()
4310 if getresidual else f.fitter.getfit()), r)
4311
4312 if outblfile:
4313 rms = workscan.get_rms(f.mask, r)
4314 dataout = \
4315 workscan.format_blparams_row(blpars["params"],
4316 blpars["fixed"],
4317 rms, str(masklist),
4318 r, True, csvformat)
4319 blf.write(dataout)
4320
4321 f._p.unmap()
4322 f._p = None
4323
4324 if outblfile:
4325 blf.close()
4326 else:
4327 workscan._poly_baseline(mask, order,
4328 clipthresh, clipniter, #
4329 getresidual,
4330 pack_progress_params(showprogress,
4331 minnrow),
4332 outlog, scsvformat+blfile,
4333 bltable) #
4334
4335 workscan._add_history("poly_baseline", varlist)
4336
4337 if insitu:
4338 self._assign(workscan)
4339 else:
4340 return workscan
4341
4342 except RuntimeError, e:
4343 raise_fitting_failure_exception(e)
4344
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:       an optional mask retrieved from scantable
            order:      the order of the polynomial (default is 0)
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:  maximum number of iteration of 'clipthresh'-sigma
                        clipping (default is 0)
            edge:       an optional number of channel to drop at
                        the edge of spectrum. If only one value is
                        specified, the same number will be dropped
                        from both sides of the spectrum. Default
                        is to keep all channels. Nested tuples
                        represent individual edge selection for
                        different IFs (a number of spectral channels
                        can be different)
            threshold:  the threshold used by line finder. It is
                        better to keep it large as only strong lines
                        affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                        channels to average during the search of
                        weak and broad lines. The default is no
                        averaging (and no search for weak lines).
                        If such lines can affect the fitted baseline
                        (e.g. a high order polynomial is fitted),
                        increase this parameter (usually values up
                        to 8 are reasonable). Most users of this
                        method should find the default value sufficient.
            plot:       plot the fit and the residual. In this each
                        individual fit has to be approved, by typing 'y'
                        or 'n'
            getresidual: if False, returns best-fit values instead of
                        residual. (default is True)
            showprogress: show progress status for large data.
                        default is True.
            minnrow:    minimum number of input spectra to show.
                        default is 1000.
            outlog:     Output the coefficients of the best-fit
                        function to logger (default is False)
            blfile:     Name of a text file in which the best-fit
                        parameter values to be written
                        (default is "": no file/logger output)
            csvformat:  if True blfile is csv-formatted, default is False.
            bltable:    name of a baseline table where fitting results
                        (coefficients, rms, etc.) are to be written.
                        if given, fitting results will NOT be output to
                        scantable (insitu=True) or None will be
                        returned (insitu=False).
                        (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()   # snapshot of call parameters for history

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve defaults for all optional parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the csv flag is passed to C++ as a 'T'/'F' prefix of blfile
            scsvformat = 'T' if csvformat else 'F'

            # flatten edge into (start, end) pairs indexed per IF
            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: run linefinder + fitter row by row so
                # that each fit can be approved or rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                action = "H"   # 'H' forces a prompt on the first row
                for r in rows:
                    # pick this row's (start, end) edge pair from the
                    # flattened per-IF edge list
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is written, the scantable itself is
            # left untouched (insitu=True) or None is returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4529
4530 def _init_blinfo(self):
4531 """\
4532 Initialise the following three auxiliary members:
4533 blpars : parameters of the best-fit baseline,
4534 masklists : mask data (edge positions of masked channels) and
4535 actualmask : mask data (in boolean list),
4536 to keep for use later (including output to logger/text files).
4537 Used by poly_baseline() and auto_poly_baseline() in case of
4538 'plot=True'.
4539 """
4540 self.blpars = []
4541 self.masklists = []
4542 self.actualmask = []
4543 return
4544
4545 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4546 """\
4547 Append baseline-fitting related info to blpars, masklist and
4548 actualmask.
4549 """
4550 self.blpars.append(data_blpars)
4551 self.masklists.append(data_masklists)
4552 self.actualmask.append(data_actualmask)
4553 return
4554
4555 @asaplog_post_dec
4556 def rotate_linpolphase(self, angle):
4557 """\
4558 Rotate the phase of the complex polarization O=Q+iU correlation.
4559 This is always done in situ in the raw data. So if you call this
4560 function more than once then each call rotates the phase further.
4561
4562 Parameters:
4563
4564 angle: The angle (degrees) to rotate (add) by.
4565
4566 Example::
4567
4568 scan.rotate_linpolphase(2.3)
4569
4570 """
4571 varlist = vars()
4572 self._math._rotate_linpolphase(self, angle)
4573 self._add_history("rotate_linpolphase", varlist)
4574 return
4575
4576 @asaplog_post_dec
4577 def rotate_xyphase(self, angle):
4578 """\
4579 Rotate the phase of the XY correlation. This is always done in situ
4580 in the data. So if you call this function more than once
4581 then each call rotates the phase further.
4582
4583 Parameters:
4584
4585 angle: The angle (degrees) to rotate (add) by.
4586
4587 Example::
4588
4589 scan.rotate_xyphase(2.3)
4590
4591 """
4592 varlist = vars()
4593 self._math._rotate_xyphase(self, angle)
4594 self._add_history("rotate_xyphase", varlist)
4595 return
4596
4597 @asaplog_post_dec
4598 def swap_linears(self):
4599 """\
4600 Swap the linear polarisations XX and YY, or better the first two
4601 polarisations as this also works for ciculars.
4602 """
4603 varlist = vars()
4604 self._math._swap_linears(self)
4605 self._add_history("swap_linears", varlist)
4606 return
4607
4608 @asaplog_post_dec
4609 def invert_phase(self):
4610 """\
4611 Invert the phase of the complex polarisation
4612 """
4613 varlist = vars()
4614 self._math._invert_phase(self)
4615 self._add_history("invert_phase", varlist)
4616 return
4617
4618 @asaplog_post_dec
4619 def add(self, offset, insitu=None):
4620 """\
4621 Return a scan where all spectra have the offset added
4622
4623 Parameters:
4624
4625 offset: the offset
4626
4627 insitu: if False a new scantable is returned.
4628 Otherwise, the scaling is done in-situ
4629 The default is taken from .asaprc (False)
4630
4631 """
4632 if insitu is None: insitu = rcParams['insitu']
4633 self._math._setinsitu(insitu)
4634 varlist = vars()
4635 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4636 s._add_history("add", varlist)
4637 if insitu:
4638 self._assign(s)
4639 else:
4640 return s
4641
4642 @asaplog_post_dec
4643 def scale(self, factor, tsys=True, insitu=None):
4644 """\
4645
4646 Return a scan where all spectra are scaled by the given 'factor'
4647
4648 Parameters:
4649
4650 factor: the scaling factor (float or 1D float list)
4651
4652 insitu: if False a new scantable is returned.
4653 Otherwise, the scaling is done in-situ
4654 The default is taken from .asaprc (False)
4655
4656 tsys: if True (default) then apply the operation to Tsys
4657 as well as the data
4658
4659 """
4660 if insitu is None: insitu = rcParams['insitu']
4661 self._math._setinsitu(insitu)
4662 varlist = vars()
4663 s = None
4664 import numpy
4665 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4666 if isinstance(factor[0], list) or isinstance(factor[0],
4667 numpy.ndarray):
4668 from asapmath import _array2dOp
4669 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4670 else:
4671 s = scantable( self._math._arrayop( self, factor,
4672 "MUL", tsys ) )
4673 else:
4674 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4675 s._add_history("scale", varlist)
4676 if insitu:
4677 self._assign(s)
4678 else:
4679 return s
4680
4681 @preserve_selection
4682 def set_sourcetype(self, match, matchtype="pattern",
4683 sourcetype="reference"):
4684 """\
4685 Set the type of the source to be an source or reference scan
4686 using the provided pattern.
4687
4688 Parameters:
4689
4690 match: a Unix style pattern, regular expression or selector
4691
4692 matchtype: 'pattern' (default) UNIX style pattern or
4693 'regex' regular expression
4694
4695 sourcetype: the type of the source to use (source/reference)
4696
4697 """
4698 varlist = vars()
4699 stype = -1
4700 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4701 stype = 1
4702 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4703 stype = 0
4704 else:
4705 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4706 if matchtype.lower().startswith("p"):
4707 matchtype = "pattern"
4708 elif matchtype.lower().startswith("r"):
4709 matchtype = "regex"
4710 else:
4711 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4712 sel = selector()
4713 if isinstance(match, selector):
4714 sel = match
4715 else:
4716 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4717 self.set_selection(sel)
4718 self._setsourcetype(stype)
4719 self._add_history("set_sourcetype", varlist)
4720
4721
4722 def set_sourcename(self, name):
4723 varlist = vars()
4724 self._setsourcename(name)
4725 self._add_history("set_sourcename", varlist)
4726
4727 @asaplog_post_dec
4728 @preserve_selection
4729 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4730 """\
4731 This function allows to build quotients automatically.
4732 It assumes the observation to have the same number of
4733 "ons" and "offs"
4734
4735 Parameters:
4736
4737 preserve: you can preserve (default) the continuum or
4738 remove it. The equations used are
4739
4740 preserve: Output = Toff * (on/off) - Toff
4741
4742 remove: Output = Toff * (on/off) - Ton
4743
4744 mode: the on/off detection mode
4745 'paired' (default)
4746 identifies 'off' scans by the
4747 trailing '_R' (Mopra/Parkes) or
4748 '_e'/'_w' (Tid) and matches
4749 on/off pairs from the observing pattern
4750 'time'
4751 finds the closest off in time
4752
4753 .. todo:: verify argument is not implemented
4754
4755 """
4756 varlist = vars()
4757 modes = ["time", "paired"]
4758 if not mode in modes:
4759 msg = "please provide valid mode. Valid modes are %s" % (modes)
4760 raise ValueError(msg)
4761 s = None
4762 if mode.lower() == "paired":
4763 from asap._asap import srctype
4764 sel = self.get_selection()
4765 #sel.set_query("SRCTYPE==psoff")
4766 sel.set_types(srctype.psoff)
4767 self.set_selection(sel)
4768 offs = self.copy()
4769 #sel.set_query("SRCTYPE==pson")
4770 sel.set_types(srctype.pson)
4771 self.set_selection(sel)
4772 ons = self.copy()
4773 s = scantable(self._math._quotient(ons, offs, preserve))
4774 elif mode.lower() == "time":
4775 s = scantable(self._math._auto_quotient(self, mode, preserve))
4776 s._add_history("auto_quotient", varlist)
4777 return s
4778
4779 @asaplog_post_dec
4780 def mx_quotient(self, mask = None, weight='median', preserve=True):
4781 """\
4782 Form a quotient using "off" beams when observing in "MX" mode.
4783
4784 Parameters:
4785
4786 mask: an optional mask to be used when weight == 'stddev'
4787
4788 weight: How to average the off beams. Default is 'median'.
4789
4790 preserve: you can preserve (default) the continuum or
4791 remove it. The equations used are:
4792
4793 preserve: Output = Toff * (on/off) - Toff
4794
4795 remove: Output = Toff * (on/off) - Ton
4796
4797 """
4798 mask = mask or ()
4799 varlist = vars()
4800 on = scantable(self._math._mx_extract(self, 'on'))
4801 preoff = scantable(self._math._mx_extract(self, 'off'))
4802 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4803 from asapmath import quotient
4804 q = quotient(on, off, preserve)
4805 q._add_history("mx_quotient", varlist)
4806 return q
4807
4808 @asaplog_post_dec
4809 def freq_switch(self, insitu=None):
4810 """\
4811 Apply frequency switching to the data.
4812
4813 Parameters:
4814
4815 insitu: if False a new scantable is returned.
4816 Otherwise, the swictching is done in-situ
4817 The default is taken from .asaprc (False)
4818
4819 """
4820 if insitu is None: insitu = rcParams['insitu']
4821 self._math._setinsitu(insitu)
4822 varlist = vars()
4823 s = scantable(self._math._freqswitch(self))
4824 s._add_history("freq_switch", varlist)
4825 if insitu:
4826 self._assign(s)
4827 else:
4828 return s
4829
4830 @asaplog_post_dec
4831 def recalc_azel(self):
4832 """Recalculate the azimuth and elevation for each position."""
4833 varlist = vars()
4834 self._recalcazel()
4835 self._add_history("recalc_azel", varlist)
4836 return
4837
4838 @asaplog_post_dec
4839 def __add__(self, other):
4840 """
4841 implicit on all axes and on Tsys
4842 """
4843 varlist = vars()
4844 s = self.__op( other, "ADD" )
4845 s._add_history("operator +", varlist)
4846 return s
4847
4848 @asaplog_post_dec
4849 def __sub__(self, other):
4850 """
4851 implicit on all axes and on Tsys
4852 """
4853 varlist = vars()
4854 s = self.__op( other, "SUB" )
4855 s._add_history("operator -", varlist)
4856 return s
4857
4858 @asaplog_post_dec
4859 def __mul__(self, other):
4860 """
4861 implicit on all axes and on Tsys
4862 """
4863 varlist = vars()
4864 s = self.__op( other, "MUL" ) ;
4865 s._add_history("operator *", varlist)
4866 return s
4867
4868
4869 @asaplog_post_dec
4870 def __div__(self, other):
4871 """
4872 implicit on all axes and on Tsys
4873 """
4874 varlist = vars()
4875 s = self.__op( other, "DIV" )
4876 s._add_history("operator /", varlist)
4877 return s
4878
4879 @asaplog_post_dec
4880 def __op( self, other, mode ):
4881 s = None
4882 if isinstance(other, scantable):
4883 s = scantable(self._math._binaryop(self, other, mode))
4884 elif isinstance(other, float):
4885 if other == 0.0:
4886 raise ZeroDivisionError("Dividing by zero is not recommended")
4887 s = scantable(self._math._unaryop(self, other, mode, False))
4888 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4889 if isinstance(other[0], list) \
4890 or isinstance(other[0], numpy.ndarray):
4891 from asapmath import _array2dOp
4892 s = _array2dOp( self, other, mode, False )
4893 else:
4894 s = scantable( self._math._arrayop( self, other,
4895 mode, False ) )
4896 else:
4897 raise TypeError("Other input is not a scantable or float value")
4898 return s
4899
4900 @asaplog_post_dec
4901 def get_fit(self, row=0):
4902 """\
4903 Print or return the stored fits for a row in the scantable
4904
4905 Parameters:
4906
4907 row: the row which the fit has been applied to.
4908
4909 """
4910 if row > self.nrow():
4911 return
4912 from asap.asapfit import asapfit
4913 fit = asapfit(self._getfit(row))
4914 asaplog.push( '%s' %(fit) )
4915 return fit.as_dict()
4916
4917 @preserve_selection
4918 def flag_nans(self):
4919 """\
4920 Utility function to flag NaN values in the scantable.
4921 """
4922 import numpy
4923 basesel = self.get_selection()
4924 for i in range(self.nrow()):
4925 sel = self.get_row_selector(i)
4926 self.set_selection(basesel+sel)
4927 nans = numpy.isnan(self._getspectrum(0))
4928 if numpy.any(nans):
4929 bnans = [ bool(v) for v in nans]
4930 self.flag(bnans)
4931
4932 self.set_selection(basesel)
4933
4934 def get_row_selector(self, rowno):
4935 return selector(rows=[rowno])
4936
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable history recording a call of
        *funcname* with the given *parameters* dict (typically a vars()
        snapshot taken by the caller).  Fields are joined with '##';
        scantable values are recorded as the literal string 'scantable'
        and mask lists are compressed to [start, end) channel segments
        via _zip_mask.  Disabled via rcParams['scantable.history'].
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'):
            # drop the caller's 'self' so it is not serialised as a value
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nesting: record each item of a dict value
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4977
4978
4979 def _zip_mask(self, mask):
4980 mask = list(mask)
4981 i = 0
4982 segments = []
4983 while mask[i:].count(1):
4984 i += mask[i:].index(1)
4985 if mask[i:].count(0):
4986 j = i + mask[i:].index(0)
4987 else:
4988 j = len(mask)
4989 segments.append([i, j])
4990 i = j
4991 return segments
4992
4993 def _get_ordinate_label(self):
4994 fu = "("+self.get_fluxunit()+")"
4995 import re
4996 lbl = "Intensity"
4997 if re.match(".K.", fu):
4998 lbl = "Brightness Temperature "+ fu
4999 elif re.match(".Jy.", fu):
5000 lbl = "Flux density "+ fu
5001 return lbl
5002
5003 def _check_ifs(self):
5004# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5005 nchans = [self.nchan(i) for i in self.getifnos()]
5006 nchans = filter(lambda t: t > 0, nchans)
5007 return (sum(nchans)/len(nchans) == nchans[0])
5008
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable.

        Parameters:
            names:   list of file/directory names to import
            unit:    flux unit to set afterwards (None leaves it as-is)
            average: if True, time-average the integrations per scan
            opts:    extra options passed to the filler's open()

        Raises:
            IOError: if any of the given paths does not exist
        """
        first = True
        fullnames = []
        # expand env vars / '~' and verify every input exists up front
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type code: 1 -> disk based, 0 -> memory based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the filler matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge each subsequent file into the already-filled table
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5049
    def _get_verify_action( self, msg, action=None ):
        """\
        Ask the user (via raw_input) to verify an action and return one
        of 'Y', 'N', 'A' or 'R'.  If *action* already holds a non-empty
        answer string, it is normalised and returned without prompting.
        'H' or '?' prints a help text and prompts again; an empty answer
        and any unrecognised answer both default to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            # no usable preset answer -> prompt the user
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt after showing the help text
            return self._get_verify_action(msg)
        else:
            return 'Y'
5068
5069 def __getitem__(self, key):
5070 if key < 0:
5071 key += self.nrow()
5072 if key >= self.nrow():
5073 raise IndexError("Row index out of range.")
5074 return self._getspectrum(key)
5075
5076 def __setitem__(self, key, value):
5077 if key < 0:
5078 key += self.nrow()
5079 if key >= self.nrow():
5080 raise IndexError("Row index out of range.")
5081 if not hasattr(value, "__len__") or \
5082 len(value) > self.nchan(self.getif(key)):
5083 raise ValueError("Spectrum length doesn't match.")
5084 return self._setspectrum(value, key)
5085
5086 def __len__(self):
5087 return self.nrow()
5088
5089 def __iter__(self):
5090 for i in range(len(self)):
5091 yield self[i]
Note: See TracBrowser for help on using the repository browser.