source: trunk/python/scantable.py@ 2886

Last change on this file since 2886 was 2886, checked in by WataruKawasaki, 12 years ago

New Development: No

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: modified parse_spw_selection() so that it emits ValueError when invalid spw given.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 200.8 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The selection active before the call is captured up front and re-applied
    in a ``finally`` block, so it survives even when *func* raises.
    """
    @wraps_dec(func)
    def _restoring(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return _restoring
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info_path = filename + '/table.info'
    if not (os.path.isdir(filename)
            and os.path.exists(info_path)
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(info_path)
    first_line = f.readline()
    f.close()
    # NOTE(review): the optional group means a bare 'Type = ' line is also
    # accepted as a scantable -- presumably intentional; confirm upstream.
    return re.match('^Type = (Scantable)? *$', first_line) is not None
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    header = f.readline()
    f.close()
    # an MS advertises itself in the first line of table.info
    if header.find('Measurement Set') != -1:
        return True
    return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation: every element must be an int or a pair of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."


    # --- canonicalise the simple scalar / short-list shapes first ---
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # 'commonedge' means one pair applies to every IF; a list longer than
    # two, or any nested pair, selects the per-IF interpretation instead
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF case: flatten, expanding single ints to [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure.

    In verbose mode the error and an explanatory note are pushed to the
    log; otherwise a RuntimeError carrying both is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress options into a '<true|false>,<minnrow>' string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        # pack each entry; entries for rows outside the table come back
        # as '' and are dropped
        result = []
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                result.append(packed)
        return result
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Raises ValueError when a mandatory key ('row', 'blfunc', 'masklist',
    the function-specific parameter key) is missing, or when the clipping
    or linefinder parameters are only partially given.
    Returns '' when the entry's row is outside the scantable (row >= maxirow).
    """
    def _flatten_to_csv(val):
        # join a scalar, a sequence, or a one-level-nested sequence
        # into 'a,b,c' (nested pairs, e.g. masklist ranges, are expanded)
        if isinstance(val, (list, tuple)):
            parts = []
            for item in val:
                if isinstance(item, (list, tuple)):
                    parts.extend([str(sub) for sub in item])
                else:
                    parts.append(str(item))
            return ",".join(parts)
        return str(val)

    dinfo = {}
    for key in ['row', 'blfunc', 'masklist']:
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        dinfo[key] = _flatten_to_csv(val)

    # rows beyond the table are silently skipped by the caller
    if irow >= maxirow:
        return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _flatten_to_csv(blinfo[key])

    blfunc = dinfo['blfunc']
    # each baseline function has exactly one shape parameter
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _flatten_to_csv(blinfo[key])
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    results = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        results.append({'row': int(srow),
                        'coeff': [float(c) for c in scoeff.split(",")],
                        'rms': float(srms)})
    return results
302
def is_number(s):
    """Return True if the stripped string *s* parses as a float.

    The original used a bare ``except`` with a ``return`` inside
    ``finally``, which silently swallowed *any* exception; only float
    conversion failures are expected here, so catch those explicitly.
    (A non-string argument still raises AttributeError from strip(),
    matching the original behaviour.)
    """
    try:
        float(s.strip())
    except (TypeError, ValueError):
        return False
    return True
313
def is_frequency(s):
    """Return True when the stripped string ends with 'hz' (any case)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a numeric string *s1* and a frequency string *s2* (with an
    optional SI prefix before 'Hz') into a pair of floats in Hz."""
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    prefixes = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factors = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    candidate = s2[-3:-2]
    if is_number(candidate):
        # no SI prefix: a bare '...Hz' value
        return (float(s1), float(s2[:-2]))
    scale = factors[prefixes.index(candidate)]
    return (float(s1) * scale, float(s2[:-3]) * scale)
338
def is_velocity(s):
    """Return True when the stripped string ends with 'm/s' (any case)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a numeric string *s1* and a velocity string *s2* into a
    pair of floats in km/s (the default velocity unit)."""
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # only '.' (plain m/s) and 'k' (km/s) prefixes are recognised
    prefixes = [".", "k"]
    factors = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    candidate = s2[-4:-3]
    if is_number(candidate):
        # bare 'm/s' with no prefix letter
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    scale = factors[prefixes.index(candidate)]
    return (float(s1) * scale, float(s2[:-4]) * scale)
364
def get_frequency_by_velocity(restfreq, vel):
    """Return the sky frequency of *restfreq* Doppler-shifted by *vel*.

    *vel* is in km/s; the relativistic formula
    f = f0 * sqrt((1 - v/c) / (1 + v/c)) is used.
    """
    import math
    LIGHT_SPEED_KMS = 299792.458
    beta = vel / LIGHT_SPEED_KMS
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
374
375
376class scantable(Scantable):
377 """\
378 The ASAP container for scans (single-dish data).
379 """
380
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.

        Raises:
            IOError when the file does not exist or is not a valid input.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # captured for the history entry written at the end
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy constructor: share the underlying table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward the MS-only keywords
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other plain file is handed to the (NRO) filler
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                 and isinstance(filename[-1], str):
                # a list of raw files is filled into a single scantable
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
        self._add_history("scantable", varlist)
467
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Raises:
            IOError when the target exists and overwrite is False.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS output goes through the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats are handled by the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
526
527 def copy(self):
528 """Return a copy of this scantable.
529
530 *Note*:
531
532 This makes a full (deep) copy. scan2 = scan1 makes a reference.
533
534 Example::
535
536 copiedscan = scan.copy()
537
538 """
539 sd = scantable(Scantable._copy(self))
540 return sd
541
542 def drop_scan(self, scanid=None):
543 """\
544 Return a new scantable where the specified scan number(s) has(have)
545 been dropped.
546
547 Parameters:
548
549 scanid: a (list of) scan number(s)
550
551 """
552 from asap import _is_sequence_or_number as _is_valid
553 from asap import _to_list
554 from asap import unique
555 if not _is_valid(scanid):
556 raise RuntimeError( 'Please specify a scanno to drop from the'
557 ' scantable' )
558 scanid = _to_list(scanid)
559 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
560 for sid in scanid: allscans.remove(sid)
561 if len(allscans) == 0:
562 raise ValueError("Can't remove all scans")
563 sel = selector(scans=allscans)
564 return self._select_copy(sel)
565
566 def _select_copy(self, selection):
567 orig = self.get_selection()
568 self.set_selection(orig+selection)
569 cp = self.copy()
570 self.set_selection(orig)
571 return cp
572
573 def get_scan(self, scanid=None):
574 """\
575 Return a specific scan (by scanno) or collection of scans (by
576 source name) in a new scantable.
577
578 *Note*:
579
580 See scantable.drop_scan() for the inverse operation.
581
582 Parameters:
583
584 scanid: a (list of) scanno or a source name, unix-style
585 patterns are accepted for source name matching, e.g.
586 '*_R' gets all 'ref scans
587
588 Example::
589
590 # get all scans containing the source '323p459'
591 newscan = scan.get_scan('323p459')
592 # get all 'off' scans
593 refscans = scan.get_scan('*_R')
594 # get a susbset of scans by scanno (as listed in scan.summary())
595 newscan = scan.get_scan([0, 2, 7, 10])
596
597 """
598 if scanid is None:
599 raise RuntimeError( 'Please specify a scan no or name to '
600 'retrieve from the scantable' )
601 try:
602 bsel = self.get_selection()
603 sel = selector()
604 if type(scanid) is str:
605 sel.set_name(scanid)
606 return self._select_copy(sel)
607 elif type(scanid) is int:
608 sel.set_scans([scanid])
609 return self._select_copy(sel)
610 elif type(scanid) is list:
611 sel.set_scans(scanid)
612 return self._select_copy(sel)
613 else:
614 msg = "Illegal scanid type, use 'int' or 'list' if ints."
615 raise TypeError(msg)
616 except RuntimeError:
617 raise
618
619 def __str__(self):
620 tempFile = tempfile.NamedTemporaryFile()
621 Scantable._summary(self, tempFile.name)
622 tempFile.seek(0)
623 asaplog.clear()
624 return tempFile.file.read()
625
626 @asaplog_post_dec
627 def summary(self, filename=None):
628 """\
629 Print a summary of the contents of this scantable.
630
631 Parameters:
632
633 filename: the name of a file to write the putput to
634 Default - no file output
635
636 """
637 if filename is not None:
638 if filename is "":
639 filename = 'scantable_summary.txt'
640 from os.path import expandvars, isdir
641 filename = expandvars(filename)
642 if isdir(filename):
643 msg = "Illegal file name '%s'." % (filename)
644 raise IOError(msg)
645 else:
646 filename = ""
647 Scantable._summary(self, filename)
648
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        Returns:

             the spectral values of that row as a list of floats
        """
        return self._getspectrum(rowno)
658
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        Returns:

             the channel mask of that row as a list of booleans
        """
        return self._getmask(rowno)
668
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the current row in the scantable.

        Parameters:

             spec:   the new spectrum; its length must equal the channel
                     count of the row's IF

             rowno:  the row number to set the spectrum for

        """
        # the replacement spectrum must match the row's channel count
        assert(len(spec) == self.nchan(self.getif(rowno)))
        return self._setspectrum(spec, rowno)
681
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        Returns:

             a coordinate object wrapping the low-level coordinate
        """
        return coordinate(Scantable.get_coordinate(self, rowno))
704
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Returns a selector wrapping the low-level selection state.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)              # select IF 0
            scan.set_selection(sel)     # apply modified selection

        """
        return selector(self._getselection())
717
718 def set_selection(self, selection=None, **kw):
719 """\
720 Select a subset of the data. All following operations on this scantable
721 are only applied to thi selection.
722
723 Parameters:
724
725 selection: a selector object (default unset the selection), or
726 any combination of 'pols', 'ifs', 'beams', 'scans',
727 'cycles', 'name', 'query'
728
729 Examples::
730
731 sel = selector() # create a selection object
732 self.set_scans([0, 3]) # select SCANNO 0 and 3
733 scan.set_selection(sel) # set the selection
734 scan.summary() # will only print summary of scanno 0 an 3
735 scan.set_selection() # unset the selection
736 # or the equivalent
737 scan.set_selection(scans=[0,3])
738 scan.summary() # will only print summary of scanno 0 an 3
739 scan.set_selection() # unset the selection
740
741 """
742 if selection is None:
743 # reset
744 if len(kw) == 0:
745 selection = selector()
746 else:
747 # try keywords
748 for k in kw:
749 if k not in selector.fields:
750 raise KeyError("Invalid selection key '%s', "
751 "valid keys are %s" % (k,
752 selector.fields))
753 selection = selector(**kw)
754 self._setselection(selection)
755
756 def get_row(self, row=0, insitu=None):
757 """\
758 Select a row in the scantable.
759 Return a scantable with single row.
760
761 Parameters:
762
763 row: row no of integration, default is 0.
764 insitu: if False a new scantable is returned. Otherwise, the
765 scaling is done in-situ. The default is taken from .asaprc
766 (False)
767
768 """
769 if insitu is None:
770 insitu = rcParams['insitu']
771 if not insitu:
772 workscan = self.copy()
773 else:
774 workscan = self
775 # Select a row
776 sel = selector()
777 sel.set_rows([row])
778 workscan.set_selection(sel)
779 if not workscan.nrow() == 1:
780 msg = "Could not identify single row. %d rows selected." \
781 % (workscan.nrow())
782 raise RuntimeError(msg)
783 if insitu:
784 self._assign(workscan)
785 else:
786 return workscan
787
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Returns:

            a list of statistic values, one per processed row

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa (channel/freq/vel) of the
        # extremum instead of the data value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # for min/max we also need the channel of the extremum for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report; statvals is filled lazily for the
        # abscissa ('_abc') case inside this loop
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # the report is spooled through a per-user temp file before being
        # pushed to the log
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
895
896 def chan2data(self, rowno=0, chan=0):
897 """\
898 Returns channel/frequency/velocity and spectral value
899 at an arbitrary row and channel in the scantable.
900
901 Parameters:
902
903 rowno: a row number in the scantable. Default is the
904 first row, i.e. rowno=0
905
906 chan: a channel in the scantable. Default is the first
907 channel, i.e. pos=0
908
909 """
910 if isinstance(rowno, int) and isinstance(chan, int):
911 qx = {'unit': self.get_unit(),
912 'value': self._getabcissa(rowno)[chan]}
913 qy = {'unit': self.get_fluxunit(),
914 'value': self._getspectrum(rowno)[chan]}
915 return qx, qy
916
917 def stddev(self, mask=None):
918 """\
919 Determine the standard deviation of the current beam/if/pol
920 Takes a 'mask' as an optional parameter to specify which
921 channels should be excluded.
922
923 Parameters:
924
925 mask: an optional mask specifying where the standard
926 deviation should be determined.
927
928 Example::
929
930 scan.set_unit('channel')
931 msk = scan.create_mask([100, 200], [500, 600])
932 scan.stddev(mask=m)
933
934 """
935 return self.stats(stat='stddev', mask=mask);
936
937
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # wrap the low-level tuple-like result in a plain list
        return list(Scantable.get_column_names(self))
943
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        if row > -1:
            return self._get_column(self._gettsys, row)
        # for all rows, also push a formatted report to the log
        return self._row_callback(self._gettsys, "Tsys")
960
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        return self._get_column( self._gettsysspectrum, row )
975
976 def set_tsys(self, values, row=-1):
977 """\
978 Set the Tsys value(s) of the given 'row' or the whole scantable
979 (selection).
980
981 Parameters:
982
983 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
984 row: the row number to apply Tsys values to.
985 (default all rows)
986
987 """
988
989 if not hasattr(values, "__len__"):
990 values = [values]
991 self._settsys(values, row)
992
993 def get_weather(self, row=-1):
994 """\
995 Return the weather informations.
996
997 Parameters:
998
999 row: the rowno to get the information for. (default all rows)
1000
1001 Returns:
1002
1003 a dict or list of of dicts of values for the current selection
1004
1005 """
1006
1007 values = self._get_column(self._get_weather, row)
1008 if row > -1:
1009 return {'temperature': values[0],
1010 'pressure': values[1], 'humidity' : values[2],
1011 'windspeed' : values[3], 'windaz' : values[4]
1012 }
1013 else:
1014 out = []
1015 for r in values:
1016
1017 out.append({'temperature': r[0],
1018 'pressure': r[1], 'humidity' : r[2],
1019 'windspeed' : r[3], 'windaz' : r[4]
1020 })
1021 return out
1022
1023 def _row_callback(self, callback, label):
1024 out = ""
1025 outvec = []
1026 sep = '-'*50
1027 for i in range(self.nrow()):
1028 tm = self._gettime(i)
1029 src = self._getsourcename(i)
1030 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1031 out += 'Time[%s]:\n' % (tm)
1032 if self.nbeam(-1) > 1:
1033 out += ' Beam[%d] ' % (self.getbeam(i))
1034 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1035 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1036 outvec.append(callback(i))
1037 out += '= %3.3f\n' % (outvec[i])
1038 out += sep+'\n'
1039
1040 asaplog.push(sep)
1041 asaplog.push(" %s" % (label))
1042 asaplog.push(sep)
1043 asaplog.push(out)
1044 asaplog.post()
1045 return outvec
1046
1047 def _get_column(self, callback, row=-1, *args):
1048 """
1049 """
1050 if row == -1:
1051 return [callback(i, *args) for i in range(self.nrow())]
1052 else:
1053 if 0 <= row < self.nrow():
1054 return callback(row, *args)
1055
1056
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the trailing fields (.%f, %S, %M) that the chosen
            # precision does not include from the parse format
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1102
1103
1104 def get_inttime(self, row=-1):
1105 """\
1106 Get a list of integration times for the observations.
1107 Return a time in seconds for each integration in the scantable.
1108
1109 Parameters:
1110
1111 row: row no of integration. Default -1 return all rows.
1112
1113 """
1114 return self._get_column(self._getinttime, row)
1115
1116
1117 def get_sourcename(self, row=-1):
1118 """\
1119 Get a list source names for the observations.
1120 Return a string for each integration in the scantable.
1121 Parameters:
1122
1123 row: row no of integration. Default -1 return all rows.
1124
1125 """
1126 return self._get_column(self._getsourcename, row)
1127
1128 def get_elevation(self, row=-1):
1129 """\
1130 Get a list of elevations for the observations.
1131 Return a float for each integration in the scantable.
1132
1133 Parameters:
1134
1135 row: row no of integration. Default -1 return all rows.
1136
1137 """
1138 return self._get_column(self._getelevation, row)
1139
1140 def get_azimuth(self, row=-1):
1141 """\
1142 Get a list of azimuths for the observations.
1143 Return a float for each integration in the scantable.
1144
1145 Parameters:
1146 row: row no of integration. Default -1 return all rows.
1147
1148 """
1149 return self._get_column(self._getazimuth, row)
1150
1151 def get_parangle(self, row=-1):
1152 """\
1153 Get a list of parallactic angles for the observations.
1154 Return a float for each integration in the scantable.
1155
1156 Parameters:
1157
1158 row: row no of integration. Default -1 return all rows.
1159
1160 """
1161 return self._get_column(self._getparangle, row)
1162
1163 def get_direction(self, row=-1):
1164 """
1165 Get a list of Positions on the sky (direction) for the observations.
1166 Return a string for each integration in the scantable.
1167
1168 Parameters:
1169
1170 row: row no of integration. Default -1 return all rows
1171
1172 """
1173 return self._get_column(self._getdirection, row)
1174
1175 def get_directionval(self, row=-1):
1176 """\
1177 Get a list of Positions on the sky (direction) for the observations.
1178 Return a float for each integration in the scantable.
1179
1180 Parameters:
1181
1182 row: row no of integration. Default -1 return all rows
1183
1184 """
1185 return self._get_column(self._getdirectionvec, row)
1186
1187 @asaplog_post_dec
1188 def set_unit(self, unit='channel'):
1189 """\
1190 Set the unit for all following operations on this scantable
1191
1192 Parameters:
1193
1194 unit: optional unit, default is 'channel'. Use one of '*Hz',
1195 'km/s', 'channel' or equivalent ''
1196
1197 """
1198 varlist = vars()
1199 if unit in ['', 'pixel', 'channel']:
1200 unit = ''
1201 inf = list(self._getcoordinfo())
1202 inf[0] = unit
1203 self._setcoordinfo(inf)
1204 self._add_history("set_unit", varlist)
1205
1206 @asaplog_post_dec
1207 def set_instrument(self, instr):
1208 """\
1209 Set the instrument for subsequent processing.
1210
1211 Parameters:
1212
1213 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1214 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1215
1216 """
1217 self._setInstrument(instr)
1218 self._add_history("set_instument", vars())
1219
1220 @asaplog_post_dec
1221 def set_feedtype(self, feedtype):
1222 """\
1223 Overwrite the feed type, which might not be set correctly.
1224
1225 Parameters:
1226
1227 feedtype: 'linear' or 'circular'
1228
1229 """
1230 self._setfeedtype(feedtype)
1231 self._add_history("set_feedtype", vars())
1232
1233 @asaplog_post_dec
1234 def set_doppler(self, doppler='RADIO'):
1235 """\
1236 Set the doppler for all following operations on this scantable.
1237
1238 Parameters:
1239
1240 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1241
1242 """
1243 varlist = vars()
1244 inf = list(self._getcoordinfo())
1245 inf[2] = doppler
1246 self._setcoordinfo(inf)
1247 self._add_history("set_doppler", vars())
1248
1249 @asaplog_post_dec
1250 def set_freqframe(self, frame=None):
1251 """\
1252 Set the frame type of the Spectral Axis.
1253
1254 Parameters:
1255
1256 frame: an optional frame type, default 'LSRK'. Valid frames are:
1257 'TOPO', 'LSRD', 'LSRK', 'BARY',
1258 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1259
1260 Example::
1261
1262 scan.set_freqframe('BARY')
1263
1264 """
1265 frame = frame or rcParams['scantable.freqframe']
1266 varlist = vars()
1267 # "REST" is not implemented in casacore
1268 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1269 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1270 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1271 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1272
1273 if frame in valid:
1274 inf = list(self._getcoordinfo())
1275 inf[1] = frame
1276 self._setcoordinfo(inf)
1277 self._add_history("set_freqframe", varlist)
1278 else:
1279 msg = "Please specify a valid freq type. Valid types are:\n", valid
1280 raise TypeError(msg)
1281
1282 @asaplog_post_dec
1283 def set_dirframe(self, frame=""):
1284 """\
1285 Set the frame type of the Direction on the sky.
1286
1287 Parameters:
1288
1289 frame: an optional frame type, default ''. Valid frames are:
1290 'J2000', 'B1950', 'GALACTIC'
1291
1292 Example:
1293
1294 scan.set_dirframe('GALACTIC')
1295
1296 """
1297 varlist = vars()
1298 Scantable.set_dirframe(self, frame)
1299 self._add_history("set_dirframe", varlist)
1300
1301 def get_unit(self):
1302 """\
1303 Get the default unit set in this scantable
1304
1305 Returns:
1306
1307 A unit string
1308
1309 """
1310 inf = self._getcoordinfo()
1311 unit = inf[0]
1312 if unit == '': unit = 'channel'
1313 return unit
1314
1315 @asaplog_post_dec
1316 def get_abcissa(self, rowno=0):
1317 """\
1318 Get the abcissa in the current coordinate setup for the currently
1319 selected Beam/IF/Pol
1320
1321 Parameters:
1322
1323 rowno: an optional row number in the scantable. Default is the
1324 first row, i.e. rowno=0
1325
1326 Returns:
1327
1328 The abcissa values and the format string (as a dictionary)
1329
1330 """
1331 abc = self._getabcissa(rowno)
1332 lbl = self._getabcissalabel(rowno)
1333 return abc, lbl
1334
1335 @asaplog_post_dec
1336 def flag(self, mask=None, unflag=False, row=-1):
1337 """\
1338 Flag the selected data using an optional channel mask.
1339
1340 Parameters:
1341
1342 mask: an optional channel mask, created with create_mask. Default
1343 (no mask) is all channels.
1344
1345 unflag: if True, unflag the data
1346
1347 row: an optional row number in the scantable.
1348 Default -1 flags all rows
1349
1350 """
1351 varlist = vars()
1352 mask = mask or []
1353 self._flag(row, mask, unflag)
1354 self._add_history("flag", varlist)
1355
1356 @asaplog_post_dec
1357 def flag_row(self, rows=None, unflag=False):
1358 """\
1359 Flag the selected data in row-based manner.
1360
1361 Parameters:
1362
1363 rows: list of row numbers to be flagged. Default is no row
1364 (must be explicitly specified to execute row-based
1365 flagging).
1366
1367 unflag: if True, unflag the data.
1368
1369 """
1370 varlist = vars()
1371 if rows is None:
1372 rows = []
1373 self._flag_row(rows, unflag)
1374 self._add_history("flag_row", varlist)
1375
1376 @asaplog_post_dec
1377 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1378 """\
1379 Flag the selected data outside a specified range (in channel-base)
1380
1381 Parameters:
1382
1383 uthres: upper threshold.
1384
1385 dthres: lower threshold
1386
1387 clipoutside: True for flagging data outside the range
1388 [dthres:uthres].
1389 False for flagging data inside the range.
1390
1391 unflag: if True, unflag the data.
1392
1393 """
1394 varlist = vars()
1395 self._clip(uthres, dthres, clipoutside, unflag)
1396 self._add_history("clip", varlist)
1397
1398 @asaplog_post_dec
1399 def lag_flag(self, start, end, unit="MHz", insitu=None):
1400 """\
1401 Flag the data in 'lag' space by providing a frequency to remove.
1402 Flagged data in the scantable get interpolated over the region.
1403 No taper is applied.
1404
1405 Parameters:
1406
1407 start: the start frequency (really a period within the
1408 bandwidth) or period to remove
1409
1410 end: the end frequency or period to remove
1411
1412 unit: the frequency unit (default 'MHz') or '' for
1413 explicit lag channels
1414
1415 *Notes*:
1416
1417 It is recommended to flag edges of the band or strong
1418 signals beforehand.
1419
1420 """
1421 if insitu is None: insitu = rcParams['insitu']
1422 self._math._setinsitu(insitu)
1423 varlist = vars()
1424 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1425 if not (unit == "" or base.has_key(unit)):
1426 raise ValueError("%s is not a valid unit." % unit)
1427 if unit == "":
1428 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1429 else:
1430 s = scantable(self._math._lag_flag(self, start*base[unit],
1431 end*base[unit], "frequency"))
1432 s._add_history("lag_flag", varlist)
1433 if insitu:
1434 self._assign(s)
1435 else:
1436 return s
1437
1438 @asaplog_post_dec
1439 def fft(self, rowno=None, mask=None, getrealimag=False):
1440 """\
1441 Apply FFT to the spectra.
1442 Flagged data in the scantable get interpolated over the region.
1443
1444 Parameters:
1445
1446 rowno: The row number(s) to be processed. int, list
1447 and tuple are accepted. By default (None), FFT
1448 is applied to the whole data.
1449
1450 mask: Auxiliary channel mask(s). Given as a boolean
1451 list, it is applied to all specified rows.
1452 A list of boolean lists can also be used to
1453 apply different masks. In the latter case, the
1454 length of 'mask' must be the same as that of
1455 'rowno'. The default is None.
1456
1457 getrealimag: If True, returns the real and imaginary part
1458 values of the complex results.
1459 If False (the default), returns the amplitude
1460 (absolute value) normalised with Ndata/2 and
1461 phase (argument, in unit of radian).
1462
1463 Returns:
1464
1465 A list of dictionaries containing the results for each spectrum.
1466 Each dictionary contains two values, the real and the imaginary
1467 parts when getrealimag = True, or the amplitude(absolute value)
1468 and the phase(argument) when getrealimag = False. The key for
1469 these values are 'real' and 'imag', or 'ampl' and 'phase',
1470 respectively.
1471 """
1472 if rowno is None:
1473 rowno = []
1474 if isinstance(rowno, int):
1475 rowno = [rowno]
1476 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1477 raise TypeError("The row number(s) must be int, list or tuple.")
1478 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1479
1480 usecommonmask = True
1481
1482 if mask is None:
1483 mask = []
1484 if isinstance(mask, list) or isinstance(mask, tuple):
1485 if len(mask) == 0:
1486 mask = [[]]
1487 else:
1488 if isinstance(mask[0], bool):
1489 if len(mask) != self.nchan(self.getif(rowno[0])):
1490 raise ValueError("The spectra and the mask have "
1491 "different length.")
1492 mask = [mask]
1493 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1494 usecommonmask = False
1495 if len(mask) != len(rowno):
1496 raise ValueError("When specifying masks for each "
1497 "spectrum, the numbers of them "
1498 "must be identical.")
1499 for i in xrange(mask):
1500 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1501 raise ValueError("The spectra and the mask have "
1502 "different length.")
1503 else:
1504 raise TypeError("The mask must be a boolean list or "
1505 "a list of boolean list.")
1506 else:
1507 raise TypeError("The mask must be a boolean list or a list of "
1508 "boolean list.")
1509
1510 res = []
1511
1512 imask = 0
1513 for whichrow in rowno:
1514 fspec = self._fft(whichrow, mask[imask], getrealimag)
1515 nspec = len(fspec)
1516
1517 i = 0
1518 v1 = []
1519 v2 = []
1520 reselem = {"real":[],"imag":[]} if getrealimag \
1521 else {"ampl":[],"phase":[]}
1522
1523 while (i < nspec):
1524 v1.append(fspec[i])
1525 v2.append(fspec[i+1])
1526 i += 2
1527
1528 if getrealimag:
1529 reselem["real"] += v1
1530 reselem["imag"] += v2
1531 else:
1532 reselem["ampl"] += v1
1533 reselem["phase"] += v2
1534
1535 res.append(reselem)
1536
1537 if not usecommonmask:
1538 imask += 1
1539
1540 return res
1541
1542 @asaplog_post_dec
1543 def create_mask(self, *args, **kwargs):
1544 """\
1545 Compute and return a mask based on [min, max] windows.
1546 The specified windows are to be INCLUDED, when the mask is
1547 applied.
1548
1549 Parameters:
1550
1551 [min, max], [min2, max2], ...
1552 Pairs of start/end points (inclusive)specifying the regions
1553 to be masked
1554
1555 invert: optional argument. If specified as True,
1556 return an inverted mask, i.e. the regions
1557 specified are EXCLUDED
1558
1559 row: create the mask using the specified row for
1560 unit conversions, default is row=0
1561 only necessary if frequency varies over rows.
1562
1563 Examples::
1564
1565 scan.set_unit('channel')
1566 # a)
1567 msk = scan.create_mask([400, 500], [800, 900])
1568 # masks everything outside 400 and 500
1569 # and 800 and 900 in the unit 'channel'
1570
1571 # b)
1572 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1573 # masks the regions between 400 and 500
1574 # and 800 and 900 in the unit 'channel'
1575
1576 # c)
1577 #mask only channel 400
1578 msk = scan.create_mask([400])
1579
1580 """
1581 row = kwargs.get("row", 0)
1582 data = self._getabcissa(row)
1583 u = self._getcoordinfo()[0]
1584 if u == "":
1585 u = "channel"
1586 msg = "The current mask window unit is %s" % u
1587 i = self._check_ifs()
1588 if not i:
1589 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1590 asaplog.push(msg)
1591 n = len(data)
1592 msk = _n_bools(n, False)
1593 # test if args is a 'list' or a 'normal *args - UGLY!!!
1594
1595 ws = (isinstance(args[-1][-1], int)
1596 or isinstance(args[-1][-1], float)) and args or args[0]
1597 for window in ws:
1598 if len(window) == 1:
1599 window = [window[0], window[0]]
1600 if len(window) == 0 or len(window) > 2:
1601 raise ValueError("A window needs to be defined as "
1602 "[start(, end)]")
1603 if window[0] > window[1]:
1604 tmp = window[0]
1605 window[0] = window[1]
1606 window[1] = tmp
1607 for i in range(n):
1608 if data[i] >= window[0] and data[i] <= window[1]:
1609 msk[i] = True
1610 if kwargs.has_key('invert'):
1611 if kwargs.get('invert'):
1612 msk = mask_not(msk)
1613 return msk
1614
1615 def get_masklist(self, mask=None, row=0, silent=False):
1616 """\
1617 Compute and return a list of mask windows, [min, max].
1618
1619 Parameters:
1620
1621 mask: channel mask, created with create_mask.
1622
1623 row: calcutate the masklist using the specified row
1624 for unit conversions, default is row=0
1625 only necessary if frequency varies over rows.
1626
1627 Returns:
1628
1629 [min, max], [min2, max2], ...
1630 Pairs of start/end points (inclusive)specifying
1631 the masked regions
1632
1633 """
1634 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1635 raise TypeError("The mask should be list or tuple.")
1636 if len(mask) <= 0:
1637 raise TypeError("The mask elements should be > 0")
1638 data = self._getabcissa(row)
1639 if len(data) != len(mask):
1640 msg = "Number of channels in scantable != number of mask elements"
1641 raise TypeError(msg)
1642 u = self._getcoordinfo()[0]
1643 if u == "":
1644 u = "channel"
1645 msg = "The current mask window unit is %s" % u
1646 i = self._check_ifs()
1647 if not i:
1648 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1649 if not silent:
1650 asaplog.push(msg)
1651 masklist = []
1652 ist, ien = None, None
1653 ist, ien=self.get_mask_indices(mask)
1654 if ist is not None and ien is not None:
1655 for i in xrange(len(ist)):
1656 range=[data[ist[i]],data[ien[i]]]
1657 range.sort()
1658 masklist.append([range[0],range[1]])
1659 return masklist
1660
1661 def get_mask_indices(self, mask=None):
1662 """\
1663 Compute and Return lists of mask start indices and mask end indices.
1664
1665 Parameters:
1666
1667 mask: channel mask, created with create_mask.
1668
1669 Returns:
1670
1671 List of mask start indices and that of mask end indices,
1672 i.e., [istart1,istart2,....], [iend1,iend2,....].
1673
1674 """
1675 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1676 raise TypeError("The mask should be list or tuple.")
1677 if len(mask) <= 0:
1678 raise TypeError("The mask elements should be > 0")
1679 istart = []
1680 iend = []
1681 if mask[0]:
1682 istart.append(0)
1683 for i in range(len(mask)-1):
1684 if not mask[i] and mask[i+1]:
1685 istart.append(i+1)
1686 elif mask[i] and not mask[i+1]:
1687 iend.append(i)
1688 if mask[len(mask)-1]:
1689 iend.append(len(mask)-1)
1690 if len(istart) != len(iend):
1691 raise RuntimeError("Numbers of mask start != mask end.")
1692 for i in range(len(istart)):
1693 if istart[i] > iend[i]:
1694 raise RuntimeError("Mask start index > mask end index")
1695 break
1696 return istart,iend
1697
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """\
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.

            restfreq, frame, doppler : optional overrides applied for the
                           duration of the parse; the scantable's original
                           settings are restored before returning.

        Returns:
            A dictionary of selected (valid) spw and masklist pairs,
            e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}

        Raises:
            ValueError when an invalid spw is given (CAS-5859) and
            RuntimeError for malformed selection expressions.
        """
        # NOTE(review): assumes asaplog.post("ERROR") aborts execution;
        # otherwise a non-string argument would fall through to split()
        # below - confirm.
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # parse in 'channel' units; unit/restfreq/frame/doppler are
        # restored at the end of this method
        orig_unit = self.get_unit()
        self.set_unit('channel')

        orig_restfreq = self.get_restfreqs()
        orig_restfreq_list = []
        for i in orig_restfreq.keys():
            if len(orig_restfreq[i]) == 1:
                orig_restfreq_list.append(orig_restfreq[i][0])
            else:
                orig_restfreq_list.append(orig_restfreq[i])

        orig_coord = self._getcoordinfo()
        orig_frame = orig_coord[1]
        orig_doppler = orig_coord[2]

        if restfreq is None: restfreq = orig_restfreq_list
        if frame is None: frame = orig_frame
        if doppler is None: doppler = orig_doppler

        self.set_restfreqs(restfreq)
        self.set_freqframe(frame)
        self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # each split length is 1 (separator absent) or 2 (present
                # once); the product classifies which separator was used
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        # checking if the given number is valid for spw ID;
                        # index() raises ValueError for an invalid spw
                        # (intended behaviour, CAS-5859)
                        idx = valid_ifs.index(int(scs_elem))
                        spw_list.append(valid_ifs[idx])

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appers just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                # non-positive product <=> the requested pixel
                                # range overlaps the spw's channel range
                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                # overlap test, as in the frequency branch
                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if not found:
                        raise RuntimeError("Invalid spw value.")

                    semicolon_sep = colon_sep[1].split(";")
                    for scs_elem in semicolon_sep:
                        scs_elem = scs_elem.strip()

                        ti_sep = scs_elem.split("~")
                        ti_sep_length = len(ti_sep)

                        if (ti_sep_length > 2):
                            raise RuntimeError("Invalid channel selection.")

                        elif (ti_sep_length == 1):
                            if (scs_elem == "") or (scs_elem == "*"):
                                # '' and '*' for all channels
                                crange_list = [[pmin, pmax]]
                                break
                            elif (is_number(scs_elem)):
                                # single channel given
                                crange_list.append([float(scs_elem), float(scs_elem)])
                            else:
                                raise RuntimeError("Invalid channel selection.")

                        else: #(ti_sep_length == 2)
                            expr0 = ti_sep[0].strip()
                            expr1 = ti_sep[1].strip()

                            if is_number(expr0) and is_number(expr1):
                                # 'a~b'
                                expr_pmin = min(float(expr0), float(expr1))
                                expr_pmax = max(float(expr0), float(expr1))

                            elif is_number(expr0) and is_frequency(expr1):
                                # 'a~b*Hz'
                                (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                expr_p0 = coord.to_pixel(expr_f0)
                                expr_p1 = coord.to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                            elif is_number(expr0) and is_velocity(expr1):
                                # 'a~b*m/s'
                                # NOTE(review): uses the first rest frequency
                                # for all spws - confirm this is intended
                                restf = self.get_restfreqs().values()[0][0]
                                (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                expr_f0 = get_frequency_by_velocity(restf, expr_v0)
                                expr_f1 = get_frequency_by_velocity(restf, expr_v1)
                                expr_p0 = coord.to_pixel(expr_f0)
                                expr_p1 = coord.to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                            else:
                                # cases such as 'aGHz~bkm/s' are not allowed now
                                raise RuntimeError("Invalid channel selection.")

                            cmin = max(pmin, expr_pmin)
                            cmax = min(pmax, expr_pmax)
                            # if the given range of channel selection has overwrap with
                            # that of current spw, output the overwrap area
                            # (rounded to the nearest integer channel).
                            if (cmin <= cmax):
                                cmin = float(int(cmin + 0.5))
                                cmax = float(int(cmax + 0.5))
                                crange_list.append([cmin, cmax])

                if (len(crange_list) == 0):
                    crange_list.append([])

                if res.has_key(spw):
                    res[spw].extend(crange_list)
                else:
                    res[spw] = crange_list

        # restore original values
        self.set_restfreqs(orig_restfreq_list)
        self.set_freqframe(orig_frame)
        self.set_doppler(orig_doppler)
        self.set_unit(orig_unit)

        return res
2026
2027 @asaplog_post_dec
2028 def get_first_rowno_by_if(self, ifno):
2029 found = False
2030 for irow in xrange(self.nrow()):
2031 if (self.getif(irow) == ifno):
2032 res = irow
2033 found = True
2034 break
2035
2036 if not found: raise RuntimeError("Invalid IF value.")
2037
2038 return res
2039
2040 @asaplog_post_dec
2041 def _get_coordinate_list(self):
2042 res = []
2043 spws = self.getifnos()
2044 for spw in spws:
2045 elem = {}
2046 elem['if'] = spw
2047 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2048 res.append(elem)
2049
2050 return res
2051
2052##################################
2053
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
            A dictionary of selected (valid) IF and masklist pairs,
            e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]},
            or None when nothing valid was selected.
        """
        # NOTE(review): assumes asaplog.post("ERROR") aborts execution;
        # otherwise a non-string would reach the split below - confirm.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row of this IF to read its coordinate
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # half a channel width, used to make '<'/'>'
                        # selections exclusive of the boundary channel
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2191
2192 @asaplog_post_dec
2193 def parse_idx_selection(self, mode, selexpr):
2194 """
2195 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2196 BEAMNO, and row number
2197
2198 Parameters:
2199 mode : which column to select.
2200 ['scan',|'if'|'pol'|'beam'|'row']
2201 selexpr : A comma separated selection expression.
2202 examples:
2203 '' = all (returns [])
2204 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2205 (returns [0,1,4,5,6,9])
2206 Returns:
2207 A List of selected indices
2208 """
2209 if selexpr == "":
2210 return []
2211 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2212 'b': 'beam', 'r': 'row'}
2213 smode = mode.lower()[0]
2214 if not (smode in valid_modes.keys()):
2215 msg = "Invalid mode '%s'. Valid modes are %s" %\
2216 (mode, str(valid_modes.values()))
2217 asaplog.post()
2218 asaplog.push(msg)
2219 asaplog.post("ERROR")
2220 mode = valid_modes[smode]
2221 minidx = None
2222 maxidx = None
2223 if smode == 'r':
2224 minidx = 0
2225 maxidx = self.nrow()
2226 else:
2227 idx = getattr(self,"get"+mode+"nos")()
2228 minidx = min(idx)
2229 maxidx = max(idx)
2230 del idx
2231 # split selexpr by "<spaces>,<spaces>"
2232 comma_sep = re.compile('\s*,\s*')
2233 sellist = comma_sep.split(selexpr)
2234 idxlist = []
2235 for currselstr in sellist:
2236 # single range (may include ~, < or >)
2237 currlist = self._parse_selection(currselstr, typestr='integer',
2238 minval=minidx,maxval=maxidx)
2239 for thelist in currlist:
2240 idxlist += range(thelist[0],thelist[1]+1)
2241 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2242 asaplog.push(msg)
2243 return idxlist
2244
2245 def _parse_selection(self, selstr, typestr='float', offset=0.,
2246 minval=None, maxval=None):
2247 """
2248 Parameters:
2249 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2250 typestr : The type of the values in returned list
2251 ('integer' or 'float')
2252 offset : The offset value to subtract from or add to
2253 the boundary value if the selection string
2254 includes '<' or '>' [Valid only for typestr='float']
2255 minval, maxval : The minimum/maximum values to set if the
2256 selection string includes '<' or '>'.
2257 The list element is filled with None by default.
2258 Returns:
2259 A list of min/max pair of selections.
2260 Example:
2261 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2262 --> returns [[0,2],[5,7],[9,9]]
2263 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2264 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2265 """
2266 # split selstr by '<spaces>;<spaces>'
2267 semi_sep = re.compile('\s*;\s*')
2268 selgroups = semi_sep.split(selstr)
2269 sellists = []
2270 if typestr.lower().startswith('int'):
2271 formatfunc = int
2272 offset = 1
2273 else:
2274 formatfunc = float
2275
2276 for currsel in selgroups:
2277 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2278 minsel = minval
2279 maxsel = maxval
2280 if currsel.find('~') > 0:
2281 # val0 <= x <= val1
2282 minsel = formatfunc(currsel.split('~')[0].strip())
2283 maxsel = formatfunc(currsel.split('~')[1].strip())
2284 elif currsel.strip().find('<=') > -1:
2285 bound = currsel.split('<=')
2286 try: # try "x <= val"
2287 minsel = minval
2288 maxsel = formatfunc(bound[1].strip())
2289 except ValueError: # now "val <= x"
2290 minsel = formatfunc(bound[0].strip())
2291 maxsel = maxval
2292 elif currsel.strip().find('>=') > -1:
2293 bound = currsel.split('>=')
2294 try: # try "x >= val"
2295 minsel = formatfunc(bound[1].strip())
2296 maxsel = maxval
2297 except ValueError: # now "val >= x"
2298 minsel = minval
2299 maxsel = formatfunc(bound[0].strip())
2300 elif currsel.strip().find('<') > -1:
2301 bound = currsel.split('<')
2302 try: # try "x < val"
2303 minsel = minval
2304 maxsel = formatfunc(bound[1].strip()) \
2305 - formatfunc(offset)
2306 except ValueError: # now "val < x"
2307 minsel = formatfunc(bound[0].strip()) \
2308 + formatfunc(offset)
2309 maxsel = maxval
2310 elif currsel.strip().find('>') > -1:
2311 bound = currsel.split('>')
2312 try: # try "x > val"
2313 minsel = formatfunc(bound[1].strip()) \
2314 + formatfunc(offset)
2315 maxsel = maxval
2316 except ValueError: # now "val > x"
2317 minsel = minval
2318 maxsel = formatfunc(bound[0].strip()) \
2319 - formatfunc(offset)
2320 else:
2321 minsel = formatfunc(currsel)
2322 maxsel = formatfunc(currsel)
2323 sellists.append([minsel,maxsel])
2324 return sellists
2325
2326# def get_restfreqs(self):
2327# """
2328# Get the restfrequency(s) stored in this scantable.
2329# The return value(s) are always of unit 'Hz'
2330# Parameters:
2331# none
2332# Returns:
2333# a list of doubles
2334# """
2335# return list(self._getrestfreqs())
2336
2337 def get_restfreqs(self, ids=None):
2338 """\
2339 Get the restfrequency(s) stored in this scantable.
2340 The return value(s) are always of unit 'Hz'
2341
2342 Parameters:
2343
2344 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2345 be retrieved
2346
2347 Returns:
2348
2349 dictionary containing ids and a list of doubles for each id
2350
2351 """
2352 if ids is None:
2353 rfreqs = {}
2354 idlist = self.getmolnos()
2355 for i in idlist:
2356 rfreqs[i] = list(self._getrestfreqs(i))
2357 return rfreqs
2358 else:
2359 if type(ids) == list or type(ids) == tuple:
2360 rfreqs = {}
2361 for i in ids:
2362 rfreqs[i] = list(self._getrestfreqs(i))
2363 return rfreqs
2364 else:
2365 return list(self._getrestfreqs(ids))
2366
2367 @asaplog_post_dec
2368 def set_restfreqs(self, freqs=None, unit='Hz'):
2369 """\
2370 Set or replace the restfrequency specified and
2371 if the 'freqs' argument holds a scalar,
2372 then that rest frequency will be applied to all the selected
2373 data. If the 'freqs' argument holds
2374 a vector, then it MUST be of equal or smaller length than
2375 the number of IFs (and the available restfrequencies will be
2376 replaced by this vector). In this case, *all* data have
2377 the restfrequency set per IF according
2378 to the corresponding value you give in the 'freqs' vector.
2379 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2380 IF 1 gets restfreq 2e9.
2381
2382 You can also specify the frequencies via a linecatalog.
2383
2384 Parameters:
2385
2386 freqs: list of rest frequency values or string idenitfiers
2387
2388 unit: unit for rest frequency (default 'Hz')
2389
2390
2391 Example::
2392
2393 # set the given restfrequency for the all currently selected IFs
2394 scan.set_restfreqs(freqs=1.4e9)
2395 # set restfrequencies for the n IFs (n > 1) in the order of the
2396 # list, i.e
2397 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2398 # len(list_of_restfreqs) == nIF
2399 # for nIF == 1 the following will set multiple restfrequency for
2400 # that IF
2401 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2402 # set multiple restfrequencies per IF. as a list of lists where
2403 # the outer list has nIF elements, the inner s arbitrary
2404 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2405
2406 *Note*:
2407
2408 To do more sophisticate Restfrequency setting, e.g. on a
2409 source and IF basis, use scantable.set_selection() before using
2410 this function::
2411
2412 # provided your scantable is called scan
2413 selection = selector()
2414 selection.set_name('ORION*')
2415 selection.set_ifs([1])
2416 scan.set_selection(selection)
2417 scan.set_restfreqs(freqs=86.6e9)
2418
2419 """
2420 varlist = vars()
2421 from asap import linecatalog
2422 # simple value
2423 if isinstance(freqs, int) or isinstance(freqs, float):
2424 self._setrestfreqs([freqs], [""], unit)
2425 # list of values
2426 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2427 # list values are scalars
2428 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2429 if len(freqs) == 1:
2430 self._setrestfreqs(freqs, [""], unit)
2431 else:
2432 # allow the 'old' mode of setting mulitple IFs
2433 savesel = self._getselection()
2434 sel = self.get_selection()
2435 iflist = self.getifnos()
2436 if len(freqs)>len(iflist):
2437 raise ValueError("number of elements in list of list "
2438 "exeeds the current IF selections")
2439 iflist = self.getifnos()
2440 for i, fval in enumerate(freqs):
2441 sel.set_ifs(iflist[i])
2442 self._setselection(sel)
2443 self._setrestfreqs([fval], [""], unit)
2444 self._setselection(savesel)
2445
2446 # list values are dict, {'value'=, 'name'=)
2447 elif isinstance(freqs[-1], dict):
2448 values = []
2449 names = []
2450 for d in freqs:
2451 values.append(d["value"])
2452 names.append(d["name"])
2453 self._setrestfreqs(values, names, unit)
2454 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2455 savesel = self._getselection()
2456 sel = self.get_selection()
2457 iflist = self.getifnos()
2458 if len(freqs)>len(iflist):
2459 raise ValueError("number of elements in list of list exeeds"
2460 " the current IF selections")
2461 for i, fval in enumerate(freqs):
2462 sel.set_ifs(iflist[i])
2463 self._setselection(sel)
2464 self._setrestfreqs(fval, [""], unit)
2465 self._setselection(savesel)
2466 # freqs are to be taken from a linecatalog
2467 elif isinstance(freqs, linecatalog):
2468 savesel = self._getselection()
2469 sel = self.get_selection()
2470 for i in xrange(freqs.nrow()):
2471 sel.set_ifs(iflist[i])
2472 self._setselection(sel)
2473 self._setrestfreqs([freqs.get_frequency(i)],
2474 [freqs.get_name(i)], "MHz")
2475 # ensure that we are not iterating past nIF
2476 if i == self.nif()-1: break
2477 self._setselection(savesel)
2478 else:
2479 return
2480 self._add_history("set_restfreqs", varlist)
2481
2482 @asaplog_post_dec
2483 def shift_refpix(self, delta):
2484 """\
2485 Shift the reference pixel of the Spectra Coordinate by an
2486 integer amount.
2487
2488 Parameters:
2489
2490 delta: the amount to shift by
2491
2492 *Note*:
2493
2494 Be careful using this with broadband data.
2495
2496 """
2497 varlist = vars()
2498 Scantable.shift_refpix(self, delta)
2499 s._add_history("shift_refpix", varlist)
2500
2501 @asaplog_post_dec
2502 def history(self, filename=None, nrows=-1, start=0):
2503 """\
2504 Print the history. Optionally to a file.
2505
2506 Parameters:
2507
2508 filename: The name of the file to save the history to.
2509
2510 """
2511 n = self._historylength()
2512 if nrows == -1:
2513 nrows = n
2514 if start+nrows > n:
2515 nrows = nrows-start
2516 if n > 1000 and nrows == n:
2517 nrows = 1000
2518 start = n-1000
2519 asaplog.push("Warning: History has {0} entries. Displaying last "
2520 "1000".format(n))
2521 hist = list(self._gethistory(nrows, start))
2522 out = "-"*80
2523 for h in hist:
2524 if not h.strip():
2525 continue
2526 if h.find("---") >-1:
2527 continue
2528 else:
2529 items = h.split("##")
2530 date = items[0]
2531 func = items[1]
2532 items = items[2:]
2533 out += "\n"+date+"\n"
2534 out += "Function: %s\n Parameters:" % (func)
2535 for i in items:
2536 if i == '':
2537 continue
2538 s = i.split("=")
2539 out += "\n %s = %s" % (s[0], s[1])
2540 out = "\n".join([out, "*"*80])
2541 if filename is not None:
2542 if filename is "":
2543 filename = 'scantable_history.txt'
2544 filename = os.path.expandvars(os.path.expanduser(filename))
2545 if not os.path.isdir(filename):
2546 data = open(filename, 'w')
2547 data.write(out)
2548 data.close()
2549 else:
2550 msg = "Illegal file name '%s'." % (filename)
2551 raise IOError(msg)
2552 return page(out)
2553
2554 #
2555 # Maths business
2556 #
2557 @asaplog_post_dec
2558 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2559 avmode="NONE"):
2560 """\
2561 Return the (time) weighted average of a scan. Scans will be averaged
2562 only if the source direction (RA/DEC) is within 1' otherwise
2563
2564 *Note*:
2565
2566 in channels only - align if necessary
2567
2568 Parameters:
2569
2570 mask: an optional mask (only used for 'var' and 'tsys'
2571 weighting)
2572
2573 scanav: True averages each scan separately
2574 False (default) averages all scans together,
2575
2576 weight: Weighting scheme.
2577 'none' (mean no weight)
2578 'var' (1/var(spec) weighted)
2579 'tsys' (1/Tsys**2 weighted)
2580 'tint' (integration time weighted)
2581 'tintsys' (Tint/Tsys**2)
2582 'median' ( median averaging)
2583 The default is 'tint'
2584
2585 align: align the spectra in velocity before averaging. It takes
2586 the time of the first spectrum as reference time.
2587 avmode: 'SOURCE' - also select by source name - or
2588 'NONE' (default). Not applicable for scanav=True or
2589 weight=median
2590
2591 Example::
2592
2593 # time average the scantable without using a mask
2594 newscan = scan.average_time()
2595
2596 """
2597 varlist = vars()
2598 weight = weight or 'TINT'
2599 mask = mask or ()
2600 scanav = (scanav and 'SCAN') or avmode.upper()
2601 scan = (self, )
2602
2603 if align:
2604 scan = (self.freq_align(insitu=False), )
2605 asaplog.push("Note: Alignment is don on a source-by-source basis")
2606 asaplog.push("Note: Averaging (by default) is not")
2607 # we need to set it to SOURCE averaging here
2608 s = None
2609 if weight.upper() == 'MEDIAN':
2610 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2611 scanav))
2612 else:
2613 s = scantable(self._math._average(scan, mask, weight.upper(),
2614 scanav))
2615 s._add_history("average_time", varlist)
2616 return s
2617
2618 @asaplog_post_dec
2619 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2620 """\
2621 Return a scan where all spectra are converted to either
2622 Jansky or Kelvin depending upon the flux units of the scan table.
2623 By default the function tries to look the values up internally.
2624 If it can't find them (or if you want to over-ride), you must
2625 specify EITHER jyperk OR eta (and D which it will try to look up
2626 also if you don't set it). jyperk takes precedence if you set both.
2627
2628 Parameters:
2629
2630 jyperk: the Jy / K conversion factor
2631
2632 eta: the aperture efficiency
2633
2634 d: the geometric diameter (metres)
2635
2636 insitu: if False a new scantable is returned.
2637 Otherwise, the scaling is done in-situ
2638 The default is taken from .asaprc (False)
2639
2640 """
2641 if insitu is None: insitu = rcParams['insitu']
2642 self._math._setinsitu(insitu)
2643 varlist = vars()
2644 jyperk = jyperk or -1.0
2645 d = d or -1.0
2646 eta = eta or -1.0
2647 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2648 s._add_history("convert_flux", varlist)
2649 if insitu: self._assign(s)
2650 else: return s
2651
2652 @asaplog_post_dec
2653 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2654 """\
2655 Return a scan after applying a gain-elevation correction.
2656 The correction can be made via either a polynomial or a
2657 table-based interpolation (and extrapolation if necessary).
2658 You specify polynomial coefficients, an ascii table or neither.
2659 If you specify neither, then a polynomial correction will be made
2660 with built in coefficients known for certain telescopes (an error
2661 will occur if the instrument is not known).
2662 The data and Tsys are *divided* by the scaling factors.
2663
2664 Parameters:
2665
2666 poly: Polynomial coefficients (default None) to compute a
2667 gain-elevation correction as a function of
2668 elevation (in degrees).
2669
2670 filename: The name of an ascii file holding correction factors.
2671 The first row of the ascii file must give the column
2672 names and these MUST include columns
2673 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2674 by this) somewhere.
2675 The second row must give the data type of the
2676 column. Use 'R' for Real and 'I' for Integer.
2677 An example file would be
2678 (actual factors are arbitrary) :
2679
2680 TIME ELEVATION FACTOR
2681 R R R
2682 0.1 0 0.8
2683 0.2 20 0.85
2684 0.3 40 0.9
2685 0.4 60 0.85
2686 0.5 80 0.8
2687 0.6 90 0.75
2688
2689 method: Interpolation method when correcting from a table.
2690 Values are 'nearest', 'linear' (default), 'cubic'
2691 and 'spline'
2692
2693 insitu: if False a new scantable is returned.
2694 Otherwise, the scaling is done in-situ
2695 The default is taken from .asaprc (False)
2696
2697 """
2698
2699 if insitu is None: insitu = rcParams['insitu']
2700 self._math._setinsitu(insitu)
2701 varlist = vars()
2702 poly = poly or ()
2703 from os.path import expandvars
2704 filename = expandvars(filename)
2705 s = scantable(self._math._gainel(self, poly, filename, method))
2706 s._add_history("gain_el", varlist)
2707 if insitu:
2708 self._assign(s)
2709 else:
2710 return s
2711
2712 @asaplog_post_dec
2713 def freq_align(self, reftime=None, method='cubic', insitu=None):
2714 """\
2715 Return a scan where all rows have been aligned in frequency/velocity.
2716 The alignment frequency frame (e.g. LSRK) is that set by function
2717 set_freqframe.
2718
2719 Parameters:
2720
2721 reftime: reference time to align at. By default, the time of
2722 the first row of data is used.
2723
2724 method: Interpolation method for regridding the spectra.
2725 Choose from 'nearest', 'linear', 'cubic' (default)
2726 and 'spline'
2727
2728 insitu: if False a new scantable is returned.
2729 Otherwise, the scaling is done in-situ
2730 The default is taken from .asaprc (False)
2731
2732 """
2733 if insitu is None: insitu = rcParams["insitu"]
2734 oldInsitu = self._math._insitu()
2735 self._math._setinsitu(insitu)
2736 varlist = vars()
2737 reftime = reftime or ""
2738 s = scantable(self._math._freq_align(self, reftime, method))
2739 s._add_history("freq_align", varlist)
2740 self._math._setinsitu(oldInsitu)
2741 if insitu:
2742 self._assign(s)
2743 else:
2744 return s
2745
2746 @asaplog_post_dec
2747 def opacity(self, tau=None, insitu=None):
2748 """\
2749 Apply an opacity correction. The data
2750 and Tsys are multiplied by the correction factor.
2751
2752 Parameters:
2753
2754 tau: (list of) opacity from which the correction factor is
2755 exp(tau*ZD)
2756 where ZD is the zenith-distance.
2757 If a list is provided, it has to be of length nIF,
2758 nIF*nPol or 1 and in order of IF/POL, e.g.
2759 [opif0pol0, opif0pol1, opif1pol0 ...]
2760 if tau is `None` the opacities are determined from a
2761 model.
2762
2763 insitu: if False a new scantable is returned.
2764 Otherwise, the scaling is done in-situ
2765 The default is taken from .asaprc (False)
2766
2767 """
2768 if insitu is None:
2769 insitu = rcParams['insitu']
2770 self._math._setinsitu(insitu)
2771 varlist = vars()
2772 if not hasattr(tau, "__len__"):
2773 tau = [tau]
2774 s = scantable(self._math._opacity(self, tau))
2775 s._add_history("opacity", varlist)
2776 if insitu:
2777 self._assign(s)
2778 else:
2779 return s
2780
2781 @asaplog_post_dec
2782 def bin(self, width=5, insitu=None):
2783 """\
2784 Return a scan where all spectra have been binned up.
2785
2786 Parameters:
2787
2788 width: The bin width (default=5) in pixels
2789
2790 insitu: if False a new scantable is returned.
2791 Otherwise, the scaling is done in-situ
2792 The default is taken from .asaprc (False)
2793
2794 """
2795 if insitu is None:
2796 insitu = rcParams['insitu']
2797 self._math._setinsitu(insitu)
2798 varlist = vars()
2799 s = scantable(self._math._bin(self, width))
2800 s._add_history("bin", varlist)
2801 if insitu:
2802 self._assign(s)
2803 else:
2804 return s
2805
2806 @asaplog_post_dec
2807 def reshape(self, first, last, insitu=None):
2808 """Resize the band by providing first and last channel.
2809 This will cut off all channels outside [first, last].
2810 """
2811 if insitu is None:
2812 insitu = rcParams['insitu']
2813 varlist = vars()
2814 if last < 0:
2815 last = self.nchan()-1 + last
2816 s = None
2817 if insitu:
2818 s = self
2819 else:
2820 s = self.copy()
2821 s._reshape(first,last)
2822 s._add_history("reshape", varlist)
2823 if not insitu:
2824 return s
2825
2826 @asaplog_post_dec
2827 def resample(self, width=5, method='cubic', insitu=None):
2828 """\
2829 Return a scan where all spectra have been binned up.
2830
2831 Parameters:
2832
2833 width: The bin width (default=5) in pixels
2834
2835 method: Interpolation method when correcting from a table.
2836 Values are 'nearest', 'linear', 'cubic' (default)
2837 and 'spline'
2838
2839 insitu: if False a new scantable is returned.
2840 Otherwise, the scaling is done in-situ
2841 The default is taken from .asaprc (False)
2842
2843 """
2844 if insitu is None:
2845 insitu = rcParams['insitu']
2846 self._math._setinsitu(insitu)
2847 varlist = vars()
2848 s = scantable(self._math._resample(self, method, width))
2849 s._add_history("resample", varlist)
2850 if insitu:
2851 self._assign(s)
2852 else:
2853 return s
2854
2855 @asaplog_post_dec
2856 def average_pol(self, mask=None, weight='none'):
2857 """\
2858 Average the Polarisations together.
2859
2860 Parameters:
2861
2862 mask: An optional mask defining the region, where the
2863 averaging will be applied. The output will have all
2864 specified points masked.
2865
2866 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2867 weighted), or 'tsys' (1/Tsys**2 weighted)
2868
2869 """
2870 varlist = vars()
2871 mask = mask or ()
2872 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2873 s._add_history("average_pol", varlist)
2874 return s
2875
2876 @asaplog_post_dec
2877 def average_beam(self, mask=None, weight='none'):
2878 """\
2879 Average the Beams together.
2880
2881 Parameters:
2882 mask: An optional mask defining the region, where the
2883 averaging will be applied. The output will have all
2884 specified points masked.
2885
2886 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2887 weighted), or 'tsys' (1/Tsys**2 weighted)
2888
2889 """
2890 varlist = vars()
2891 mask = mask or ()
2892 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2893 s._add_history("average_beam", varlist)
2894 return s
2895
2896 def parallactify(self, pflag):
2897 """\
2898 Set a flag to indicate whether this data should be treated as having
2899 been 'parallactified' (total phase == 0.0)
2900
2901 Parameters:
2902
2903 pflag: Bool indicating whether to turn this on (True) or
2904 off (False)
2905
2906 """
2907 varlist = vars()
2908 self._parallactify(pflag)
2909 self._add_history("parallactify", varlist)
2910
2911 @asaplog_post_dec
2912 def convert_pol(self, poltype=None):
2913 """\
2914 Convert the data to a different polarisation type.
2915 Note that you will need cross-polarisation terms for most conversions.
2916
2917 Parameters:
2918
2919 poltype: The new polarisation type. Valid types are:
2920 'linear', 'circular', 'stokes' and 'linpol'
2921
2922 """
2923 varlist = vars()
2924 s = scantable(self._math._convertpol(self, poltype))
2925 s._add_history("convert_pol", varlist)
2926 return s
2927
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a pristine copy so rejected rows can be restored during
        # interactive verification
        if plot: orgscan = self.copy()

        # the actual smoothing is delegated to the C++ layer
        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive verification state machine:
        #   'H' -> ask per row, None -> use default prompt behavior,
        #   'N' -> reject all remaining rows without plotting
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # original vs smoothed data for this row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # NOTE(review): after the first row the stored action is
                # reset to None unless 'R'/'A' below overrides it — confirm
                # this matches _get_verify_action's expected protocol.
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3020
3021 @asaplog_post_dec
3022 def regrid_channel(self, width=5, plot=False, insitu=None):
3023 """\
3024 Regrid the spectra by the specified channel width
3025
3026 Parameters:
3027
3028 width: The channel width (float) of regridded spectra
3029 in the current spectral unit.
3030
3031 plot: [NOT IMPLEMENTED YET]
3032 plot the original and the regridded spectra.
3033 In this each indivual fit has to be approved, by
3034 typing 'y' or 'n'
3035
3036 insitu: if False a new scantable is returned.
3037 Otherwise, the scaling is done in-situ
3038 The default is taken from .asaprc (False)
3039
3040 """
3041 if insitu is None: insitu = rcParams['insitu']
3042 varlist = vars()
3043
3044 if plot:
3045 asaplog.post()
3046 asaplog.push("Verification plot is not implemtnetd yet.")
3047 asaplog.post("WARN")
3048
3049 s = self.copy()
3050 s._regrid_specchan(width)
3051
3052 s._add_history("regrid_channel", varlist)
3053
3054# if plot:
3055# from asap.asapplotter import new_asaplot
3056# theplot = new_asaplot(rcParams['plotter.gui'])
3057# from matplotlib import rc as rcp
3058# rcp('lines', linewidth=1)
3059# theplot.set_panels()
3060# ylab=s._get_ordinate_label()
3061# #theplot.palette(0,["#777777","red"])
3062# for r in xrange(s.nrow()):
3063# xsm=s._getabcissa(r)
3064# ysm=s._getspectrum(r)
3065# xorg=orgscan._getabcissa(r)
3066# yorg=orgscan._getspectrum(r)
3067# theplot.clear()
3068# theplot.hold()
3069# theplot.set_axes('ylabel',ylab)
3070# theplot.set_axes('xlabel',s._getabcissalabel(r))
3071# theplot.set_axes('title',s._getsourcename(r))
3072# theplot.set_line(label='Original',color="#777777")
3073# theplot.plot(xorg,yorg)
3074# theplot.set_line(label='Smoothed',color="red")
3075# theplot.plot(xsm,ysm)
3076# ### Ugly part for legend
3077# for i in [0,1]:
3078# theplot.subplots[0]['lines'].append(
3079# [theplot.subplots[0]['axes'].lines[i]]
3080# )
3081# theplot.release()
3082# ### Ugly part for legend
3083# theplot.subplots[0]['lines']=[]
3084# res = raw_input("Accept smoothing ([y]/n): ")
3085# if res.upper() == 'N':
3086# s._setspectrum(yorg, r)
3087# theplot.quit()
3088# del theplot
3089# del orgscan
3090
3091 if insitu: self._assign(s)
3092 else: return s
3093
3094 @asaplog_post_dec
3095 def _parse_wn(self, wn):
3096 if isinstance(wn, list) or isinstance(wn, tuple):
3097 return wn
3098 elif isinstance(wn, int):
3099 return [ wn ]
3100 elif isinstance(wn, str):
3101 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3102 val = wn.split('-')
3103 val = [int(val[0]), int(val[1])]
3104 val.sort()
3105 res = [i for i in xrange(val[0], val[1]+1)]
3106 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3107 val = int(wn[2:])+1
3108 res = [i for i in xrange(val)]
3109 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3110 val = int(wn[:-2])+1
3111 res = [i for i in xrange(val)]
3112 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3113 val = int(wn[1:])
3114 res = [i for i in xrange(val)]
3115 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3116 val = int(wn[:-1])
3117 res = [i for i in xrange(val)]
3118 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3119 # then interpreted in C++
3120 # side as [a,a+1,...,a_nyq]
3121 # (CAS-3759)
3122 val = int(wn[2:])
3123 res = [val, -999]
3124 #res = [i for i in xrange(val, self.nchan()/2+1)]
3125 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3126 # then interpreted in C++
3127 # side as [a,a+1,...,a_nyq]
3128 # (CAS-3759)
3129 val = int(wn[:-2])
3130 res = [val, -999]
3131 #res = [i for i in xrange(val, self.nchan()/2+1)]
3132 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3133 # then interpreted in C++
3134 # side as [a+1,a+2,...,a_nyq]
3135 # (CAS-3759)
3136 val = int(wn[1:])+1
3137 res = [val, -999]
3138 #res = [i for i in xrange(val, self.nchan()/2+1)]
3139 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3140 # then interpreted in C++
3141 # side as [a+1,a+2,...,a_nyq]
3142 # (CAS-3759)
3143 val = int(wn[:-1])+1
3144 res = [val, -999]
3145 #res = [i for i in xrange(val, self.nchan()/2+1)]
3146
3147 return res
3148 else:
3149 msg = 'wrong value given for addwn/rejwn'
3150 raise RuntimeError(msg)
3151
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.

        Raises:
            ValueError: if inbltable is not given.
        """

        try:
            varlist = vars()
            # fill in defaults; inbltable is the only mandatory argument
            if retfitres is None: retfitres = False
            if inbltable is None: raise ValueError("bltable missing.")
            if outbltable is None: outbltable = ''
            if overwrite is None: overwrite = False

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # delegate the fitting/subtraction to the C++ layer; the
            # 4th argument tells it whether outbltable already exists
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # parse_fitresult is a helper defined elsewhere in this module
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            # shape of the return value depends on insitu x retfitres,
            # as documented above
            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            # translate low-level fitting errors into the package's
            # standard fitting-failure exception (helper defined elsewhere)
            raise_fitting_failure_exception(e)
3227
3228 @asaplog_post_dec
3229 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3230 """\
3231 Subtract baseline based on parameters written in the input list.
3232
3233 Parameters:
3234 insitu: if True, baseline fitting/subtraction is done
3235 in-situ. If False, a new scantable with
3236 baseline subtracted is returned. Actually,
3237 format of the returned value depends on both
3238 insitu and retfitres (see below).
3239 The default is taken from .asaprc (False)
3240 retfitres: if True, the results of baseline fitting (i.e.,
3241 coefficients and rms) are returned.
3242 default is False.
3243 The format of the returned value of this
3244 function varies as follows:
3245 (1) in case insitu=True and retfitres=True:
3246 fitting result.
3247 (2) in case insitu=True and retfitres=False:
3248 None.
3249 (3) in case insitu=False and retfitres=True:
3250 a dictionary containing a new scantable
3251 (with baseline subtracted) and the fitting
3252 results.
3253 (4) in case insitu=False and retfitres=False:
3254 a new scantable (with baseline subtracted).
3255 blinfo: baseline parameter set stored in a dictionary
3256 or a list of dictionary. Each dictionary
3257 corresponds to each spectrum and must contain
3258 the following keys and values:
3259 'row': row number,
3260 'blfunc': function name. available ones include
3261 'poly', 'chebyshev', 'cspline' and
3262 'sinusoid',
3263 'order': maximum order of polynomial. needed
3264 if blfunc='poly' or 'chebyshev',
3265 'npiece': number or piecewise polynomial.
3266 needed if blfunc='cspline',
3267 'nwave': a list of sinusoidal wave numbers.
3268 needed if blfunc='sinusoid', and
3269 'masklist': min-max windows for channel mask.
3270 the specified ranges will be used
3271 for fitting.
3272 bltable: name of output baseline table where baseline
3273 parameters and fitting results recorded.
3274 default is ''(no output).
3275 overwrite: if True when an existing baseline table is
3276 specified for bltable, overwrites it.
3277 Otherwise there is no harm.
3278 default is False.
3279
3280 Example:
3281 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3282 'masklist':[[10,350],[352,510]]},
3283 {'row':1, 'blfunc':'cspline', 'npiece':3,
3284 'masklist':[[3,16],[19,404],[407,511]]}
3285 ])
3286
3287 the first spectrum (row=0) will be fitted with polynomial
3288 of order=5 and the next one (row=1) will be fitted with cubic
3289 spline consisting of 3 pieces.
3290 """
3291
3292 try:
3293 varlist = vars()
3294 if retfitres is None: retfitres = False
3295 if blinfo is None: blinfo = []
3296 if bltable is None: bltable = ''
3297 if overwrite is None: overwrite = False
3298
3299 if insitu is None: insitu = rcParams['insitu']
3300 if insitu:
3301 workscan = self
3302 else:
3303 workscan = self.copy()
3304
3305 nrow = workscan.nrow()
3306
3307 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3308
3309 print "in_blinfo=< "+ str(in_blinfo)+" >"
3310
3311 sres = workscan._sub_baseline(in_blinfo,
3312 retfitres,
3313 bltable,
3314 os.path.exists(bltable),
3315 overwrite)
3316 if retfitres: res = parse_fitresult(sres)
3317
3318 workscan._add_history('sub_baseline', varlist)
3319
3320 if insitu:
3321 self._assign(workscan)
3322 if retfitres:
3323 return res
3324 else:
3325 return None
3326 else:
3327 if retfitres:
3328 return {'scantable': workscan, 'fitresults': res}
3329 else:
3330 return workscan
3331
3332 except RuntimeError, e:
3333 raise_fitting_failure_exception(e)
3334
3335 @asaplog_post_dec
3336 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3337 whichrow=None, uselinefinder=None, edge=None,
3338 threshold=None, chan_avg_limit=None):
3339 """\
3340 Calculates and returns model selection criteria for a specified
3341 baseline model and a given spectrum data.
3342 Available values include Akaike Information Criterion (AIC), the
3343 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3344 Bayesian Information Criterion (BIC) and the Generalised Cross
3345 Validation (GCV).
3346
3347 Parameters:
3348 value: name of model selection criteria to calculate.
3349 available ones include 'aic', 'aicc', 'bic' and
3350 'gcv'. default is 'aicc'.
3351 blfunc: baseline function name. available ones include
3352 'chebyshev', 'cspline' and 'sinusoid'.
3353 default is 'chebyshev'.
3354 order: parameter for basline function. actually stands for
3355 order of polynomial (order) for 'chebyshev',
3356 number of spline pieces (npiece) for 'cspline' and
3357 maximum wave number for 'sinusoid', respectively.
3358 default is 5 (which is also the default order value
3359 for [auto_]chebyshev_baseline()).
3360 mask: an optional mask. default is [].
3361 whichrow: row number. default is 0 (the first row)
3362 uselinefinder: use sd.linefinder() to flag out line regions
3363 default is True.
3364 edge: an optional number of channel to drop at
3365 the edge of spectrum. If only one value is
3366 specified, the same number will be dropped
3367 from both sides of the spectrum. Default
3368 is to keep all channels. Nested tuples
3369 represent individual edge selection for
3370 different IFs (a number of spectral channels
3371 can be different)
3372 default is (0, 0).
3373 threshold: the threshold used by line finder. It is
3374 better to keep it large as only strong lines
3375 affect the baseline solution.
3376 default is 3.
3377 chan_avg_limit: a maximum number of consequtive spectral
3378 channels to average during the search of
3379 weak and broad lines. The default is no
3380 averaging (and no search for weak lines).
3381 If such lines can affect the fitted baseline
3382 (e.g. a high order polynomial is fitted),
3383 increase this parameter (usually values up
3384 to 8 are reasonable). Most users of this
3385 method should find the default value sufficient.
3386 default is 1.
3387
3388 Example:
3389 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3390 """
3391
3392 try:
3393 varlist = vars()
3394
3395 if value is None: value = 'aicc'
3396 if blfunc is None: blfunc = 'chebyshev'
3397 if order is None: order = 5
3398 if mask is None: mask = []
3399 if whichrow is None: whichrow = 0
3400 if uselinefinder is None: uselinefinder = True
3401 if edge is None: edge = (0, 0)
3402 if threshold is None: threshold = 3
3403 if chan_avg_limit is None: chan_avg_limit = 1
3404
3405 return self._calc_aic(value, blfunc, order, mask,
3406 whichrow, uselinefinder, edge,
3407 threshold, chan_avg_limit)
3408
3409 except RuntimeError, e:
3410 raise_fitting_failure_exception(e)
3411
3412 @asaplog_post_dec
3413 def sinusoid_baseline(self, mask=None, applyfft=None,
3414 fftmethod=None, fftthresh=None,
3415 addwn=None, rejwn=None,
3416 insitu=None,
3417 clipthresh=None, clipniter=None,
3418 plot=None,
3419 getresidual=None,
3420 showprogress=None, minnrow=None,
3421 outlog=None,
3422 blfile=None, csvformat=None,
3423 bltable=None):
3424 """\
3425 Return a scan which has been baselined (all rows) with sinusoidal
3426 functions.
3427
3428 Parameters:
3429 mask: an optional mask
3430 applyfft: if True use some method, such as FFT, to find
3431 strongest sinusoidal components in the wavenumber
3432 domain to be used for baseline fitting.
3433 default is True.
3434 fftmethod: method to find the strong sinusoidal components.
3435 now only 'fft' is available and it is the default.
3436 fftthresh: the threshold to select wave numbers to be used for
3437 fitting from the distribution of amplitudes in the
3438 wavenumber domain.
3439 both float and string values accepted.
3440 given a float value, the unit is set to sigma.
3441 for string values, allowed formats include:
3442 'xsigma' or 'x' (= x-sigma level. e.g.,
3443 '3sigma'), or
3444 'topx' (= the x strongest ones, e.g. 'top5').
3445 default is 3.0 (unit: sigma).
3446 addwn: the additional wave numbers to be used for fitting.
3447 list or integer value is accepted to specify every
3448 wave numbers. also string value can be used in case
3449 you need to specify wave numbers in a certain range,
3450 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3451 '<a' (= 0,1,...,a-2,a-1),
3452 '>=a' (= a, a+1, ... up to the maximum wave
3453 number corresponding to the Nyquist
3454 frequency for the case of FFT).
3455 default is [0].
3456 rejwn: the wave numbers NOT to be used for fitting.
3457 can be set just as addwn but has higher priority:
3458 wave numbers which are specified both in addwn
3459 and rejwn will NOT be used. default is [].
3460 insitu: if False a new scantable is returned.
3461 Otherwise, the scaling is done in-situ
3462 The default is taken from .asaprc (False)
3463 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3464 clipniter: maximum number of iteration of 'clipthresh'-sigma
3465 clipping (default is 0)
3466 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3467 plot the fit and the residual. In this each
3468 indivual fit has to be approved, by typing 'y'
3469 or 'n'
3470 getresidual: if False, returns best-fit values instead of
3471 residual. (default is True)
3472 showprogress: show progress status for large data.
3473 default is True.
3474 minnrow: minimum number of input spectra to show.
3475 default is 1000.
3476 outlog: Output the coefficients of the best-fit
3477 function to logger (default is False)
3478 blfile: Name of a text file in which the best-fit
3479 parameter values to be written
3480 (default is '': no file/logger output)
3481 csvformat: if True blfile is csv-formatted, default is False.
3482 bltable: name of a baseline table where fitting results
3483 (coefficients, rms, etc.) are to be written.
3484 if given, fitting results will NOT be output to
3485 scantable (insitu=True) or None will be
3486 returned (insitu=False).
3487 (default is "": no table output)
3488
3489 Example:
3490 # return a scan baselined by a combination of sinusoidal curves
3491 # having wave numbers in spectral window up to 10,
3492 # also with 3-sigma clipping, iteration up to 4 times
3493 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3494
3495 Note:
3496 The best-fit parameter values output in logger and/or blfile are now
3497 based on specunit of 'channel'.
3498 """
3499
3500 try:
3501 varlist = vars()
3502
3503 if insitu is None: insitu = rcParams['insitu']
3504 if insitu:
3505 workscan = self
3506 else:
3507 workscan = self.copy()
3508
3509 if mask is None: mask = []
3510 if applyfft is None: applyfft = True
3511 if fftmethod is None: fftmethod = 'fft'
3512 if fftthresh is None: fftthresh = 3.0
3513 if addwn is None: addwn = [0]
3514 if rejwn is None: rejwn = []
3515 if clipthresh is None: clipthresh = 3.0
3516 if clipniter is None: clipniter = 0
3517 if plot is None: plot = False
3518 if getresidual is None: getresidual = True
3519 if showprogress is None: showprogress = True
3520 if minnrow is None: minnrow = 1000
3521 if outlog is None: outlog = False
3522 if blfile is None: blfile = ''
3523 if csvformat is None: csvformat = False
3524 if bltable is None: bltable = ''
3525
3526 sapplyfft = 'true' if applyfft else 'false'
3527 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3528
3529 scsvformat = 'T' if csvformat else 'F'
3530
3531 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3532 workscan._sinusoid_baseline(mask,
3533 fftinfo,
3534 #applyfft, fftmethod.lower(),
3535 #str(fftthresh).lower(),
3536 workscan._parse_wn(addwn),
3537 workscan._parse_wn(rejwn),
3538 clipthresh, clipniter,
3539 getresidual,
3540 pack_progress_params(showprogress,
3541 minnrow),
3542 outlog, scsvformat+blfile,
3543 bltable)
3544 workscan._add_history('sinusoid_baseline', varlist)
3545
3546 if bltable == '':
3547 if insitu:
3548 self._assign(workscan)
3549 else:
3550 return workscan
3551 else:
3552 if not insitu:
3553 return None
3554
3555 except RuntimeError, e:
3556 raise_fitting_failure_exception(e)
3557
3558
3559 @asaplog_post_dec
3560 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3561 fftmethod=None, fftthresh=None,
3562 addwn=None, rejwn=None,
3563 insitu=None,
3564 clipthresh=None, clipniter=None,
3565 edge=None, threshold=None, chan_avg_limit=None,
3566 plot=None,
3567 getresidual=None,
3568 showprogress=None, minnrow=None,
3569 outlog=None,
3570 blfile=None, csvformat=None,
3571 bltable=None):
3572 """\
3573 Return a scan which has been baselined (all rows) with sinusoidal
3574 functions.
3575 Spectral lines are detected first using linefinder and masked out
3576 to avoid them affecting the baseline solution.
3577
3578 Parameters:
3579 mask: an optional mask retreived from scantable
3580 applyfft: if True use some method, such as FFT, to find
3581 strongest sinusoidal components in the wavenumber
3582 domain to be used for baseline fitting.
3583 default is True.
3584 fftmethod: method to find the strong sinusoidal components.
3585 now only 'fft' is available and it is the default.
3586 fftthresh: the threshold to select wave numbers to be used for
3587 fitting from the distribution of amplitudes in the
3588 wavenumber domain.
3589 both float and string values accepted.
3590 given a float value, the unit is set to sigma.
3591 for string values, allowed formats include:
3592 'xsigma' or 'x' (= x-sigma level. e.g.,
3593 '3sigma'), or
3594 'topx' (= the x strongest ones, e.g. 'top5').
3595 default is 3.0 (unit: sigma).
3596 addwn: the additional wave numbers to be used for fitting.
3597 list or integer value is accepted to specify every
3598 wave numbers. also string value can be used in case
3599 you need to specify wave numbers in a certain range,
3600 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3601 '<a' (= 0,1,...,a-2,a-1),
3602 '>=a' (= a, a+1, ... up to the maximum wave
3603 number corresponding to the Nyquist
3604 frequency for the case of FFT).
3605 default is [0].
3606 rejwn: the wave numbers NOT to be used for fitting.
3607 can be set just as addwn but has higher priority:
3608 wave numbers which are specified both in addwn
3609 and rejwn will NOT be used. default is [].
3610 insitu: if False a new scantable is returned.
3611 Otherwise, the scaling is done in-situ
3612 The default is taken from .asaprc (False)
3613 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3614 clipniter: maximum number of iteration of 'clipthresh'-sigma
3615 clipping (default is 0)
3616 edge: an optional number of channel to drop at
3617 the edge of spectrum. If only one value is
3618 specified, the same number will be dropped
3619 from both sides of the spectrum. Default
3620 is to keep all channels. Nested tuples
3621 represent individual edge selection for
3622 different IFs (a number of spectral channels
3623 can be different)
3624 threshold: the threshold used by line finder. It is
3625 better to keep it large as only strong lines
3626 affect the baseline solution.
3627 chan_avg_limit: a maximum number of consequtive spectral
3628 channels to average during the search of
3629 weak and broad lines. The default is no
3630 averaging (and no search for weak lines).
3631 If such lines can affect the fitted baseline
3632 (e.g. a high order polynomial is fitted),
3633 increase this parameter (usually values up
3634 to 8 are reasonable). Most users of this
3635 method should find the default value sufficient.
3636 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3637 plot the fit and the residual. In this each
3638 indivual fit has to be approved, by typing 'y'
3639 or 'n'
3640 getresidual: if False, returns best-fit values instead of
3641 residual. (default is True)
3642 showprogress: show progress status for large data.
3643 default is True.
3644 minnrow: minimum number of input spectra to show.
3645 default is 1000.
3646 outlog: Output the coefficients of the best-fit
3647 function to logger (default is False)
3648 blfile: Name of a text file in which the best-fit
3649 parameter values to be written
3650 (default is "": no file/logger output)
3651 csvformat: if True blfile is csv-formatted, default is False.
3652 bltable: name of a baseline table where fitting results
3653 (coefficients, rms, etc.) are to be written.
3654 if given, fitting results will NOT be output to
3655 scantable (insitu=True) or None will be
3656 returned (insitu=False).
3657 (default is "": no table output)
3658
3659 Example:
3660 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3661
3662 Note:
3663 The best-fit parameter values output in logger and/or blfile are now
3664 based on specunit of 'channel'.
3665 """
3666
3667 try:
3668 varlist = vars()
3669
3670 if insitu is None: insitu = rcParams['insitu']
3671 if insitu:
3672 workscan = self
3673 else:
3674 workscan = self.copy()
3675
3676 if mask is None: mask = []
3677 if applyfft is None: applyfft = True
3678 if fftmethod is None: fftmethod = 'fft'
3679 if fftthresh is None: fftthresh = 3.0
3680 if addwn is None: addwn = [0]
3681 if rejwn is None: rejwn = []
3682 if clipthresh is None: clipthresh = 3.0
3683 if clipniter is None: clipniter = 0
3684 if edge is None: edge = (0,0)
3685 if threshold is None: threshold = 3
3686 if chan_avg_limit is None: chan_avg_limit = 1
3687 if plot is None: plot = False
3688 if getresidual is None: getresidual = True
3689 if showprogress is None: showprogress = True
3690 if minnrow is None: minnrow = 1000
3691 if outlog is None: outlog = False
3692 if blfile is None: blfile = ''
3693 if csvformat is None: csvformat = False
3694 if bltable is None: bltable = ''
3695
3696 sapplyfft = 'true' if applyfft else 'false'
3697 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3698
3699 scsvformat = 'T' if csvformat else 'F'
3700
3701 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3702 workscan._auto_sinusoid_baseline(mask,
3703 fftinfo,
3704 workscan._parse_wn(addwn),
3705 workscan._parse_wn(rejwn),
3706 clipthresh, clipniter,
3707 normalise_edge_param(edge),
3708 threshold, chan_avg_limit,
3709 getresidual,
3710 pack_progress_params(showprogress,
3711 minnrow),
3712 outlog, scsvformat+blfile, bltable)
3713 workscan._add_history("auto_sinusoid_baseline", varlist)
3714
3715 if bltable == '':
3716 if insitu:
3717 self._assign(workscan)
3718 else:
3719 return workscan
3720 else:
3721 if not insitu:
3722 return None
3723
3724 except RuntimeError, e:
3725 raise_fitting_failure_exception(e)
3726
3727 @asaplog_post_dec
3728 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3729 clipthresh=None, clipniter=None, plot=None,
3730 getresidual=None, showprogress=None, minnrow=None,
3731 outlog=None, blfile=None, csvformat=None,
3732 bltable=None):
3733 """\
3734 Return a scan which has been baselined (all rows) by cubic spline
3735 function (piecewise cubic polynomial).
3736
3737 Parameters:
3738 mask: An optional mask
3739 npiece: Number of pieces. (default is 2)
3740 insitu: If False a new scantable is returned.
3741 Otherwise, the scaling is done in-situ
3742 The default is taken from .asaprc (False)
3743 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3744 clipniter: maximum number of iteration of 'clipthresh'-sigma
3745 clipping (default is 0)
3746 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3747 plot the fit and the residual. In this each
3748 indivual fit has to be approved, by typing 'y'
3749 or 'n'
3750 getresidual: if False, returns best-fit values instead of
3751 residual. (default is True)
3752 showprogress: show progress status for large data.
3753 default is True.
3754 minnrow: minimum number of input spectra to show.
3755 default is 1000.
3756 outlog: Output the coefficients of the best-fit
3757 function to logger (default is False)
3758 blfile: Name of a text file in which the best-fit
3759 parameter values to be written
3760 (default is "": no file/logger output)
3761 csvformat: if True blfile is csv-formatted, default is False.
3762 bltable: name of a baseline table where fitting results
3763 (coefficients, rms, etc.) are to be written.
3764 if given, fitting results will NOT be output to
3765 scantable (insitu=True) or None will be
3766 returned (insitu=False).
3767 (default is "": no table output)
3768
3769 Example:
3770 # return a scan baselined by a cubic spline consisting of 2 pieces
3771 # (i.e., 1 internal knot),
3772 # also with 3-sigma clipping, iteration up to 4 times
3773 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3774
3775 Note:
3776 The best-fit parameter values output in logger and/or blfile are now
3777 based on specunit of 'channel'.
3778 """
3779
3780 try:
3781 varlist = vars()
3782
3783 if insitu is None: insitu = rcParams['insitu']
3784 if insitu:
3785 workscan = self
3786 else:
3787 workscan = self.copy()
3788
3789 if mask is None: mask = []
3790 if npiece is None: npiece = 2
3791 if clipthresh is None: clipthresh = 3.0
3792 if clipniter is None: clipniter = 0
3793 if plot is None: plot = False
3794 if getresidual is None: getresidual = True
3795 if showprogress is None: showprogress = True
3796 if minnrow is None: minnrow = 1000
3797 if outlog is None: outlog = False
3798 if blfile is None: blfile = ''
3799 if csvformat is None: csvformat = False
3800 if bltable is None: bltable = ''
3801
3802 scsvformat = 'T' if csvformat else 'F'
3803
3804 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3805 workscan._cspline_baseline(mask, npiece,
3806 clipthresh, clipniter,
3807 getresidual,
3808 pack_progress_params(showprogress,
3809 minnrow),
3810 outlog, scsvformat+blfile,
3811 bltable)
3812 workscan._add_history("cspline_baseline", varlist)
3813
3814 if bltable == '':
3815 if insitu:
3816 self._assign(workscan)
3817 else:
3818 return workscan
3819 else:
3820 if not insitu:
3821 return None
3822
3823 except RuntimeError, e:
3824 raise_fitting_failure_exception(e)
3825
3826 @asaplog_post_dec
3827 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3828 clipthresh=None, clipniter=None,
3829 edge=None, threshold=None, chan_avg_limit=None,
3830 getresidual=None, plot=None,
3831 showprogress=None, minnrow=None, outlog=None,
3832 blfile=None, csvformat=None, bltable=None):
3833 """\
3834 Return a scan which has been baselined (all rows) by cubic spline
3835 function (piecewise cubic polynomial).
3836 Spectral lines are detected first using linefinder and masked out
3837 to avoid them affecting the baseline solution.
3838
3839 Parameters:
3840 mask: an optional mask retreived from scantable
3841 npiece: Number of pieces. (default is 2)
3842 insitu: if False a new scantable is returned.
3843 Otherwise, the scaling is done in-situ
3844 The default is taken from .asaprc (False)
3845 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3846 clipniter: maximum number of iteration of 'clipthresh'-sigma
3847 clipping (default is 0)
3848 edge: an optional number of channel to drop at
3849 the edge of spectrum. If only one value is
3850 specified, the same number will be dropped
3851 from both sides of the spectrum. Default
3852 is to keep all channels. Nested tuples
3853 represent individual edge selection for
3854 different IFs (a number of spectral channels
3855 can be different)
3856 threshold: the threshold used by line finder. It is
3857 better to keep it large as only strong lines
3858 affect the baseline solution.
3859 chan_avg_limit: a maximum number of consequtive spectral
3860 channels to average during the search of
3861 weak and broad lines. The default is no
3862 averaging (and no search for weak lines).
3863 If such lines can affect the fitted baseline
3864 (e.g. a high order polynomial is fitted),
3865 increase this parameter (usually values up
3866 to 8 are reasonable). Most users of this
3867 method should find the default value sufficient.
3868 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3869 plot the fit and the residual. In this each
3870 indivual fit has to be approved, by typing 'y'
3871 or 'n'
3872 getresidual: if False, returns best-fit values instead of
3873 residual. (default is True)
3874 showprogress: show progress status for large data.
3875 default is True.
3876 minnrow: minimum number of input spectra to show.
3877 default is 1000.
3878 outlog: Output the coefficients of the best-fit
3879 function to logger (default is False)
3880 blfile: Name of a text file in which the best-fit
3881 parameter values to be written
3882 (default is "": no file/logger output)
3883 csvformat: if True blfile is csv-formatted, default is False.
3884 bltable: name of a baseline table where fitting results
3885 (coefficients, rms, etc.) are to be written.
3886 if given, fitting results will NOT be output to
3887 scantable (insitu=True) or None will be
3888 returned (insitu=False).
3889 (default is "": no table output)
3890
3891 Example:
3892 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3893
3894 Note:
3895 The best-fit parameter values output in logger and/or blfile are now
3896 based on specunit of 'channel'.
3897 """
3898
3899 try:
3900 varlist = vars()
3901
3902 if insitu is None: insitu = rcParams['insitu']
3903 if insitu:
3904 workscan = self
3905 else:
3906 workscan = self.copy()
3907
3908 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3909 if mask is None: mask = []
3910 if npiece is None: npiece = 2
3911 if clipthresh is None: clipthresh = 3.0
3912 if clipniter is None: clipniter = 0
3913 if edge is None: edge = (0, 0)
3914 if threshold is None: threshold = 3
3915 if chan_avg_limit is None: chan_avg_limit = 1
3916 if plot is None: plot = False
3917 if getresidual is None: getresidual = True
3918 if showprogress is None: showprogress = True
3919 if minnrow is None: minnrow = 1000
3920 if outlog is None: outlog = False
3921 if blfile is None: blfile = ''
3922 if csvformat is None: csvformat = False
3923 if bltable is None: bltable = ''
3924
3925 scsvformat = 'T' if csvformat else 'F'
3926
3927 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3928 workscan._auto_cspline_baseline(mask, npiece,
3929 clipthresh, clipniter,
3930 normalise_edge_param(edge),
3931 threshold,
3932 chan_avg_limit, getresidual,
3933 pack_progress_params(showprogress,
3934 minnrow),
3935 outlog,
3936 scsvformat+blfile,
3937 bltable)
3938 workscan._add_history("auto_cspline_baseline", varlist)
3939
3940 if bltable == '':
3941 if insitu:
3942 self._assign(workscan)
3943 else:
3944 return workscan
3945 else:
3946 if not insitu:
3947 return None
3948
3949 except RuntimeError, e:
3950 raise_fitting_failure_exception(e)
3951
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by Chebyshev polynomials of order
            # up to 5, also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the raw arguments, recorded in the history
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # defaults for unspecified arguments
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            # if results went to a baseline table, nothing is returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4049
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve every remaining None argument to its documented default
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character 'T'/'F' csv flag prepended to the blfile name
            # for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            # NOTE: 'plot' is accepted but currently unused; interactive
            # plotting is not available for this fitter (see docstring).
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table is requested the fit results go only to
            # the table; the scantable itself is not updated/returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4172
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve every remaining None argument to its documented default
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character 'T'/'F' csv flag prepended to the blfile name
            # for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive mode: fit row by row with the Python fitter
                # NOTE(review): output is appended to blfile only when the
                # file already exists -- confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'H' makes the first prompt show the help text; once the
                # user answers 'A' (accept all), action becomes 'Y' and all
                # further prompting/plotting is skipped
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # after the first row, fall back to prompting
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)  #

            workscan._add_history("poly_baseline", varlist)

            # NOTE(review): unlike the chebyshev variants, the result is
            # assigned/returned even when bltable is given -- confirm intended
            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4322
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve every remaining None argument to its documented default
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character 'T'/'F' csv flag prepended to the blfile name
            # for the C++ layer
            scsvformat = 'T' if csvformat else 'F'

            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: find lines and fit row by row in Python
                # NOTE(review): output is appended to blfile only when the
                # file already exists -- confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'H' makes the first prompt show the help text; once the
                # user answers 'A' (accept all), action becomes 'Y' and all
                # further prompting/plotting is skipped
                action = "H"
                for r in rows:
                    # normalised edge data is a flat list of [left, right]
                    # pairs, one per IF; pick this row's IF pair
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by the linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # after the first row, fall back to prompting
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is requested the fit results go only to
            # the table; the scantable itself is not updated/returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4507
4508 def _init_blinfo(self):
4509 """\
4510 Initialise the following three auxiliary members:
4511 blpars : parameters of the best-fit baseline,
4512 masklists : mask data (edge positions of masked channels) and
4513 actualmask : mask data (in boolean list),
4514 to keep for use later (including output to logger/text files).
4515 Used by poly_baseline() and auto_poly_baseline() in case of
4516 'plot=True'.
4517 """
4518 self.blpars = []
4519 self.masklists = []
4520 self.actualmask = []
4521 return
4522
4523 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4524 """\
4525 Append baseline-fitting related info to blpars, masklist and
4526 actualmask.
4527 """
4528 self.blpars.append(data_blpars)
4529 self.masklists.append(data_masklists)
4530 self.actualmask.append(data_actualmask)
4531 return
4532
4533 @asaplog_post_dec
4534 def rotate_linpolphase(self, angle):
4535 """\
4536 Rotate the phase of the complex polarization O=Q+iU correlation.
4537 This is always done in situ in the raw data. So if you call this
4538 function more than once then each call rotates the phase further.
4539
4540 Parameters:
4541
4542 angle: The angle (degrees) to rotate (add) by.
4543
4544 Example::
4545
4546 scan.rotate_linpolphase(2.3)
4547
4548 """
4549 varlist = vars()
4550 self._math._rotate_linpolphase(self, angle)
4551 self._add_history("rotate_linpolphase", varlist)
4552 return
4553
4554 @asaplog_post_dec
4555 def rotate_xyphase(self, angle):
4556 """\
4557 Rotate the phase of the XY correlation. This is always done in situ
4558 in the data. So if you call this function more than once
4559 then each call rotates the phase further.
4560
4561 Parameters:
4562
4563 angle: The angle (degrees) to rotate (add) by.
4564
4565 Example::
4566
4567 scan.rotate_xyphase(2.3)
4568
4569 """
4570 varlist = vars()
4571 self._math._rotate_xyphase(self, angle)
4572 self._add_history("rotate_xyphase", varlist)
4573 return
4574
4575 @asaplog_post_dec
4576 def swap_linears(self):
4577 """\
4578 Swap the linear polarisations XX and YY, or better the first two
4579 polarisations as this also works for ciculars.
4580 """
4581 varlist = vars()
4582 self._math._swap_linears(self)
4583 self._add_history("swap_linears", varlist)
4584 return
4585
4586 @asaplog_post_dec
4587 def invert_phase(self):
4588 """\
4589 Invert the phase of the complex polarisation
4590 """
4591 varlist = vars()
4592 self._math._invert_phase(self)
4593 self._add_history("invert_phase", varlist)
4594 return
4595
4596 @asaplog_post_dec
4597 def add(self, offset, insitu=None):
4598 """\
4599 Return a scan where all spectra have the offset added
4600
4601 Parameters:
4602
4603 offset: the offset
4604
4605 insitu: if False a new scantable is returned.
4606 Otherwise, the scaling is done in-situ
4607 The default is taken from .asaprc (False)
4608
4609 """
4610 if insitu is None: insitu = rcParams['insitu']
4611 self._math._setinsitu(insitu)
4612 varlist = vars()
4613 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4614 s._add_history("add", varlist)
4615 if insitu:
4616 self._assign(s)
4617 else:
4618 return s
4619
4620 @asaplog_post_dec
4621 def scale(self, factor, tsys=True, insitu=None):
4622 """\
4623
4624 Return a scan where all spectra are scaled by the given 'factor'
4625
4626 Parameters:
4627
4628 factor: the scaling factor (float or 1D float list)
4629
4630 insitu: if False a new scantable is returned.
4631 Otherwise, the scaling is done in-situ
4632 The default is taken from .asaprc (False)
4633
4634 tsys: if True (default) then apply the operation to Tsys
4635 as well as the data
4636
4637 """
4638 if insitu is None: insitu = rcParams['insitu']
4639 self._math._setinsitu(insitu)
4640 varlist = vars()
4641 s = None
4642 import numpy
4643 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4644 if isinstance(factor[0], list) or isinstance(factor[0],
4645 numpy.ndarray):
4646 from asapmath import _array2dOp
4647 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4648 else:
4649 s = scantable( self._math._arrayop( self, factor,
4650 "MUL", tsys ) )
4651 else:
4652 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4653 s._add_history("scale", varlist)
4654 if insitu:
4655 self._assign(s)
4656 else:
4657 return s
4658
4659 @preserve_selection
4660 def set_sourcetype(self, match, matchtype="pattern",
4661 sourcetype="reference"):
4662 """\
4663 Set the type of the source to be an source or reference scan
4664 using the provided pattern.
4665
4666 Parameters:
4667
4668 match: a Unix style pattern, regular expression or selector
4669
4670 matchtype: 'pattern' (default) UNIX style pattern or
4671 'regex' regular expression
4672
4673 sourcetype: the type of the source to use (source/reference)
4674
4675 """
4676 varlist = vars()
4677 stype = -1
4678 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4679 stype = 1
4680 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4681 stype = 0
4682 else:
4683 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4684 if matchtype.lower().startswith("p"):
4685 matchtype = "pattern"
4686 elif matchtype.lower().startswith("r"):
4687 matchtype = "regex"
4688 else:
4689 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4690 sel = selector()
4691 if isinstance(match, selector):
4692 sel = match
4693 else:
4694 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4695 self.set_selection(sel)
4696 self._setsourcetype(stype)
4697 self._add_history("set_sourcetype", varlist)
4698
4699
4700 def set_sourcename(self, name):
4701 varlist = vars()
4702 self._setsourcename(name)
4703 self._add_history("set_sourcename", varlist)
4704
4705 @asaplog_post_dec
4706 @preserve_selection
4707 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4708 """\
4709 This function allows to build quotients automatically.
4710 It assumes the observation to have the same number of
4711 "ons" and "offs"
4712
4713 Parameters:
4714
4715 preserve: you can preserve (default) the continuum or
4716 remove it. The equations used are
4717
4718 preserve: Output = Toff * (on/off) - Toff
4719
4720 remove: Output = Toff * (on/off) - Ton
4721
4722 mode: the on/off detection mode
4723 'paired' (default)
4724 identifies 'off' scans by the
4725 trailing '_R' (Mopra/Parkes) or
4726 '_e'/'_w' (Tid) and matches
4727 on/off pairs from the observing pattern
4728 'time'
4729 finds the closest off in time
4730
4731 .. todo:: verify argument is not implemented
4732
4733 """
4734 varlist = vars()
4735 modes = ["time", "paired"]
4736 if not mode in modes:
4737 msg = "please provide valid mode. Valid modes are %s" % (modes)
4738 raise ValueError(msg)
4739 s = None
4740 if mode.lower() == "paired":
4741 from asap._asap import srctype
4742 sel = self.get_selection()
4743 #sel.set_query("SRCTYPE==psoff")
4744 sel.set_types(srctype.psoff)
4745 self.set_selection(sel)
4746 offs = self.copy()
4747 #sel.set_query("SRCTYPE==pson")
4748 sel.set_types(srctype.pson)
4749 self.set_selection(sel)
4750 ons = self.copy()
4751 s = scantable(self._math._quotient(ons, offs, preserve))
4752 elif mode.lower() == "time":
4753 s = scantable(self._math._auto_quotient(self, mode, preserve))
4754 s._add_history("auto_quotient", varlist)
4755 return s
4756
4757 @asaplog_post_dec
4758 def mx_quotient(self, mask = None, weight='median', preserve=True):
4759 """\
4760 Form a quotient using "off" beams when observing in "MX" mode.
4761
4762 Parameters:
4763
4764 mask: an optional mask to be used when weight == 'stddev'
4765
4766 weight: How to average the off beams. Default is 'median'.
4767
4768 preserve: you can preserve (default) the continuum or
4769 remove it. The equations used are:
4770
4771 preserve: Output = Toff * (on/off) - Toff
4772
4773 remove: Output = Toff * (on/off) - Ton
4774
4775 """
4776 mask = mask or ()
4777 varlist = vars()
4778 on = scantable(self._math._mx_extract(self, 'on'))
4779 preoff = scantable(self._math._mx_extract(self, 'off'))
4780 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4781 from asapmath import quotient
4782 q = quotient(on, off, preserve)
4783 q._add_history("mx_quotient", varlist)
4784 return q
4785
4786 @asaplog_post_dec
4787 def freq_switch(self, insitu=None):
4788 """\
4789 Apply frequency switching to the data.
4790
4791 Parameters:
4792
4793 insitu: if False a new scantable is returned.
4794 Otherwise, the swictching is done in-situ
4795 The default is taken from .asaprc (False)
4796
4797 """
4798 if insitu is None: insitu = rcParams['insitu']
4799 self._math._setinsitu(insitu)
4800 varlist = vars()
4801 s = scantable(self._math._freqswitch(self))
4802 s._add_history("freq_switch", varlist)
4803 if insitu:
4804 self._assign(s)
4805 else:
4806 return s
4807
4808 @asaplog_post_dec
4809 def recalc_azel(self):
4810 """Recalculate the azimuth and elevation for each position."""
4811 varlist = vars()
4812 self._recalcazel()
4813 self._add_history("recalc_azel", varlist)
4814 return
4815
4816 @asaplog_post_dec
4817 def __add__(self, other):
4818 """
4819 implicit on all axes and on Tsys
4820 """
4821 varlist = vars()
4822 s = self.__op( other, "ADD" )
4823 s._add_history("operator +", varlist)
4824 return s
4825
4826 @asaplog_post_dec
4827 def __sub__(self, other):
4828 """
4829 implicit on all axes and on Tsys
4830 """
4831 varlist = vars()
4832 s = self.__op( other, "SUB" )
4833 s._add_history("operator -", varlist)
4834 return s
4835
4836 @asaplog_post_dec
4837 def __mul__(self, other):
4838 """
4839 implicit on all axes and on Tsys
4840 """
4841 varlist = vars()
4842 s = self.__op( other, "MUL" ) ;
4843 s._add_history("operator *", varlist)
4844 return s
4845
4846
4847 @asaplog_post_dec
4848 def __div__(self, other):
4849 """
4850 implicit on all axes and on Tsys
4851 """
4852 varlist = vars()
4853 s = self.__op( other, "DIV" )
4854 s._add_history("operator /", varlist)
4855 return s
4856
4857 @asaplog_post_dec
4858 def __op( self, other, mode ):
4859 s = None
4860 if isinstance(other, scantable):
4861 s = scantable(self._math._binaryop(self, other, mode))
4862 elif isinstance(other, float):
4863 if other == 0.0:
4864 raise ZeroDivisionError("Dividing by zero is not recommended")
4865 s = scantable(self._math._unaryop(self, other, mode, False))
4866 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4867 if isinstance(other[0], list) \
4868 or isinstance(other[0], numpy.ndarray):
4869 from asapmath import _array2dOp
4870 s = _array2dOp( self, other, mode, False )
4871 else:
4872 s = scantable( self._math._arrayop( self, other,
4873 mode, False ) )
4874 else:
4875 raise TypeError("Other input is not a scantable or float value")
4876 return s
4877
4878 @asaplog_post_dec
4879 def get_fit(self, row=0):
4880 """\
4881 Print or return the stored fits for a row in the scantable
4882
4883 Parameters:
4884
4885 row: the row which the fit has been applied to.
4886
4887 """
4888 if row > self.nrow():
4889 return
4890 from asap.asapfit import asapfit
4891 fit = asapfit(self._getfit(row))
4892 asaplog.push( '%s' %(fit) )
4893 return fit.as_dict()
4894
4895 @preserve_selection
4896 def flag_nans(self):
4897 """\
4898 Utility function to flag NaN values in the scantable.
4899 """
4900 import numpy
4901 basesel = self.get_selection()
4902 for i in range(self.nrow()):
4903 sel = self.get_row_selector(i)
4904 self.set_selection(basesel+sel)
4905 nans = numpy.isnan(self._getspectrum(0))
4906 if numpy.any(nans):
4907 bnans = [ bool(v) for v in nans]
4908 self.flag(bnans)
4909
4910 self.set_selection(basesel)
4911
4912 def get_row_selector(self, rowno):
4913 return selector(rows=[rowno])
4914
    def _add_history(self, funcname, parameters):
        """\
        Append a '##'-separated entry (timestamp, function name, then
        key=value pairs) to the scantable's history.

        Parameters:
            funcname:    name of the operation being recorded
            parameters:  dict of the call's arguments (typically vars());
                         NOTE: mutated in place -- the 'self' key is removed
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # dict-valued arguments are flattened one level deep
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid dumping whole tables into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            # store masks compactly as [start, end) segments
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    # avoid dumping whole tables into the history
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        # store masks compactly as [start, end) segments
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4955
4956
4957 def _zip_mask(self, mask):
4958 mask = list(mask)
4959 i = 0
4960 segments = []
4961 while mask[i:].count(1):
4962 i += mask[i:].index(1)
4963 if mask[i:].count(0):
4964 j = i + mask[i:].index(0)
4965 else:
4966 j = len(mask)
4967 segments.append([i, j])
4968 i = j
4969 return segments
4970
4971 def _get_ordinate_label(self):
4972 fu = "("+self.get_fluxunit()+")"
4973 import re
4974 lbl = "Intensity"
4975 if re.match(".K.", fu):
4976 lbl = "Brightness Temperature "+ fu
4977 elif re.match(".Jy.", fu):
4978 lbl = "Flux density "+ fu
4979 return lbl
4980
4981 def _check_ifs(self):
4982# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4983 nchans = [self.nchan(i) for i in self.getifnos()]
4984 nchans = filter(lambda t: t > 0, nchans)
4985 return (sum(nchans)/len(nchans) == nchans[0])
4986
4987 @asaplog_post_dec
4988 def _fill(self, names, unit, average, opts={}):
4989 first = True
4990 fullnames = []
4991 for name in names:
4992 name = os.path.expandvars(name)
4993 name = os.path.expanduser(name)
4994 if not os.path.exists(name):
4995 msg = "File '%s' does not exists" % (name)
4996 raise IOError(msg)
4997 fullnames.append(name)
4998 if average:
4999 asaplog.push('Auto averaging integrations')
5000 stype = int(rcParams['scantable.storage'].lower() == 'disk')
5001 for name in fullnames:
5002 tbl = Scantable(stype)
5003 if is_ms( name ):
5004 r = msfiller( tbl )
5005 else:
5006 r = filler( tbl )
5007 msg = "Importing %s..." % (name)
5008 asaplog.push(msg, False)
5009 r.open(name, opts)
5010 rx = rcParams['scantable.reference']
5011 r.setreferenceexpr(rx)
5012 r.fill()
5013 if average:
5014 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
5015 if not first:
5016 tbl = self._math._merge([self, tbl])
5017 Scantable.__init__(self, tbl)
5018 r.close()
5019 del r, tbl
5020 first = False
5021 #flush log
5022 asaplog.post()
5023 if unit is not None:
5024 self.set_fluxunit(unit)
5025 if not is_casapy():
5026 self.set_freqframe(rcParams['scantable.freqframe'])
5027
    def _get_verify_action( self, msg, action=None ):
        """\
        Ask the user (via raw_input) how to proceed with a verification
        step, or interpret a preset answer.

        Parameters:
            msg:     prompt text shown to the user
            action:  preset answer; when it is a non-empty string, the
                     prompt is skipped and the preset is interpreted

        Returns one of:
            'Y' : yes for the current data (also returned for empty input
                  or any unrecognised answer)
            'N' : no for the current data
            'A' : accept all remaining data and exit verification
            'R' : reject all remaining data and exit verification
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            # show the help text, then prompt again from scratch
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            return self._get_verify_action(msg)
        else:
            return 'Y'
5046
5047 def __getitem__(self, key):
5048 if key < 0:
5049 key += self.nrow()
5050 if key >= self.nrow():
5051 raise IndexError("Row index out of range.")
5052 return self._getspectrum(key)
5053
5054 def __setitem__(self, key, value):
5055 if key < 0:
5056 key += self.nrow()
5057 if key >= self.nrow():
5058 raise IndexError("Row index out of range.")
5059 if not hasattr(value, "__len__") or \
5060 len(value) > self.nchan(self.getif(key)):
5061 raise ValueError("Spectrum length doesn't match.")
5062 return self._setspectrum(value, key)
5063
5064 def __len__(self):
5065 return self.nrow()
5066
5067 def __iter__(self):
5068 for i in range(len(self)):
5069 yield self[i]
Note: See TracBrowser for help on using the repository browser.