source: trunk/python/scantable.py@ 2888

Last change on this file since 2888 was 2887, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes:

Module(s): sd

Description: modified parse_spw_selection() so that it returns only valid spws. If no valid spws are given, it raises RuntimeError.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 202.0 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs,
    even if *func* raises."""
    @wraps_dec(func)
    def wrapper(obj, *args, **kw):
        saved_selection = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(saved_selection)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    """
    # a scantable is a casa table directory containing table.info/table.dat
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    # "Type = Scantable" (the type name is optional in the pattern)
    return re.match('^Type = (Scantable)? *$', first_line) is not None
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    """
    # an MS is also a casa table directory; discriminate by table.info contents
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    return first_line.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid

    # --- validation ---------------------------------------------------
    # NOTE: 'raise E, msg' statement syntax replaced with the call form
    # used elsewhere in this module (also valid in Python 3).
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise trivial shapes ----------------------------------
    if isinstance(edge, int):
        edge = [edge, edge]                    # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                      # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]      # e.g. [1] => [1,1]

    # 'commonedge' means one [left,right] pair applied to all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten, duplicating bare integers into [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when 'verbose', otherwise re-raise
    as RuntimeError with an explanatory message appended."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress options into a 'true,<minnrow>'-style comma-separated string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        # do_pack_blinfo returns '' for rows >= maxirow; drop those
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        res = []
        for item in blinfo:
            packed = do_pack_blinfo(item, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Returns '' when blinfo['row'] >= maxirow (row out of range).
    Raises ValueError when a mandatory key ('row', 'blfunc', 'masklist',
    the function-specific parameter key, or a partial set of clipping /
    linefinder keys) is missing or inconsistent.
    """
    # NOTE: dict.has_key() (removed in Python 3) replaced with 'in';
    # xrange(len(...)) index loops replaced with direct iteration.
    dinfo = {}
    for key in ['row', 'blfunc', 'masklist']:
        if key in blinfo:
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, (list, tuple)):
                # flatten one nesting level, e.g. [[0,10],[20,30]] -> '0,10,20,30'
                slval = []
                for item in val:
                    if isinstance(item, (list, tuple)):
                        for subitem in item:
                            slval.append(str(subitem))
                    else:
                        slval.append(str(item))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    if irow >= maxirow: return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            val = blinfo[key]
            if isinstance(val, (list, tuple)):
                sval = ",".join([str(item) for item in val])
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    # which parameter key each baseline function requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # clipping parameters must be given as a complete pair or not at all
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter']  = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder parameters must be all-or-nothing as well
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, (list, tuple)):
                sval = ",".join([str(item) for item in val])
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh']         = ''
        dinfo['edge']           = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if the stripped string *s* parses as a float.

    Replaces the original bare 'except' + 'return' inside 'finally'
    (which silently swallowed any exception, including KeyboardInterrupt)
    with a targeted handler.
    """
    try:
        float(s.strip())
        return True
    except (TypeError, ValueError):
        return False
313
def is_frequency(s):
    """Return True if the stripped string ends with a 'Hz' unit suffix
    (case-insensitive)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Parse a value string *s1* and a frequency string *s2* (e.g. '1.4', 'GHz')
    and return both as a (value, frequency) tuple of floats in Hz,
    scaled by the SI prefix found in *s2*."""
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    # SI prefixes and matching scale factors ('.' marks the no-prefix slot)
    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before the trailing 'Hz' is the candidate prefix
    prefix = s2[-3:-2]
    if is_number(prefix):
        # digit there => no SI prefix, e.g. '100Hz'
        value = float(s1)
        freq = float(s2[:-2])
    else:
        scale = factor_list[prefix_list.index(prefix)]
        value = float(s1) * scale
        freq = float(s2[:-3]) * scale

    return (value, freq)
338
def is_velocity(s):
    """Return True if the stripped string ends with an 'm/s' unit suffix
    (case-insensitive)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Parse a value string *s1* and a velocity string *s2* and return both
    as a (value, velocity) tuple of floats in km/s (the default unit),
    scaled by the prefix found in *s2*."""
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before the trailing 'm/s' is the candidate prefix
    prefix = s2[-4:-3]
    if is_number(prefix):
        # digit there => the unit is plain m/s; convert to km/s
        value = float(s1) * 1e-3
        vel = float(s2[:-3]) * 1e-3
    else:
        scale = factor_list[prefix_list.index(prefix)]
        value = float(s1) * scale
        vel = float(s2[:-4]) * scale

    return (value, vel)
364
def get_frequency_by_velocity(restfreq, vel):
    """Convert a radial velocity to an observed frequency using the
    relativistic Doppler formula.

    Parameters:

        restfreq:    rest frequency
        vel:         velocity in km/s

    """
    # speed of light in km/s
    vel_c = 299792.458

    import math
    beta = vel / vel_c
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
374
375
376class scantable(Scantable):
377 """\
378 The ASAP container for scans (single-dish data).
379 """
380
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of constructor arguments, recorded in the table history below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": wrap an existing low-level Scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assumed fillable (e.g. rpfits/sdfits/NRO);
                    # forward the NRO-specific keyword arguments
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                   and isinstance(filename[-1], str):
                # list/tuple of filenames: fill them all into one scantable
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
        self._add_history("scantable", varlist)
467
468 @asaplog_post_dec
469 def save(self, name=None, format=None, overwrite=False):
470 """\
471 Store the scantable on disk. This can be an asap (aips++) Table,
472 SDFITS or MS2 format.
473
474 Parameters:
475
476 name: the name of the outputfile. For format 'ASCII'
477 this is the root file name (data in 'name'.txt
478 and header in 'name'_header.txt)
479
480 format: an optional file format. Default is ASAP.
481 Allowed are:
482
483 * 'ASAP' (save as ASAP [aips++] Table),
484 * 'SDFITS' (save as SDFITS file)
485 * 'ASCII' (saves as ascii text file)
486 * 'MS2' (saves as an casacore MeasurementSet V2)
487 * 'FITS' (save as image FITS - not readable by
488 class)
489 * 'CLASS' (save as FITS readable by CLASS)
490
491 overwrite: If the file should be overwritten if it exists.
492 The default False is to return with warning
493 without writing the output. USE WITH CARE.
494
495 Example::
496
497 scan.save('myscan.asap')
498 scan.save('myscan.sdfits', 'SDFITS')
499
500 """
501 from os import path
502 format = format or rcParams['scantable.save']
503 suffix = '.'+format.lower()
504 if name is None or name == "":
505 name = 'scantable'+suffix
506 msg = "No filename given. Using default name %s..." % name
507 asaplog.push(msg)
508 name = path.expandvars(name)
509 if path.isfile(name) or path.isdir(name):
510 if not overwrite:
511 msg = "File %s exists." % name
512 raise IOError(msg)
513 format2 = format.upper()
514 if format2 == 'ASAP':
515 self._save(name)
516 elif format2 == 'MS2':
517 msopt = {'ms': {'overwrite': overwrite } }
518 from asap._asap import mswriter
519 writer = mswriter( self )
520 writer.write( name, msopt )
521 else:
522 from asap._asap import stwriter as stw
523 writer = stw(format2)
524 writer.write(self, name)
525 return
526
527 def copy(self):
528 """Return a copy of this scantable.
529
530 *Note*:
531
532 This makes a full (deep) copy. scan2 = scan1 makes a reference.
533
534 Example::
535
536 copiedscan = scan.copy()
537
538 """
539 sd = scantable(Scantable._copy(self))
540 return sd
541
542 def drop_scan(self, scanid=None):
543 """\
544 Return a new scantable where the specified scan number(s) has(have)
545 been dropped.
546
547 Parameters:
548
549 scanid: a (list of) scan number(s)
550
551 """
552 from asap import _is_sequence_or_number as _is_valid
553 from asap import _to_list
554 from asap import unique
555 if not _is_valid(scanid):
556 raise RuntimeError( 'Please specify a scanno to drop from the'
557 ' scantable' )
558 scanid = _to_list(scanid)
559 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
560 for sid in scanid: allscans.remove(sid)
561 if len(allscans) == 0:
562 raise ValueError("Can't remove all scans")
563 sel = selector(scans=allscans)
564 return self._select_copy(sel)
565
566 def _select_copy(self, selection):
567 orig = self.get_selection()
568 self.set_selection(orig+selection)
569 cp = self.copy()
570 self.set_selection(orig)
571 return cp
572
573 def get_scan(self, scanid=None):
574 """\
575 Return a specific scan (by scanno) or collection of scans (by
576 source name) in a new scantable.
577
578 *Note*:
579
580 See scantable.drop_scan() for the inverse operation.
581
582 Parameters:
583
584 scanid: a (list of) scanno or a source name, unix-style
585 patterns are accepted for source name matching, e.g.
586 '*_R' gets all 'ref scans
587
588 Example::
589
590 # get all scans containing the source '323p459'
591 newscan = scan.get_scan('323p459')
592 # get all 'off' scans
593 refscans = scan.get_scan('*_R')
594 # get a susbset of scans by scanno (as listed in scan.summary())
595 newscan = scan.get_scan([0, 2, 7, 10])
596
597 """
598 if scanid is None:
599 raise RuntimeError( 'Please specify a scan no or name to '
600 'retrieve from the scantable' )
601 try:
602 bsel = self.get_selection()
603 sel = selector()
604 if type(scanid) is str:
605 sel.set_name(scanid)
606 return self._select_copy(sel)
607 elif type(scanid) is int:
608 sel.set_scans([scanid])
609 return self._select_copy(sel)
610 elif type(scanid) is list:
611 sel.set_scans(scanid)
612 return self._select_copy(sel)
613 else:
614 msg = "Illegal scanid type, use 'int' or 'list' if ints."
615 raise TypeError(msg)
616 except RuntimeError:
617 raise
618
619 def __str__(self):
620 tempFile = tempfile.NamedTemporaryFile()
621 Scantable._summary(self, tempFile.name)
622 tempFile.seek(0)
623 asaplog.clear()
624 return tempFile.file.read()
625
626 @asaplog_post_dec
627 def summary(self, filename=None):
628 """\
629 Print a summary of the contents of this scantable.
630
631 Parameters:
632
633 filename: the name of a file to write the putput to
634 Default - no file output
635
636 """
637 if filename is not None:
638 if filename is "":
639 filename = 'scantable_summary.txt'
640 from os.path import expandvars, isdir
641 filename = expandvars(filename)
642 if isdir(filename):
643 msg = "Illegal file name '%s'." % (filename)
644 raise IOError(msg)
645 else:
646 filename = ""
647 Scantable._summary(self, filename)
648
649 def get_spectrum(self, rowno):
650 """Return the spectrum for the current row in the scantable as a list.
651
652 Parameters:
653
654 rowno: the row number to retrieve the spectrum from
655
656 """
657 return self._getspectrum(rowno)
658
659 def get_mask(self, rowno):
660 """Return the mask for the current row in the scantable as a list.
661
662 Parameters:
663
664 rowno: the row number to retrieve the mask from
665
666 """
667 return self._getmask(rowno)
668
669 def set_spectrum(self, spec, rowno):
670 """Set the spectrum for the current row in the scantable.
671
672 Parameters:
673
674 spec: the new spectrum
675
676 rowno: the row number to set the spectrum for
677
678 """
679 assert(len(spec) == self.nchan(self.getif(rowno)))
680 return self._setspectrum(spec, rowno)
681
682 def get_coordinate(self, rowno):
683 """Return the (spectral) coordinate for a a given 'rowno'.
684
685 *Note*:
686
687 * This coordinate is only valid until a scantable method modifies
688 the frequency axis.
689 * This coordinate does contain the original frequency set-up
690 NOT the new frame. The conversions however are done using the user
691 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
692 use scantable.freq_align first. Without it there is no closure,
693 i.e.::
694
695 c = myscan.get_coordinate(0)
696 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
697
698 Parameters:
699
700 rowno: the row number for the spectral coordinate
701
702 """
703 return coordinate(Scantable.get_coordinate(self, rowno))
704
705 def get_selection(self):
706 """\
707 Get the selection object currently set on this scantable.
708
709 Example::
710
711 sel = scan.get_selection()
712 sel.set_ifs(0) # select IF 0
713 scan.set_selection(sel) # apply modified selection
714
715 """
716 return selector(self._getselection())
717
718 def set_selection(self, selection=None, **kw):
719 """\
720 Select a subset of the data. All following operations on this scantable
721 are only applied to thi selection.
722
723 Parameters:
724
725 selection: a selector object (default unset the selection), or
726 any combination of 'pols', 'ifs', 'beams', 'scans',
727 'cycles', 'name', 'query'
728
729 Examples::
730
731 sel = selector() # create a selection object
732 self.set_scans([0, 3]) # select SCANNO 0 and 3
733 scan.set_selection(sel) # set the selection
734 scan.summary() # will only print summary of scanno 0 an 3
735 scan.set_selection() # unset the selection
736 # or the equivalent
737 scan.set_selection(scans=[0,3])
738 scan.summary() # will only print summary of scanno 0 an 3
739 scan.set_selection() # unset the selection
740
741 """
742 if selection is None:
743 # reset
744 if len(kw) == 0:
745 selection = selector()
746 else:
747 # try keywords
748 for k in kw:
749 if k not in selector.fields:
750 raise KeyError("Invalid selection key '%s', "
751 "valid keys are %s" % (k,
752 selector.fields))
753 selection = selector(**kw)
754 self._setselection(selection)
755
756 def get_row(self, row=0, insitu=None):
757 """\
758 Select a row in the scantable.
759 Return a scantable with single row.
760
761 Parameters:
762
763 row: row no of integration, default is 0.
764 insitu: if False a new scantable is returned. Otherwise, the
765 scaling is done in-situ. The default is taken from .asaprc
766 (False)
767
768 """
769 if insitu is None:
770 insitu = rcParams['insitu']
771 if not insitu:
772 workscan = self.copy()
773 else:
774 workscan = self
775 # Select a row
776 sel = selector()
777 sel.set_rows([row])
778 workscan.set_selection(sel)
779 if not workscan.nrow() == 1:
780 msg = "Could not identify single row. %d rows selected." \
781 % (workscan.nrow())
782 raise RuntimeError(msg)
783 if insitu:
784 self._assign(workscan)
785 else:
786 return workscan
787
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/vel) of the
        # extremum rather than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel where the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        # for '_abc' stats, statvals is filled per-row inside the loop below
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]
        # NOTE(review): if 'row' is neither None nor int, 'rows' is unbound
        # and the loop below raises NameError — presumably callers only pass
        # None or int; confirm before relying on other types

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # '_abc': collect the abscissa value as the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only print axis labels that are non-degenerate in this dataset
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # single-row case: the stats vector has exactly one entry
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the formatted report through a temp file so that the log
        # receives it as a single multi-line message
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
895
896 def chan2data(self, rowno=0, chan=0):
897 """\
898 Returns channel/frequency/velocity and spectral value
899 at an arbitrary row and channel in the scantable.
900
901 Parameters:
902
903 rowno: a row number in the scantable. Default is the
904 first row, i.e. rowno=0
905
906 chan: a channel in the scantable. Default is the first
907 channel, i.e. pos=0
908
909 """
910 if isinstance(rowno, int) and isinstance(chan, int):
911 qx = {'unit': self.get_unit(),
912 'value': self._getabcissa(rowno)[chan]}
913 qy = {'unit': self.get_fluxunit(),
914 'value': self._getspectrum(rowno)[chan]}
915 return qx, qy
916
917 def stddev(self, mask=None):
918 """\
919 Determine the standard deviation of the current beam/if/pol
920 Takes a 'mask' as an optional parameter to specify which
921 channels should be excluded.
922
923 Parameters:
924
925 mask: an optional mask specifying where the standard
926 deviation should be determined.
927
928 Example::
929
930 scan.set_unit('channel')
931 msk = scan.create_mask([100, 200], [500, 600])
932 scan.stddev(mask=m)
933
934 """
935 return self.stats(stat='stddev', mask=mask);
936
937
938 def get_column_names(self):
939 """\
940 Return a list of column names, which can be used for selection.
941 """
942 return list(Scantable.get_column_names(self))
943
944 def get_tsys(self, row=-1):
945 """\
946 Return the System temperatures.
947
948 Parameters:
949
950 row: the rowno to get the information for. (default all rows)
951
952 Returns:
953
954 a list of Tsys values for the current selection
955
956 """
957 if row > -1:
958 return self._get_column(self._gettsys, row)
959 return self._row_callback(self._gettsys, "Tsys")
960
961 def get_tsysspectrum(self, row=-1):
962 """\
963 Return the channel dependent system temperatures.
964
965 Parameters:
966
967 row: the rowno to get the information for. (default all rows)
968
969 Returns:
970
971 a list of Tsys values for the current selection
972
973 """
974 return self._get_column( self._gettsysspectrum, row )
975
976 def set_tsys(self, values, row=-1):
977 """\
978 Set the Tsys value(s) of the given 'row' or the whole scantable
979 (selection).
980
981 Parameters:
982
983 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
984 row: the row number to apply Tsys values to.
985 (default all rows)
986
987 """
988
989 if not hasattr(values, "__len__"):
990 values = [values]
991 self._settsys(values, row)
992
993 def get_weather(self, row=-1):
994 """\
995 Return the weather informations.
996
997 Parameters:
998
999 row: the rowno to get the information for. (default all rows)
1000
1001 Returns:
1002
1003 a dict or list of of dicts of values for the current selection
1004
1005 """
1006
1007 values = self._get_column(self._get_weather, row)
1008 if row > -1:
1009 return {'temperature': values[0],
1010 'pressure': values[1], 'humidity' : values[2],
1011 'windspeed' : values[3], 'windaz' : values[4]
1012 }
1013 else:
1014 out = []
1015 for r in values:
1016
1017 out.append({'temperature': r[0],
1018 'pressure': r[1], 'humidity' : r[2],
1019 'windspeed' : r[3], 'windaz' : r[4]
1020 })
1021 return out
1022
1023 def _row_callback(self, callback, label):
1024 out = ""
1025 outvec = []
1026 sep = '-'*50
1027 for i in range(self.nrow()):
1028 tm = self._gettime(i)
1029 src = self._getsourcename(i)
1030 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1031 out += 'Time[%s]:\n' % (tm)
1032 if self.nbeam(-1) > 1:
1033 out += ' Beam[%d] ' % (self.getbeam(i))
1034 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1035 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1036 outvec.append(callback(i))
1037 out += '= %3.3f\n' % (outvec[i])
1038 out += sep+'\n'
1039
1040 asaplog.push(sep)
1041 asaplog.push(" %s" % (label))
1042 asaplog.push(sep)
1043 asaplog.push(out)
1044 asaplog.post()
1045 return outvec
1046
1047 def _get_column(self, callback, row=-1, *args):
1048 """
1049 """
1050 if row == -1:
1051 return [callback(i, *args) for i in range(self.nrow())]
1052 else:
1053 if 0 <= row < self.nrow():
1054 return callback(row, *args)
1055
1056
1057 def get_time(self, row=-1, asdatetime=False, prec=-1):
1058 """\
1059 Get a list of time stamps for the observations.
1060 Return a datetime object or a string (default) for each
1061 integration time stamp in the scantable.
1062
1063 Parameters:
1064
1065 row: row no of integration. Default -1 return all rows
1066
1067 asdatetime: return values as datetime objects rather than strings
1068
1069 prec: number of digits shown. Default -1 to automatic
1070 calculation.
1071 Note this number is equals to the digits of MVTime,
1072 i.e., 0<prec<3: dates with hh:: only,
1073 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
1074 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
1075
1076 """
1077 from datetime import datetime
1078 if prec < 0:
1079 # automagically set necessary precision +1
1080 prec = 7 - \
1081 numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
1082 prec = max(6, int(prec))
1083 else:
1084 prec = max(0, prec)
1085 if asdatetime:
1086 #precision can be 1 millisecond at max
1087 prec = min(12, prec)
1088
1089 times = self._get_column(self._gettime, row, prec)
1090 if not asdatetime:
1091 return times
1092 format = "%Y/%m/%d/%H:%M:%S.%f"
1093 if prec < 7:
1094 nsub = 1 + (((6-prec)/2) % 3)
1095 substr = [".%f","%S","%M"]
1096 for i in range(nsub):
1097 format = format.replace(substr[i],"")
1098 if isinstance(times, list):
1099 return [datetime.strptime(i, format) for i in times]
1100 else:
1101 return datetime.strptime(times, format)
1102
1103
1104 def get_inttime(self, row=-1):
1105 """\
1106 Get a list of integration times for the observations.
1107 Return a time in seconds for each integration in the scantable.
1108
1109 Parameters:
1110
1111 row: row no of integration. Default -1 return all rows.
1112
1113 """
1114 return self._get_column(self._getinttime, row)
1115
1116
1117 def get_sourcename(self, row=-1):
1118 """\
1119 Get a list source names for the observations.
1120 Return a string for each integration in the scantable.
1121 Parameters:
1122
1123 row: row no of integration. Default -1 return all rows.
1124
1125 """
1126 return self._get_column(self._getsourcename, row)
1127
1128 def get_elevation(self, row=-1):
1129 """\
1130 Get a list of elevations for the observations.
1131 Return a float for each integration in the scantable.
1132
1133 Parameters:
1134
1135 row: row no of integration. Default -1 return all rows.
1136
1137 """
1138 return self._get_column(self._getelevation, row)
1139
1140 def get_azimuth(self, row=-1):
1141 """\
1142 Get a list of azimuths for the observations.
1143 Return a float for each integration in the scantable.
1144
1145 Parameters:
1146 row: row no of integration. Default -1 return all rows.
1147
1148 """
1149 return self._get_column(self._getazimuth, row)
1150
1151 def get_parangle(self, row=-1):
1152 """\
1153 Get a list of parallactic angles for the observations.
1154 Return a float for each integration in the scantable.
1155
1156 Parameters:
1157
1158 row: row no of integration. Default -1 return all rows.
1159
1160 """
1161 return self._get_column(self._getparangle, row)
1162
1163 def get_direction(self, row=-1):
1164 """
1165 Get a list of Positions on the sky (direction) for the observations.
1166 Return a string for each integration in the scantable.
1167
1168 Parameters:
1169
1170 row: row no of integration. Default -1 return all rows
1171
1172 """
1173 return self._get_column(self._getdirection, row)
1174
1175 def get_directionval(self, row=-1):
1176 """\
1177 Get a list of Positions on the sky (direction) for the observations.
1178 Return a float for each integration in the scantable.
1179
1180 Parameters:
1181
1182 row: row no of integration. Default -1 return all rows
1183
1184 """
1185 return self._get_column(self._getdirectionvec, row)
1186
1187 @asaplog_post_dec
1188 def set_unit(self, unit='channel'):
1189 """\
1190 Set the unit for all following operations on this scantable
1191
1192 Parameters:
1193
1194 unit: optional unit, default is 'channel'. Use one of '*Hz',
1195 'km/s', 'channel' or equivalent ''
1196
1197 """
1198 varlist = vars()
1199 if unit in ['', 'pixel', 'channel']:
1200 unit = ''
1201 inf = list(self._getcoordinfo())
1202 inf[0] = unit
1203 self._setcoordinfo(inf)
1204 self._add_history("set_unit", varlist)
1205
1206 @asaplog_post_dec
1207 def set_instrument(self, instr):
1208 """\
1209 Set the instrument for subsequent processing.
1210
1211 Parameters:
1212
1213 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1214 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1215
1216 """
1217 self._setInstrument(instr)
1218 self._add_history("set_instument", vars())
1219
1220 @asaplog_post_dec
1221 def set_feedtype(self, feedtype):
1222 """\
1223 Overwrite the feed type, which might not be set correctly.
1224
1225 Parameters:
1226
1227 feedtype: 'linear' or 'circular'
1228
1229 """
1230 self._setfeedtype(feedtype)
1231 self._add_history("set_feedtype", vars())
1232
1233 @asaplog_post_dec
1234 def set_doppler(self, doppler='RADIO'):
1235 """\
1236 Set the doppler for all following operations on this scantable.
1237
1238 Parameters:
1239
1240 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1241
1242 """
1243 varlist = vars()
1244 inf = list(self._getcoordinfo())
1245 inf[2] = doppler
1246 self._setcoordinfo(inf)
1247 self._add_history("set_doppler", vars())
1248
1249 @asaplog_post_dec
1250 def set_freqframe(self, frame=None):
1251 """\
1252 Set the frame type of the Spectral Axis.
1253
1254 Parameters:
1255
1256 frame: an optional frame type, default 'LSRK'. Valid frames are:
1257 'TOPO', 'LSRD', 'LSRK', 'BARY',
1258 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1259
1260 Example::
1261
1262 scan.set_freqframe('BARY')
1263
1264 """
1265 frame = frame or rcParams['scantable.freqframe']
1266 varlist = vars()
1267 # "REST" is not implemented in casacore
1268 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1269 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1270 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1271 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1272
1273 if frame in valid:
1274 inf = list(self._getcoordinfo())
1275 inf[1] = frame
1276 self._setcoordinfo(inf)
1277 self._add_history("set_freqframe", varlist)
1278 else:
1279 msg = "Please specify a valid freq type. Valid types are:\n", valid
1280 raise TypeError(msg)
1281
1282 @asaplog_post_dec
1283 def set_dirframe(self, frame=""):
1284 """\
1285 Set the frame type of the Direction on the sky.
1286
1287 Parameters:
1288
1289 frame: an optional frame type, default ''. Valid frames are:
1290 'J2000', 'B1950', 'GALACTIC'
1291
1292 Example:
1293
1294 scan.set_dirframe('GALACTIC')
1295
1296 """
1297 varlist = vars()
1298 Scantable.set_dirframe(self, frame)
1299 self._add_history("set_dirframe", varlist)
1300
1301 def get_unit(self):
1302 """\
1303 Get the default unit set in this scantable
1304
1305 Returns:
1306
1307 A unit string
1308
1309 """
1310 inf = self._getcoordinfo()
1311 unit = inf[0]
1312 if unit == '': unit = 'channel'
1313 return unit
1314
1315 @asaplog_post_dec
1316 def get_abcissa(self, rowno=0):
1317 """\
1318 Get the abcissa in the current coordinate setup for the currently
1319 selected Beam/IF/Pol
1320
1321 Parameters:
1322
1323 rowno: an optional row number in the scantable. Default is the
1324 first row, i.e. rowno=0
1325
1326 Returns:
1327
1328 The abcissa values and the format string (as a dictionary)
1329
1330 """
1331 abc = self._getabcissa(rowno)
1332 lbl = self._getabcissalabel(rowno)
1333 return abc, lbl
1334
1335 @asaplog_post_dec
1336 def flag(self, mask=None, unflag=False, row=-1):
1337 """\
1338 Flag the selected data using an optional channel mask.
1339
1340 Parameters:
1341
1342 mask: an optional channel mask, created with create_mask. Default
1343 (no mask) is all channels.
1344
1345 unflag: if True, unflag the data
1346
1347 row: an optional row number in the scantable.
1348 Default -1 flags all rows
1349
1350 """
1351 varlist = vars()
1352 mask = mask or []
1353 self._flag(row, mask, unflag)
1354 self._add_history("flag", varlist)
1355
1356 @asaplog_post_dec
1357 def flag_row(self, rows=None, unflag=False):
1358 """\
1359 Flag the selected data in row-based manner.
1360
1361 Parameters:
1362
1363 rows: list of row numbers to be flagged. Default is no row
1364 (must be explicitly specified to execute row-based
1365 flagging).
1366
1367 unflag: if True, unflag the data.
1368
1369 """
1370 varlist = vars()
1371 if rows is None:
1372 rows = []
1373 self._flag_row(rows, unflag)
1374 self._add_history("flag_row", varlist)
1375
1376 @asaplog_post_dec
1377 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1378 """\
1379 Flag the selected data outside a specified range (in channel-base)
1380
1381 Parameters:
1382
1383 uthres: upper threshold.
1384
1385 dthres: lower threshold
1386
1387 clipoutside: True for flagging data outside the range
1388 [dthres:uthres].
1389 False for flagging data inside the range.
1390
1391 unflag: if True, unflag the data.
1392
1393 """
1394 varlist = vars()
1395 self._clip(uthres, dthres, clipoutside, unflag)
1396 self._add_history("clip", varlist)
1397
1398 @asaplog_post_dec
1399 def lag_flag(self, start, end, unit="MHz", insitu=None):
1400 """\
1401 Flag the data in 'lag' space by providing a frequency to remove.
1402 Flagged data in the scantable get interpolated over the region.
1403 No taper is applied.
1404
1405 Parameters:
1406
1407 start: the start frequency (really a period within the
1408 bandwidth) or period to remove
1409
1410 end: the end frequency or period to remove
1411
1412 unit: the frequency unit (default 'MHz') or '' for
1413 explicit lag channels
1414
1415 *Notes*:
1416
1417 It is recommended to flag edges of the band or strong
1418 signals beforehand.
1419
1420 """
1421 if insitu is None: insitu = rcParams['insitu']
1422 self._math._setinsitu(insitu)
1423 varlist = vars()
1424 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1425 if not (unit == "" or base.has_key(unit)):
1426 raise ValueError("%s is not a valid unit." % unit)
1427 if unit == "":
1428 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1429 else:
1430 s = scantable(self._math._lag_flag(self, start*base[unit],
1431 end*base[unit], "frequency"))
1432 s._add_history("lag_flag", varlist)
1433 if insitu:
1434 self._assign(s)
1435 else:
1436 return s
1437
1438 @asaplog_post_dec
1439 def fft(self, rowno=None, mask=None, getrealimag=False):
1440 """\
1441 Apply FFT to the spectra.
1442 Flagged data in the scantable get interpolated over the region.
1443
1444 Parameters:
1445
1446 rowno: The row number(s) to be processed. int, list
1447 and tuple are accepted. By default (None), FFT
1448 is applied to the whole data.
1449
1450 mask: Auxiliary channel mask(s). Given as a boolean
1451 list, it is applied to all specified rows.
1452 A list of boolean lists can also be used to
1453 apply different masks. In the latter case, the
1454 length of 'mask' must be the same as that of
1455 'rowno'. The default is None.
1456
1457 getrealimag: If True, returns the real and imaginary part
1458 values of the complex results.
1459 If False (the default), returns the amplitude
1460 (absolute value) normalised with Ndata/2 and
1461 phase (argument, in unit of radian).
1462
1463 Returns:
1464
1465 A list of dictionaries containing the results for each spectrum.
1466 Each dictionary contains two values, the real and the imaginary
1467 parts when getrealimag = True, or the amplitude(absolute value)
1468 and the phase(argument) when getrealimag = False. The key for
1469 these values are 'real' and 'imag', or 'ampl' and 'phase',
1470 respectively.
1471 """
1472 if rowno is None:
1473 rowno = []
1474 if isinstance(rowno, int):
1475 rowno = [rowno]
1476 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1477 raise TypeError("The row number(s) must be int, list or tuple.")
1478 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1479
1480 usecommonmask = True
1481
1482 if mask is None:
1483 mask = []
1484 if isinstance(mask, list) or isinstance(mask, tuple):
1485 if len(mask) == 0:
1486 mask = [[]]
1487 else:
1488 if isinstance(mask[0], bool):
1489 if len(mask) != self.nchan(self.getif(rowno[0])):
1490 raise ValueError("The spectra and the mask have "
1491 "different length.")
1492 mask = [mask]
1493 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1494 usecommonmask = False
1495 if len(mask) != len(rowno):
1496 raise ValueError("When specifying masks for each "
1497 "spectrum, the numbers of them "
1498 "must be identical.")
1499 for i in xrange(mask):
1500 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1501 raise ValueError("The spectra and the mask have "
1502 "different length.")
1503 else:
1504 raise TypeError("The mask must be a boolean list or "
1505 "a list of boolean list.")
1506 else:
1507 raise TypeError("The mask must be a boolean list or a list of "
1508 "boolean list.")
1509
1510 res = []
1511
1512 imask = 0
1513 for whichrow in rowno:
1514 fspec = self._fft(whichrow, mask[imask], getrealimag)
1515 nspec = len(fspec)
1516
1517 i = 0
1518 v1 = []
1519 v2 = []
1520 reselem = {"real":[],"imag":[]} if getrealimag \
1521 else {"ampl":[],"phase":[]}
1522
1523 while (i < nspec):
1524 v1.append(fspec[i])
1525 v2.append(fspec[i+1])
1526 i += 2
1527
1528 if getrealimag:
1529 reselem["real"] += v1
1530 reselem["imag"] += v2
1531 else:
1532 reselem["ampl"] += v1
1533 reselem["phase"] += v2
1534
1535 res.append(reselem)
1536
1537 if not usecommonmask:
1538 imask += 1
1539
1540 return res
1541
1542 @asaplog_post_dec
1543 def create_mask(self, *args, **kwargs):
1544 """\
1545 Compute and return a mask based on [min, max] windows.
1546 The specified windows are to be INCLUDED, when the mask is
1547 applied.
1548
1549 Parameters:
1550
1551 [min, max], [min2, max2], ...
1552 Pairs of start/end points (inclusive)specifying the regions
1553 to be masked
1554
1555 invert: optional argument. If specified as True,
1556 return an inverted mask, i.e. the regions
1557 specified are EXCLUDED
1558
1559 row: create the mask using the specified row for
1560 unit conversions, default is row=0
1561 only necessary if frequency varies over rows.
1562
1563 Examples::
1564
1565 scan.set_unit('channel')
1566 # a)
1567 msk = scan.create_mask([400, 500], [800, 900])
1568 # masks everything outside 400 and 500
1569 # and 800 and 900 in the unit 'channel'
1570
1571 # b)
1572 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1573 # masks the regions between 400 and 500
1574 # and 800 and 900 in the unit 'channel'
1575
1576 # c)
1577 #mask only channel 400
1578 msk = scan.create_mask([400])
1579
1580 """
1581 row = kwargs.get("row", 0)
1582 data = self._getabcissa(row)
1583 u = self._getcoordinfo()[0]
1584 if u == "":
1585 u = "channel"
1586 msg = "The current mask window unit is %s" % u
1587 i = self._check_ifs()
1588 if not i:
1589 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1590 asaplog.push(msg)
1591 n = len(data)
1592 msk = _n_bools(n, False)
1593 # test if args is a 'list' or a 'normal *args - UGLY!!!
1594
1595 ws = (isinstance(args[-1][-1], int)
1596 or isinstance(args[-1][-1], float)) and args or args[0]
1597 for window in ws:
1598 if len(window) == 1:
1599 window = [window[0], window[0]]
1600 if len(window) == 0 or len(window) > 2:
1601 raise ValueError("A window needs to be defined as "
1602 "[start(, end)]")
1603 if window[0] > window[1]:
1604 tmp = window[0]
1605 window[0] = window[1]
1606 window[1] = tmp
1607 for i in range(n):
1608 if data[i] >= window[0] and data[i] <= window[1]:
1609 msk[i] = True
1610 if kwargs.has_key('invert'):
1611 if kwargs.get('invert'):
1612 msk = mask_not(msk)
1613 return msk
1614
1615 def get_masklist(self, mask=None, row=0, silent=False):
1616 """\
1617 Compute and return a list of mask windows, [min, max].
1618
1619 Parameters:
1620
1621 mask: channel mask, created with create_mask.
1622
1623 row: calcutate the masklist using the specified row
1624 for unit conversions, default is row=0
1625 only necessary if frequency varies over rows.
1626
1627 Returns:
1628
1629 [min, max], [min2, max2], ...
1630 Pairs of start/end points (inclusive)specifying
1631 the masked regions
1632
1633 """
1634 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1635 raise TypeError("The mask should be list or tuple.")
1636 if len(mask) <= 0:
1637 raise TypeError("The mask elements should be > 0")
1638 data = self._getabcissa(row)
1639 if len(data) != len(mask):
1640 msg = "Number of channels in scantable != number of mask elements"
1641 raise TypeError(msg)
1642 u = self._getcoordinfo()[0]
1643 if u == "":
1644 u = "channel"
1645 msg = "The current mask window unit is %s" % u
1646 i = self._check_ifs()
1647 if not i:
1648 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1649 if not silent:
1650 asaplog.push(msg)
1651 masklist = []
1652 ist, ien = None, None
1653 ist, ien=self.get_mask_indices(mask)
1654 if ist is not None and ien is not None:
1655 for i in xrange(len(ist)):
1656 range=[data[ist[i]],data[ien[i]]]
1657 range.sort()
1658 masklist.append([range[0],range[1]])
1659 return masklist
1660
1661 def get_mask_indices(self, mask=None):
1662 """\
1663 Compute and Return lists of mask start indices and mask end indices.
1664
1665 Parameters:
1666
1667 mask: channel mask, created with create_mask.
1668
1669 Returns:
1670
1671 List of mask start indices and that of mask end indices,
1672 i.e., [istart1,istart2,....], [iend1,iend2,....].
1673
1674 """
1675 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1676 raise TypeError("The mask should be list or tuple.")
1677 if len(mask) <= 0:
1678 raise TypeError("The mask elements should be > 0")
1679 istart = []
1680 iend = []
1681 if mask[0]:
1682 istart.append(0)
1683 for i in range(len(mask)-1):
1684 if not mask[i] and mask[i+1]:
1685 istart.append(i+1)
1686 elif mask[i] and not mask[i+1]:
1687 iend.append(i)
1688 if mask[len(mask)-1]:
1689 iend.append(len(mask)-1)
1690 if len(istart) != len(iend):
1691 raise RuntimeError("Numbers of mask start != mask end.")
1692 for i in range(len(istart)):
1693 if istart[i] > iend[i]:
1694 raise RuntimeError("Mask start index > mask end index")
1695 break
1696 return istart,iend
1697
1698 @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                     examples:
                           '' or '*' = all spws (all channels)
                           '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): a non-string argument is only reported via asaplog
        # here; no exception is raised, so execution falls through below.
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # work in channel units; the original unit is restored before return
        orig_unit = self.get_unit()
        self.set_unit('channel')

        # remember the current rest frequencies so they can be restored
        orig_restfreq = self.get_restfreqs()
        orig_restfreq_list = []
        for i in orig_restfreq.keys():
            if len(orig_restfreq[i]) == 1:
                orig_restfreq_list.append(orig_restfreq[i][0])
            else:
                orig_restfreq_list.append(orig_restfreq[i])

        # remember the current frame and doppler for later restoration
        orig_coord = self._getcoordinfo()
        orig_frame = orig_coord[1]
        orig_doppler = orig_coord[2]

        if restfreq is None: restfreq = orig_restfreq_list
        if frame is None: frame = orig_frame
        if doppler is None: doppler = orig_doppler

        self.set_restfreqs(restfreq)
        self.set_freqframe(frame)
        self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        # each comma-separated element is one 'spw[:channels]' selection
        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                # exactly one of the separators may appear, at most once;
                # len_product == 1 means none of them appeared at all
                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            # select spws whose frequency range overlaps
                            # [expr_f0, expr_f1]
                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                # non-positive product <=> the two intervals
                                # [pmin, pmax] and [expr_pmin, expr_pmax] overlap
                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            # select spws whose velocity range overlaps
                            # [expr_vmin, expr_vmax]
                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")
            """
            no_valid_spw = True
            for spw in spw_list:
                print spw_list
                if spw in valid_ifs:
                    no_valid_spw = False
                else:
                    spw_list.remove(spw)

            if no_valid_spw:
                raise ValueError("No valid spw in given range.")
            """

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    # look up the coordinate object of this spw, needed for
                    # frequency/velocity to pixel conversion below
                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    # NOTE(review): uses the first rest
                                    # frequency only — presumably adequate
                                    # for single-line data; verify for
                                    # multi-restfreq scantables.
                                    restf = self.get_restfreqs().values()[0][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                if (len(crange_list) == 0):
                    crange_list.append([])

                if res.has_key(spw):
                    res[spw].extend(crange_list)
                else:
                    res[spw] = crange_list

        # drop entries for spws that do not exist in this scantable
        for spw in res.keys():
            if spw not in valid_ifs:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_restfreqs(orig_restfreq_list)
        self.set_freqframe(orig_frame)
        self.set_doppler(orig_doppler)
        self.set_unit(orig_unit)

        return res
2056 #found
2057
2058 @asaplog_post_dec
2059 def get_first_rowno_by_if(self, ifno):
2060 found = False
2061 for irow in xrange(self.nrow()):
2062 if (self.getif(irow) == ifno):
2063 res = irow
2064 found = True
2065 break
2066
2067 if not found: raise RuntimeError("Invalid IF value.")
2068
2069 return res
2070
2071 @asaplog_post_dec
2072 def _get_coordinate_list(self):
2073 res = []
2074 spws = self.getifnos()
2075 for spw in spws:
2076 elem = {}
2077 elem['if'] = spw
2078 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2079 res.append(elem)
2080
2081 return res
2082
2083##################################
2084
2085 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): a non-string argument is only reported via asaplog;
        # no exception is raised, so execution continues below.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty expression selects every valid IF, e.g. '0, 1, 2'
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row of this IF to obtain its
                        # frequency coordinate and abcissa values
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half of one channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half of one channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2222
2223 @asaplog_post_dec
2224 def parse_idx_selection(self, mode, selexpr):
2225 """
2226 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2227 BEAMNO, and row number
2228
2229 Parameters:
2230 mode : which column to select.
2231 ['scan',|'if'|'pol'|'beam'|'row']
2232 selexpr : A comma separated selection expression.
2233 examples:
2234 '' = all (returns [])
2235 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2236 (returns [0,1,4,5,6,9])
2237 Returns:
2238 A List of selected indices
2239 """
2240 if selexpr == "":
2241 return []
2242 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2243 'b': 'beam', 'r': 'row'}
2244 smode = mode.lower()[0]
2245 if not (smode in valid_modes.keys()):
2246 msg = "Invalid mode '%s'. Valid modes are %s" %\
2247 (mode, str(valid_modes.values()))
2248 asaplog.post()
2249 asaplog.push(msg)
2250 asaplog.post("ERROR")
2251 mode = valid_modes[smode]
2252 minidx = None
2253 maxidx = None
2254 if smode == 'r':
2255 minidx = 0
2256 maxidx = self.nrow()
2257 else:
2258 idx = getattr(self,"get"+mode+"nos")()
2259 minidx = min(idx)
2260 maxidx = max(idx)
2261 del idx
2262 # split selexpr by "<spaces>,<spaces>"
2263 comma_sep = re.compile('\s*,\s*')
2264 sellist = comma_sep.split(selexpr)
2265 idxlist = []
2266 for currselstr in sellist:
2267 # single range (may include ~, < or >)
2268 currlist = self._parse_selection(currselstr, typestr='integer',
2269 minval=minidx,maxval=maxidx)
2270 for thelist in currlist:
2271 idxlist += range(thelist[0],thelist[1]+1)
2272 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2273 asaplog.push(msg)
2274 return idxlist
2275
2276 def _parse_selection(self, selstr, typestr='float', offset=0.,
2277 minval=None, maxval=None):
2278 """
2279 Parameters:
2280 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2281 typestr : The type of the values in returned list
2282 ('integer' or 'float')
2283 offset : The offset value to subtract from or add to
2284 the boundary value if the selection string
2285 includes '<' or '>' [Valid only for typestr='float']
2286 minval, maxval : The minimum/maximum values to set if the
2287 selection string includes '<' or '>'.
2288 The list element is filled with None by default.
2289 Returns:
2290 A list of min/max pair of selections.
2291 Example:
2292 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2293 --> returns [[0,2],[5,7],[9,9]]
2294 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2295 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2296 """
2297 # split selstr by '<spaces>;<spaces>'
2298 semi_sep = re.compile('\s*;\s*')
2299 selgroups = semi_sep.split(selstr)
2300 sellists = []
2301 if typestr.lower().startswith('int'):
2302 formatfunc = int
2303 offset = 1
2304 else:
2305 formatfunc = float
2306
2307 for currsel in selgroups:
2308 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2309 minsel = minval
2310 maxsel = maxval
2311 if currsel.find('~') > 0:
2312 # val0 <= x <= val1
2313 minsel = formatfunc(currsel.split('~')[0].strip())
2314 maxsel = formatfunc(currsel.split('~')[1].strip())
2315 elif currsel.strip().find('<=') > -1:
2316 bound = currsel.split('<=')
2317 try: # try "x <= val"
2318 minsel = minval
2319 maxsel = formatfunc(bound[1].strip())
2320 except ValueError: # now "val <= x"
2321 minsel = formatfunc(bound[0].strip())
2322 maxsel = maxval
2323 elif currsel.strip().find('>=') > -1:
2324 bound = currsel.split('>=')
2325 try: # try "x >= val"
2326 minsel = formatfunc(bound[1].strip())
2327 maxsel = maxval
2328 except ValueError: # now "val >= x"
2329 minsel = minval
2330 maxsel = formatfunc(bound[0].strip())
2331 elif currsel.strip().find('<') > -1:
2332 bound = currsel.split('<')
2333 try: # try "x < val"
2334 minsel = minval
2335 maxsel = formatfunc(bound[1].strip()) \
2336 - formatfunc(offset)
2337 except ValueError: # now "val < x"
2338 minsel = formatfunc(bound[0].strip()) \
2339 + formatfunc(offset)
2340 maxsel = maxval
2341 elif currsel.strip().find('>') > -1:
2342 bound = currsel.split('>')
2343 try: # try "x > val"
2344 minsel = formatfunc(bound[1].strip()) \
2345 + formatfunc(offset)
2346 maxsel = maxval
2347 except ValueError: # now "val > x"
2348 minsel = minval
2349 maxsel = formatfunc(bound[0].strip()) \
2350 - formatfunc(offset)
2351 else:
2352 minsel = formatfunc(currsel)
2353 maxsel = formatfunc(currsel)
2354 sellists.append([minsel,maxsel])
2355 return sellists
2356
2357# def get_restfreqs(self):
2358# """
2359# Get the restfrequency(s) stored in this scantable.
2360# The return value(s) are always of unit 'Hz'
2361# Parameters:
2362# none
2363# Returns:
2364# a list of doubles
2365# """
2366# return list(self._getrestfreqs())
2367
2368 def get_restfreqs(self, ids=None):
2369 """\
2370 Get the restfrequency(s) stored in this scantable.
2371 The return value(s) are always of unit 'Hz'
2372
2373 Parameters:
2374
2375 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2376 be retrieved
2377
2378 Returns:
2379
2380 dictionary containing ids and a list of doubles for each id
2381
2382 """
2383 if ids is None:
2384 rfreqs = {}
2385 idlist = self.getmolnos()
2386 for i in idlist:
2387 rfreqs[i] = list(self._getrestfreqs(i))
2388 return rfreqs
2389 else:
2390 if type(ids) == list or type(ids) == tuple:
2391 rfreqs = {}
2392 for i in ids:
2393 rfreqs[i] = list(self._getrestfreqs(i))
2394 return rfreqs
2395 else:
2396 return list(self._getrestfreqs(ids))
2397
2398 @asaplog_post_dec
2399 def set_restfreqs(self, freqs=None, unit='Hz'):
2400 """\
2401 Set or replace the restfrequency specified and
2402 if the 'freqs' argument holds a scalar,
2403 then that rest frequency will be applied to all the selected
2404 data. If the 'freqs' argument holds
2405 a vector, then it MUST be of equal or smaller length than
2406 the number of IFs (and the available restfrequencies will be
2407 replaced by this vector). In this case, *all* data have
2408 the restfrequency set per IF according
2409 to the corresponding value you give in the 'freqs' vector.
2410 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2411 IF 1 gets restfreq 2e9.
2412
2413 You can also specify the frequencies via a linecatalog.
2414
2415 Parameters:
2416
2417 freqs: list of rest frequency values or string idenitfiers
2418
2419 unit: unit for rest frequency (default 'Hz')
2420
2421
2422 Example::
2423
2424 # set the given restfrequency for the all currently selected IFs
2425 scan.set_restfreqs(freqs=1.4e9)
2426 # set restfrequencies for the n IFs (n > 1) in the order of the
2427 # list, i.e
2428 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2429 # len(list_of_restfreqs) == nIF
2430 # for nIF == 1 the following will set multiple restfrequency for
2431 # that IF
2432 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2433 # set multiple restfrequencies per IF. as a list of lists where
2434 # the outer list has nIF elements, the inner s arbitrary
2435 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2436
2437 *Note*:
2438
2439 To do more sophisticate Restfrequency setting, e.g. on a
2440 source and IF basis, use scantable.set_selection() before using
2441 this function::
2442
2443 # provided your scantable is called scan
2444 selection = selector()
2445 selection.set_name('ORION*')
2446 selection.set_ifs([1])
2447 scan.set_selection(selection)
2448 scan.set_restfreqs(freqs=86.6e9)
2449
2450 """
2451 varlist = vars()
2452 from asap import linecatalog
2453 # simple value
2454 if isinstance(freqs, int) or isinstance(freqs, float):
2455 self._setrestfreqs([freqs], [""], unit)
2456 # list of values
2457 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2458 # list values are scalars
2459 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2460 if len(freqs) == 1:
2461 self._setrestfreqs(freqs, [""], unit)
2462 else:
2463 # allow the 'old' mode of setting mulitple IFs
2464 savesel = self._getselection()
2465 sel = self.get_selection()
2466 iflist = self.getifnos()
2467 if len(freqs)>len(iflist):
2468 raise ValueError("number of elements in list of list "
2469 "exeeds the current IF selections")
2470 iflist = self.getifnos()
2471 for i, fval in enumerate(freqs):
2472 sel.set_ifs(iflist[i])
2473 self._setselection(sel)
2474 self._setrestfreqs([fval], [""], unit)
2475 self._setselection(savesel)
2476
2477 # list values are dict, {'value'=, 'name'=)
2478 elif isinstance(freqs[-1], dict):
2479 values = []
2480 names = []
2481 for d in freqs:
2482 values.append(d["value"])
2483 names.append(d["name"])
2484 self._setrestfreqs(values, names, unit)
2485 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2486 savesel = self._getselection()
2487 sel = self.get_selection()
2488 iflist = self.getifnos()
2489 if len(freqs)>len(iflist):
2490 raise ValueError("number of elements in list of list exeeds"
2491 " the current IF selections")
2492 for i, fval in enumerate(freqs):
2493 sel.set_ifs(iflist[i])
2494 self._setselection(sel)
2495 self._setrestfreqs(fval, [""], unit)
2496 self._setselection(savesel)
2497 # freqs are to be taken from a linecatalog
2498 elif isinstance(freqs, linecatalog):
2499 savesel = self._getselection()
2500 sel = self.get_selection()
2501 for i in xrange(freqs.nrow()):
2502 sel.set_ifs(iflist[i])
2503 self._setselection(sel)
2504 self._setrestfreqs([freqs.get_frequency(i)],
2505 [freqs.get_name(i)], "MHz")
2506 # ensure that we are not iterating past nIF
2507 if i == self.nif()-1: break
2508 self._setselection(savesel)
2509 else:
2510 return
2511 self._add_history("set_restfreqs", varlist)
2512
2513 @asaplog_post_dec
2514 def shift_refpix(self, delta):
2515 """\
2516 Shift the reference pixel of the Spectra Coordinate by an
2517 integer amount.
2518
2519 Parameters:
2520
2521 delta: the amount to shift by
2522
2523 *Note*:
2524
2525 Be careful using this with broadband data.
2526
2527 """
2528 varlist = vars()
2529 Scantable.shift_refpix(self, delta)
2530 s._add_history("shift_refpix", varlist)
2531
2532 @asaplog_post_dec
2533 def history(self, filename=None, nrows=-1, start=0):
2534 """\
2535 Print the history. Optionally to a file.
2536
2537 Parameters:
2538
2539 filename: The name of the file to save the history to.
2540
2541 """
2542 n = self._historylength()
2543 if nrows == -1:
2544 nrows = n
2545 if start+nrows > n:
2546 nrows = nrows-start
2547 if n > 1000 and nrows == n:
2548 nrows = 1000
2549 start = n-1000
2550 asaplog.push("Warning: History has {0} entries. Displaying last "
2551 "1000".format(n))
2552 hist = list(self._gethistory(nrows, start))
2553 out = "-"*80
2554 for h in hist:
2555 if not h.strip():
2556 continue
2557 if h.find("---") >-1:
2558 continue
2559 else:
2560 items = h.split("##")
2561 date = items[0]
2562 func = items[1]
2563 items = items[2:]
2564 out += "\n"+date+"\n"
2565 out += "Function: %s\n Parameters:" % (func)
2566 for i in items:
2567 if i == '':
2568 continue
2569 s = i.split("=")
2570 out += "\n %s = %s" % (s[0], s[1])
2571 out = "\n".join([out, "*"*80])
2572 if filename is not None:
2573 if filename is "":
2574 filename = 'scantable_history.txt'
2575 filename = os.path.expandvars(os.path.expanduser(filename))
2576 if not os.path.isdir(filename):
2577 data = open(filename, 'w')
2578 data.write(out)
2579 data.close()
2580 else:
2581 msg = "Illegal file name '%s'." % (filename)
2582 raise IOError(msg)
2583 return page(out)
2584
2585 #
2586 # Maths business
2587 #
2588 @asaplog_post_dec
2589 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2590 avmode="NONE"):
2591 """\
2592 Return the (time) weighted average of a scan. Scans will be averaged
2593 only if the source direction (RA/DEC) is within 1' otherwise
2594
2595 *Note*:
2596
2597 in channels only - align if necessary
2598
2599 Parameters:
2600
2601 mask: an optional mask (only used for 'var' and 'tsys'
2602 weighting)
2603
2604 scanav: True averages each scan separately
2605 False (default) averages all scans together,
2606
2607 weight: Weighting scheme.
2608 'none' (mean no weight)
2609 'var' (1/var(spec) weighted)
2610 'tsys' (1/Tsys**2 weighted)
2611 'tint' (integration time weighted)
2612 'tintsys' (Tint/Tsys**2)
2613 'median' ( median averaging)
2614 The default is 'tint'
2615
2616 align: align the spectra in velocity before averaging. It takes
2617 the time of the first spectrum as reference time.
2618 avmode: 'SOURCE' - also select by source name - or
2619 'NONE' (default). Not applicable for scanav=True or
2620 weight=median
2621
2622 Example::
2623
2624 # time average the scantable without using a mask
2625 newscan = scan.average_time()
2626
2627 """
2628 varlist = vars()
2629 weight = weight or 'TINT'
2630 mask = mask or ()
2631 scanav = (scanav and 'SCAN') or avmode.upper()
2632 scan = (self, )
2633
2634 if align:
2635 scan = (self.freq_align(insitu=False), )
2636 asaplog.push("Note: Alignment is don on a source-by-source basis")
2637 asaplog.push("Note: Averaging (by default) is not")
2638 # we need to set it to SOURCE averaging here
2639 s = None
2640 if weight.upper() == 'MEDIAN':
2641 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2642 scanav))
2643 else:
2644 s = scantable(self._math._average(scan, mask, weight.upper(),
2645 scanav))
2646 s._add_history("average_time", varlist)
2647 return s
2648
2649 @asaplog_post_dec
2650 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2651 """\
2652 Return a scan where all spectra are converted to either
2653 Jansky or Kelvin depending upon the flux units of the scan table.
2654 By default the function tries to look the values up internally.
2655 If it can't find them (or if you want to over-ride), you must
2656 specify EITHER jyperk OR eta (and D which it will try to look up
2657 also if you don't set it). jyperk takes precedence if you set both.
2658
2659 Parameters:
2660
2661 jyperk: the Jy / K conversion factor
2662
2663 eta: the aperture efficiency
2664
2665 d: the geometric diameter (metres)
2666
2667 insitu: if False a new scantable is returned.
2668 Otherwise, the scaling is done in-situ
2669 The default is taken from .asaprc (False)
2670
2671 """
2672 if insitu is None: insitu = rcParams['insitu']
2673 self._math._setinsitu(insitu)
2674 varlist = vars()
2675 jyperk = jyperk or -1.0
2676 d = d or -1.0
2677 eta = eta or -1.0
2678 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2679 s._add_history("convert_flux", varlist)
2680 if insitu: self._assign(s)
2681 else: return s
2682
2683 @asaplog_post_dec
2684 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2685 """\
2686 Return a scan after applying a gain-elevation correction.
2687 The correction can be made via either a polynomial or a
2688 table-based interpolation (and extrapolation if necessary).
2689 You specify polynomial coefficients, an ascii table or neither.
2690 If you specify neither, then a polynomial correction will be made
2691 with built in coefficients known for certain telescopes (an error
2692 will occur if the instrument is not known).
2693 The data and Tsys are *divided* by the scaling factors.
2694
2695 Parameters:
2696
2697 poly: Polynomial coefficients (default None) to compute a
2698 gain-elevation correction as a function of
2699 elevation (in degrees).
2700
2701 filename: The name of an ascii file holding correction factors.
2702 The first row of the ascii file must give the column
2703 names and these MUST include columns
2704 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2705 by this) somewhere.
2706 The second row must give the data type of the
2707 column. Use 'R' for Real and 'I' for Integer.
2708 An example file would be
2709 (actual factors are arbitrary) :
2710
2711 TIME ELEVATION FACTOR
2712 R R R
2713 0.1 0 0.8
2714 0.2 20 0.85
2715 0.3 40 0.9
2716 0.4 60 0.85
2717 0.5 80 0.8
2718 0.6 90 0.75
2719
2720 method: Interpolation method when correcting from a table.
2721 Values are 'nearest', 'linear' (default), 'cubic'
2722 and 'spline'
2723
2724 insitu: if False a new scantable is returned.
2725 Otherwise, the scaling is done in-situ
2726 The default is taken from .asaprc (False)
2727
2728 """
2729
2730 if insitu is None: insitu = rcParams['insitu']
2731 self._math._setinsitu(insitu)
2732 varlist = vars()
2733 poly = poly or ()
2734 from os.path import expandvars
2735 filename = expandvars(filename)
2736 s = scantable(self._math._gainel(self, poly, filename, method))
2737 s._add_history("gain_el", varlist)
2738 if insitu:
2739 self._assign(s)
2740 else:
2741 return s
2742
2743 @asaplog_post_dec
2744 def freq_align(self, reftime=None, method='cubic', insitu=None):
2745 """\
2746 Return a scan where all rows have been aligned in frequency/velocity.
2747 The alignment frequency frame (e.g. LSRK) is that set by function
2748 set_freqframe.
2749
2750 Parameters:
2751
2752 reftime: reference time to align at. By default, the time of
2753 the first row of data is used.
2754
2755 method: Interpolation method for regridding the spectra.
2756 Choose from 'nearest', 'linear', 'cubic' (default)
2757 and 'spline'
2758
2759 insitu: if False a new scantable is returned.
2760 Otherwise, the scaling is done in-situ
2761 The default is taken from .asaprc (False)
2762
2763 """
2764 if insitu is None: insitu = rcParams["insitu"]
2765 oldInsitu = self._math._insitu()
2766 self._math._setinsitu(insitu)
2767 varlist = vars()
2768 reftime = reftime or ""
2769 s = scantable(self._math._freq_align(self, reftime, method))
2770 s._add_history("freq_align", varlist)
2771 self._math._setinsitu(oldInsitu)
2772 if insitu:
2773 self._assign(s)
2774 else:
2775 return s
2776
2777 @asaplog_post_dec
2778 def opacity(self, tau=None, insitu=None):
2779 """\
2780 Apply an opacity correction. The data
2781 and Tsys are multiplied by the correction factor.
2782
2783 Parameters:
2784
2785 tau: (list of) opacity from which the correction factor is
2786 exp(tau*ZD)
2787 where ZD is the zenith-distance.
2788 If a list is provided, it has to be of length nIF,
2789 nIF*nPol or 1 and in order of IF/POL, e.g.
2790 [opif0pol0, opif0pol1, opif1pol0 ...]
2791 if tau is `None` the opacities are determined from a
2792 model.
2793
2794 insitu: if False a new scantable is returned.
2795 Otherwise, the scaling is done in-situ
2796 The default is taken from .asaprc (False)
2797
2798 """
2799 if insitu is None:
2800 insitu = rcParams['insitu']
2801 self._math._setinsitu(insitu)
2802 varlist = vars()
2803 if not hasattr(tau, "__len__"):
2804 tau = [tau]
2805 s = scantable(self._math._opacity(self, tau))
2806 s._add_history("opacity", varlist)
2807 if insitu:
2808 self._assign(s)
2809 else:
2810 return s
2811
2812 @asaplog_post_dec
2813 def bin(self, width=5, insitu=None):
2814 """\
2815 Return a scan where all spectra have been binned up.
2816
2817 Parameters:
2818
2819 width: The bin width (default=5) in pixels
2820
2821 insitu: if False a new scantable is returned.
2822 Otherwise, the scaling is done in-situ
2823 The default is taken from .asaprc (False)
2824
2825 """
2826 if insitu is None:
2827 insitu = rcParams['insitu']
2828 self._math._setinsitu(insitu)
2829 varlist = vars()
2830 s = scantable(self._math._bin(self, width))
2831 s._add_history("bin", varlist)
2832 if insitu:
2833 self._assign(s)
2834 else:
2835 return s
2836
2837 @asaplog_post_dec
2838 def reshape(self, first, last, insitu=None):
2839 """Resize the band by providing first and last channel.
2840 This will cut off all channels outside [first, last].
2841 """
2842 if insitu is None:
2843 insitu = rcParams['insitu']
2844 varlist = vars()
2845 if last < 0:
2846 last = self.nchan()-1 + last
2847 s = None
2848 if insitu:
2849 s = self
2850 else:
2851 s = self.copy()
2852 s._reshape(first,last)
2853 s._add_history("reshape", varlist)
2854 if not insitu:
2855 return s
2856
2857 @asaplog_post_dec
2858 def resample(self, width=5, method='cubic', insitu=None):
2859 """\
2860 Return a scan where all spectra have been binned up.
2861
2862 Parameters:
2863
2864 width: The bin width (default=5) in pixels
2865
2866 method: Interpolation method when correcting from a table.
2867 Values are 'nearest', 'linear', 'cubic' (default)
2868 and 'spline'
2869
2870 insitu: if False a new scantable is returned.
2871 Otherwise, the scaling is done in-situ
2872 The default is taken from .asaprc (False)
2873
2874 """
2875 if insitu is None:
2876 insitu = rcParams['insitu']
2877 self._math._setinsitu(insitu)
2878 varlist = vars()
2879 s = scantable(self._math._resample(self, method, width))
2880 s._add_history("resample", varlist)
2881 if insitu:
2882 self._assign(s)
2883 else:
2884 return s
2885
2886 @asaplog_post_dec
2887 def average_pol(self, mask=None, weight='none'):
2888 """\
2889 Average the Polarisations together.
2890
2891 Parameters:
2892
2893 mask: An optional mask defining the region, where the
2894 averaging will be applied. The output will have all
2895 specified points masked.
2896
2897 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2898 weighted), or 'tsys' (1/Tsys**2 weighted)
2899
2900 """
2901 varlist = vars()
2902 mask = mask or ()
2903 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2904 s._add_history("average_pol", varlist)
2905 return s
2906
2907 @asaplog_post_dec
2908 def average_beam(self, mask=None, weight='none'):
2909 """\
2910 Average the Beams together.
2911
2912 Parameters:
2913 mask: An optional mask defining the region, where the
2914 averaging will be applied. The output will have all
2915 specified points masked.
2916
2917 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2918 weighted), or 'tsys' (1/Tsys**2 weighted)
2919
2920 """
2921 varlist = vars()
2922 mask = mask or ()
2923 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2924 s._add_history("average_beam", varlist)
2925 return s
2926
2927 def parallactify(self, pflag):
2928 """\
2929 Set a flag to indicate whether this data should be treated as having
2930 been 'parallactified' (total phase == 0.0)
2931
2932 Parameters:
2933
2934 pflag: Bool indicating whether to turn this on (True) or
2935 off (False)
2936
2937 """
2938 varlist = vars()
2939 self._parallactify(pflag)
2940 self._add_history("parallactify", varlist)
2941
2942 @asaplog_post_dec
2943 def convert_pol(self, poltype=None):
2944 """\
2945 Convert the data to a different polarisation type.
2946 Note that you will need cross-polarisation terms for most conversions.
2947
2948 Parameters:
2949
2950 poltype: The new polarisation type. Valid types are:
2951 'linear', 'circular', 'stokes' and 'linpol'
2952
2953 """
2954 varlist = vars()
2955 s = scantable(self._math._convertpol(self, poltype))
2956 s._add_history("convert_pol", varlist)
2957 return s
2958
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width:  The width of the kernel in pixels. For hanning this is
                    ignored otherwise it defaults to 5 pixels.
                    For 'gaussian' it is the Full Width Half
                    Maximum. For 'boxcar' it is the full width.
                    For 'rmedian' and 'poly' it is the half width.

            order:  Optional parameter for 'poly' kernel (default is 2), to
                    specify the order of the polynomial. Ignored by all other
                    kernels.

            plot:   plot the original and the smoothed spectra.
                    In this each individual fit has to be approved, by
                    typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep the unsmoothed data around for comparison/rejection plots
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive verification loop below:
        # None = ask per row, "N" = reject all remaining rows
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed vs original data for row r
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user to accept/reject this row's smoothing
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3051
    @asaplog_post_dec
    def regrid_channel(self, width=5, plot=False, insitu=None):
        """\
        Regrid the spectra by the specified channel width

        Parameters:

            width:  The channel width (float) of regridded spectra
                    in the current spectral unit.

            plot:   [NOT IMPLEMENTED YET]
                    plot the original and the regridded spectra.
                    In this each individual fit has to be approved, by
                    typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()

        if plot:
            # verification plotting is not implemented yet; warn and continue
            asaplog.post()
            asaplog.push("Verification plot is not implemtnetd yet.")
            asaplog.post("WARN")

        # regrid a copy; assign back onto self below when insitu
        s = self.copy()
        s._regrid_specchan(width)

        s._add_history("regrid_channel", varlist)

#        if plot:
#            from asap.asapplotter import new_asaplot
#            theplot = new_asaplot(rcParams['plotter.gui'])
#            from matplotlib import rc as rcp
#            rcp('lines', linewidth=1)
#            theplot.set_panels()
#            ylab=s._get_ordinate_label()
#            #theplot.palette(0,["#777777","red"])
#            for r in xrange(s.nrow()):
#                xsm=s._getabcissa(r)
#                ysm=s._getspectrum(r)
#                xorg=orgscan._getabcissa(r)
#                yorg=orgscan._getspectrum(r)
#                theplot.clear()
#                theplot.hold()
#                theplot.set_axes('ylabel',ylab)
#                theplot.set_axes('xlabel',s._getabcissalabel(r))
#                theplot.set_axes('title',s._getsourcename(r))
#                theplot.set_line(label='Original',color="#777777")
#                theplot.plot(xorg,yorg)
#                theplot.set_line(label='Smoothed',color="red")
#                theplot.plot(xsm,ysm)
#                ### Ugly part for legend
#                for i in [0,1]:
#                    theplot.subplots[0]['lines'].append(
#                        [theplot.subplots[0]['axes'].lines[i]]
#                        )
#                theplot.release()
#                ### Ugly part for legend
#                theplot.subplots[0]['lines']=[]
#                res = raw_input("Accept smoothing ([y]/n): ")
#                if res.upper() == 'N':
#                    s._setspectrum(yorg, r)
#            theplot.quit()
#            del theplot
#            del orgscan

        if insitu: self._assign(s)
        else: return s
3124
3125 @asaplog_post_dec
3126 def _parse_wn(self, wn):
3127 if isinstance(wn, list) or isinstance(wn, tuple):
3128 return wn
3129 elif isinstance(wn, int):
3130 return [ wn ]
3131 elif isinstance(wn, str):
3132 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3133 val = wn.split('-')
3134 val = [int(val[0]), int(val[1])]
3135 val.sort()
3136 res = [i for i in xrange(val[0], val[1]+1)]
3137 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3138 val = int(wn[2:])+1
3139 res = [i for i in xrange(val)]
3140 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3141 val = int(wn[:-2])+1
3142 res = [i for i in xrange(val)]
3143 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3144 val = int(wn[1:])
3145 res = [i for i in xrange(val)]
3146 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3147 val = int(wn[:-1])
3148 res = [i for i in xrange(val)]
3149 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3150 # then interpreted in C++
3151 # side as [a,a+1,...,a_nyq]
3152 # (CAS-3759)
3153 val = int(wn[2:])
3154 res = [val, -999]
3155 #res = [i for i in xrange(val, self.nchan()/2+1)]
3156 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3157 # then interpreted in C++
3158 # side as [a,a+1,...,a_nyq]
3159 # (CAS-3759)
3160 val = int(wn[:-2])
3161 res = [val, -999]
3162 #res = [i for i in xrange(val, self.nchan()/2+1)]
3163 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3164 # then interpreted in C++
3165 # side as [a+1,a+2,...,a_nyq]
3166 # (CAS-3759)
3167 val = int(wn[1:])+1
3168 res = [val, -999]
3169 #res = [i for i in xrange(val, self.nchan()/2+1)]
3170 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3171 # then interpreted in C++
3172 # side as [a+1,a+2,...,a_nyq]
3173 # (CAS-3759)
3174 val = int(wn[:-1])+1
3175 res = [val, -999]
3176 #res = [i for i in xrange(val, self.nchan()/2+1)]
3177
3178 return res
3179 else:
3180 msg = 'wrong value given for addwn/rejwn'
3181 raise RuntimeError(msg)
3182
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.
        """

        try:
            varlist = vars()
            # fill in defaults; inbltable is mandatory
            if retfitres      is None: retfitres      = False
            if inbltable      is None: raise ValueError("bltable missing.")
            if outbltable     is None: outbltable     = ''
            if overwrite      is None: overwrite      = False

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # the C++ layer does the fitting/subtraction; the 4th argument
            # tells it whether outbltable already exists on disk
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # parse_fitresult is a module-level helper defined elsewhere
            # in this file
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3258
3259 @asaplog_post_dec
3260 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3261 """\
3262 Subtract baseline based on parameters written in the input list.
3263
3264 Parameters:
3265 insitu: if True, baseline fitting/subtraction is done
3266 in-situ. If False, a new scantable with
3267 baseline subtracted is returned. Actually,
3268 format of the returned value depends on both
3269 insitu and retfitres (see below).
3270 The default is taken from .asaprc (False)
3271 retfitres: if True, the results of baseline fitting (i.e.,
3272 coefficients and rms) are returned.
3273 default is False.
3274 The format of the returned value of this
3275 function varies as follows:
3276 (1) in case insitu=True and retfitres=True:
3277 fitting result.
3278 (2) in case insitu=True and retfitres=False:
3279 None.
3280 (3) in case insitu=False and retfitres=True:
3281 a dictionary containing a new scantable
3282 (with baseline subtracted) and the fitting
3283 results.
3284 (4) in case insitu=False and retfitres=False:
3285 a new scantable (with baseline subtracted).
3286 blinfo: baseline parameter set stored in a dictionary
3287 or a list of dictionary. Each dictionary
3288 corresponds to each spectrum and must contain
3289 the following keys and values:
3290 'row': row number,
3291 'blfunc': function name. available ones include
3292 'poly', 'chebyshev', 'cspline' and
3293 'sinusoid',
3294 'order': maximum order of polynomial. needed
3295 if blfunc='poly' or 'chebyshev',
3296 'npiece': number or piecewise polynomial.
3297 needed if blfunc='cspline',
3298 'nwave': a list of sinusoidal wave numbers.
3299 needed if blfunc='sinusoid', and
3300 'masklist': min-max windows for channel mask.
3301 the specified ranges will be used
3302 for fitting.
3303 bltable: name of output baseline table where baseline
3304 parameters and fitting results recorded.
3305 default is ''(no output).
3306 overwrite: if True when an existing baseline table is
3307 specified for bltable, overwrites it.
3308 Otherwise there is no harm.
3309 default is False.
3310
3311 Example:
3312 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3313 'masklist':[[10,350],[352,510]]},
3314 {'row':1, 'blfunc':'cspline', 'npiece':3,
3315 'masklist':[[3,16],[19,404],[407,511]]}
3316 ])
3317
3318 the first spectrum (row=0) will be fitted with polynomial
3319 of order=5 and the next one (row=1) will be fitted with cubic
3320 spline consisting of 3 pieces.
3321 """
3322
3323 try:
3324 varlist = vars()
3325 if retfitres is None: retfitres = False
3326 if blinfo is None: blinfo = []
3327 if bltable is None: bltable = ''
3328 if overwrite is None: overwrite = False
3329
3330 if insitu is None: insitu = rcParams['insitu']
3331 if insitu:
3332 workscan = self
3333 else:
3334 workscan = self.copy()
3335
3336 nrow = workscan.nrow()
3337
3338 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3339
3340 print "in_blinfo=< "+ str(in_blinfo)+" >"
3341
3342 sres = workscan._sub_baseline(in_blinfo,
3343 retfitres,
3344 bltable,
3345 os.path.exists(bltable),
3346 overwrite)
3347 if retfitres: res = parse_fitresult(sres)
3348
3349 workscan._add_history('sub_baseline', varlist)
3350
3351 if insitu:
3352 self._assign(workscan)
3353 if retfitres:
3354 return res
3355 else:
3356 return None
3357 else:
3358 if retfitres:
3359 return {'scantable': workscan, 'fitresults': res}
3360 else:
3361 return workscan
3362
3363 except RuntimeError, e:
3364 raise_fitting_failure_exception(e)
3365
3366 @asaplog_post_dec
3367 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3368 whichrow=None, uselinefinder=None, edge=None,
3369 threshold=None, chan_avg_limit=None):
3370 """\
3371 Calculates and returns model selection criteria for a specified
3372 baseline model and a given spectrum data.
3373 Available values include Akaike Information Criterion (AIC), the
3374 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3375 Bayesian Information Criterion (BIC) and the Generalised Cross
3376 Validation (GCV).
3377
3378 Parameters:
3379 value: name of model selection criteria to calculate.
3380 available ones include 'aic', 'aicc', 'bic' and
3381 'gcv'. default is 'aicc'.
3382 blfunc: baseline function name. available ones include
3383 'chebyshev', 'cspline' and 'sinusoid'.
3384 default is 'chebyshev'.
3385 order: parameter for basline function. actually stands for
3386 order of polynomial (order) for 'chebyshev',
3387 number of spline pieces (npiece) for 'cspline' and
3388 maximum wave number for 'sinusoid', respectively.
3389 default is 5 (which is also the default order value
3390 for [auto_]chebyshev_baseline()).
3391 mask: an optional mask. default is [].
3392 whichrow: row number. default is 0 (the first row)
3393 uselinefinder: use sd.linefinder() to flag out line regions
3394 default is True.
3395 edge: an optional number of channel to drop at
3396 the edge of spectrum. If only one value is
3397 specified, the same number will be dropped
3398 from both sides of the spectrum. Default
3399 is to keep all channels. Nested tuples
3400 represent individual edge selection for
3401 different IFs (a number of spectral channels
3402 can be different)
3403 default is (0, 0).
3404 threshold: the threshold used by line finder. It is
3405 better to keep it large as only strong lines
3406 affect the baseline solution.
3407 default is 3.
3408 chan_avg_limit: a maximum number of consequtive spectral
3409 channels to average during the search of
3410 weak and broad lines. The default is no
3411 averaging (and no search for weak lines).
3412 If such lines can affect the fitted baseline
3413 (e.g. a high order polynomial is fitted),
3414 increase this parameter (usually values up
3415 to 8 are reasonable). Most users of this
3416 method should find the default value sufficient.
3417 default is 1.
3418
3419 Example:
3420 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3421 """
3422
3423 try:
3424 varlist = vars()
3425
3426 if value is None: value = 'aicc'
3427 if blfunc is None: blfunc = 'chebyshev'
3428 if order is None: order = 5
3429 if mask is None: mask = []
3430 if whichrow is None: whichrow = 0
3431 if uselinefinder is None: uselinefinder = True
3432 if edge is None: edge = (0, 0)
3433 if threshold is None: threshold = 3
3434 if chan_avg_limit is None: chan_avg_limit = 1
3435
3436 return self._calc_aic(value, blfunc, order, mask,
3437 whichrow, uselinefinder, edge,
3438 threshold, chan_avg_limit)
3439
3440 except RuntimeError, e:
3441 raise_fitting_failure_exception(e)
3442
3443 @asaplog_post_dec
3444 def sinusoid_baseline(self, mask=None, applyfft=None,
3445 fftmethod=None, fftthresh=None,
3446 addwn=None, rejwn=None,
3447 insitu=None,
3448 clipthresh=None, clipniter=None,
3449 plot=None,
3450 getresidual=None,
3451 showprogress=None, minnrow=None,
3452 outlog=None,
3453 blfile=None, csvformat=None,
3454 bltable=None):
3455 """\
3456 Return a scan which has been baselined (all rows) with sinusoidal
3457 functions.
3458
3459 Parameters:
3460 mask: an optional mask
3461 applyfft: if True use some method, such as FFT, to find
3462 strongest sinusoidal components in the wavenumber
3463 domain to be used for baseline fitting.
3464 default is True.
3465 fftmethod: method to find the strong sinusoidal components.
3466 now only 'fft' is available and it is the default.
3467 fftthresh: the threshold to select wave numbers to be used for
3468 fitting from the distribution of amplitudes in the
3469 wavenumber domain.
3470 both float and string values accepted.
3471 given a float value, the unit is set to sigma.
3472 for string values, allowed formats include:
3473 'xsigma' or 'x' (= x-sigma level. e.g.,
3474 '3sigma'), or
3475 'topx' (= the x strongest ones, e.g. 'top5').
3476 default is 3.0 (unit: sigma).
3477 addwn: the additional wave numbers to be used for fitting.
3478 list or integer value is accepted to specify every
3479 wave numbers. also string value can be used in case
3480 you need to specify wave numbers in a certain range,
3481 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3482 '<a' (= 0,1,...,a-2,a-1),
3483 '>=a' (= a, a+1, ... up to the maximum wave
3484 number corresponding to the Nyquist
3485 frequency for the case of FFT).
3486 default is [0].
3487 rejwn: the wave numbers NOT to be used for fitting.
3488 can be set just as addwn but has higher priority:
3489 wave numbers which are specified both in addwn
3490 and rejwn will NOT be used. default is [].
3491 insitu: if False a new scantable is returned.
3492 Otherwise, the scaling is done in-situ
3493 The default is taken from .asaprc (False)
3494 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3495 clipniter: maximum number of iteration of 'clipthresh'-sigma
3496 clipping (default is 0)
3497 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3498 plot the fit and the residual. In this each
3499 indivual fit has to be approved, by typing 'y'
3500 or 'n'
3501 getresidual: if False, returns best-fit values instead of
3502 residual. (default is True)
3503 showprogress: show progress status for large data.
3504 default is True.
3505 minnrow: minimum number of input spectra to show.
3506 default is 1000.
3507 outlog: Output the coefficients of the best-fit
3508 function to logger (default is False)
3509 blfile: Name of a text file in which the best-fit
3510 parameter values to be written
3511 (default is '': no file/logger output)
3512 csvformat: if True blfile is csv-formatted, default is False.
3513 bltable: name of a baseline table where fitting results
3514 (coefficients, rms, etc.) are to be written.
3515 if given, fitting results will NOT be output to
3516 scantable (insitu=True) or None will be
3517 returned (insitu=False).
3518 (default is "": no table output)
3519
3520 Example:
3521 # return a scan baselined by a combination of sinusoidal curves
3522 # having wave numbers in spectral window up to 10,
3523 # also with 3-sigma clipping, iteration up to 4 times
3524 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3525
3526 Note:
3527 The best-fit parameter values output in logger and/or blfile are now
3528 based on specunit of 'channel'.
3529 """
3530
3531 try:
3532 varlist = vars()
3533
3534 if insitu is None: insitu = rcParams['insitu']
3535 if insitu:
3536 workscan = self
3537 else:
3538 workscan = self.copy()
3539
3540 if mask is None: mask = []
3541 if applyfft is None: applyfft = True
3542 if fftmethod is None: fftmethod = 'fft'
3543 if fftthresh is None: fftthresh = 3.0
3544 if addwn is None: addwn = [0]
3545 if rejwn is None: rejwn = []
3546 if clipthresh is None: clipthresh = 3.0
3547 if clipniter is None: clipniter = 0
3548 if plot is None: plot = False
3549 if getresidual is None: getresidual = True
3550 if showprogress is None: showprogress = True
3551 if minnrow is None: minnrow = 1000
3552 if outlog is None: outlog = False
3553 if blfile is None: blfile = ''
3554 if csvformat is None: csvformat = False
3555 if bltable is None: bltable = ''
3556
3557 sapplyfft = 'true' if applyfft else 'false'
3558 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3559
3560 scsvformat = 'T' if csvformat else 'F'
3561
3562 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3563 workscan._sinusoid_baseline(mask,
3564 fftinfo,
3565 #applyfft, fftmethod.lower(),
3566 #str(fftthresh).lower(),
3567 workscan._parse_wn(addwn),
3568 workscan._parse_wn(rejwn),
3569 clipthresh, clipniter,
3570 getresidual,
3571 pack_progress_params(showprogress,
3572 minnrow),
3573 outlog, scsvformat+blfile,
3574 bltable)
3575 workscan._add_history('sinusoid_baseline', varlist)
3576
3577 if bltable == '':
3578 if insitu:
3579 self._assign(workscan)
3580 else:
3581 return workscan
3582 else:
3583 if not insitu:
3584 return None
3585
3586 except RuntimeError, e:
3587 raise_fitting_failure_exception(e)
3588
3589
3590 @asaplog_post_dec
3591 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3592 fftmethod=None, fftthresh=None,
3593 addwn=None, rejwn=None,
3594 insitu=None,
3595 clipthresh=None, clipniter=None,
3596 edge=None, threshold=None, chan_avg_limit=None,
3597 plot=None,
3598 getresidual=None,
3599 showprogress=None, minnrow=None,
3600 outlog=None,
3601 blfile=None, csvformat=None,
3602 bltable=None):
3603 """\
3604 Return a scan which has been baselined (all rows) with sinusoidal
3605 functions.
3606 Spectral lines are detected first using linefinder and masked out
3607 to avoid them affecting the baseline solution.
3608
3609 Parameters:
3610 mask: an optional mask retreived from scantable
3611 applyfft: if True use some method, such as FFT, to find
3612 strongest sinusoidal components in the wavenumber
3613 domain to be used for baseline fitting.
3614 default is True.
3615 fftmethod: method to find the strong sinusoidal components.
3616 now only 'fft' is available and it is the default.
3617 fftthresh: the threshold to select wave numbers to be used for
3618 fitting from the distribution of amplitudes in the
3619 wavenumber domain.
3620 both float and string values accepted.
3621 given a float value, the unit is set to sigma.
3622 for string values, allowed formats include:
3623 'xsigma' or 'x' (= x-sigma level. e.g.,
3624 '3sigma'), or
3625 'topx' (= the x strongest ones, e.g. 'top5').
3626 default is 3.0 (unit: sigma).
3627 addwn: the additional wave numbers to be used for fitting.
3628 list or integer value is accepted to specify every
3629 wave numbers. also string value can be used in case
3630 you need to specify wave numbers in a certain range,
3631 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3632 '<a' (= 0,1,...,a-2,a-1),
3633 '>=a' (= a, a+1, ... up to the maximum wave
3634 number corresponding to the Nyquist
3635 frequency for the case of FFT).
3636 default is [0].
3637 rejwn: the wave numbers NOT to be used for fitting.
3638 can be set just as addwn but has higher priority:
3639 wave numbers which are specified both in addwn
3640 and rejwn will NOT be used. default is [].
3641 insitu: if False a new scantable is returned.
3642 Otherwise, the scaling is done in-situ
3643 The default is taken from .asaprc (False)
3644 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3645 clipniter: maximum number of iteration of 'clipthresh'-sigma
3646 clipping (default is 0)
3647 edge: an optional number of channel to drop at
3648 the edge of spectrum. If only one value is
3649 specified, the same number will be dropped
3650 from both sides of the spectrum. Default
3651 is to keep all channels. Nested tuples
3652 represent individual edge selection for
3653 different IFs (a number of spectral channels
3654 can be different)
3655 threshold: the threshold used by line finder. It is
3656 better to keep it large as only strong lines
3657 affect the baseline solution.
3658 chan_avg_limit: a maximum number of consequtive spectral
3659 channels to average during the search of
3660 weak and broad lines. The default is no
3661 averaging (and no search for weak lines).
3662 If such lines can affect the fitted baseline
3663 (e.g. a high order polynomial is fitted),
3664 increase this parameter (usually values up
3665 to 8 are reasonable). Most users of this
3666 method should find the default value sufficient.
3667 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3668 plot the fit and the residual. In this each
3669 indivual fit has to be approved, by typing 'y'
3670 or 'n'
3671 getresidual: if False, returns best-fit values instead of
3672 residual. (default is True)
3673 showprogress: show progress status for large data.
3674 default is True.
3675 minnrow: minimum number of input spectra to show.
3676 default is 1000.
3677 outlog: Output the coefficients of the best-fit
3678 function to logger (default is False)
3679 blfile: Name of a text file in which the best-fit
3680 parameter values to be written
3681 (default is "": no file/logger output)
3682 csvformat: if True blfile is csv-formatted, default is False.
3683 bltable: name of a baseline table where fitting results
3684 (coefficients, rms, etc.) are to be written.
3685 if given, fitting results will NOT be output to
3686 scantable (insitu=True) or None will be
3687 returned (insitu=False).
3688 (default is "": no table output)
3689
3690 Example:
3691 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3692
3693 Note:
3694 The best-fit parameter values output in logger and/or blfile are now
3695 based on specunit of 'channel'.
3696 """
3697
3698 try:
3699 varlist = vars()
3700
3701 if insitu is None: insitu = rcParams['insitu']
3702 if insitu:
3703 workscan = self
3704 else:
3705 workscan = self.copy()
3706
3707 if mask is None: mask = []
3708 if applyfft is None: applyfft = True
3709 if fftmethod is None: fftmethod = 'fft'
3710 if fftthresh is None: fftthresh = 3.0
3711 if addwn is None: addwn = [0]
3712 if rejwn is None: rejwn = []
3713 if clipthresh is None: clipthresh = 3.0
3714 if clipniter is None: clipniter = 0
3715 if edge is None: edge = (0,0)
3716 if threshold is None: threshold = 3
3717 if chan_avg_limit is None: chan_avg_limit = 1
3718 if plot is None: plot = False
3719 if getresidual is None: getresidual = True
3720 if showprogress is None: showprogress = True
3721 if minnrow is None: minnrow = 1000
3722 if outlog is None: outlog = False
3723 if blfile is None: blfile = ''
3724 if csvformat is None: csvformat = False
3725 if bltable is None: bltable = ''
3726
3727 sapplyfft = 'true' if applyfft else 'false'
3728 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3729
3730 scsvformat = 'T' if csvformat else 'F'
3731
3732 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3733 workscan._auto_sinusoid_baseline(mask,
3734 fftinfo,
3735 workscan._parse_wn(addwn),
3736 workscan._parse_wn(rejwn),
3737 clipthresh, clipniter,
3738 normalise_edge_param(edge),
3739 threshold, chan_avg_limit,
3740 getresidual,
3741 pack_progress_params(showprogress,
3742 minnrow),
3743 outlog, scsvformat+blfile, bltable)
3744 workscan._add_history("auto_sinusoid_baseline", varlist)
3745
3746 if bltable == '':
3747 if insitu:
3748 self._assign(workscan)
3749 else:
3750 return workscan
3751 else:
3752 if not insitu:
3753 return None
3754
3755 except RuntimeError, e:
3756 raise_fitting_failure_exception(e)
3757
3758 @asaplog_post_dec
3759 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3760 clipthresh=None, clipniter=None, plot=None,
3761 getresidual=None, showprogress=None, minnrow=None,
3762 outlog=None, blfile=None, csvformat=None,
3763 bltable=None):
3764 """\
3765 Return a scan which has been baselined (all rows) by cubic spline
3766 function (piecewise cubic polynomial).
3767
3768 Parameters:
3769 mask: An optional mask
3770 npiece: Number of pieces. (default is 2)
3771 insitu: If False a new scantable is returned.
3772 Otherwise, the scaling is done in-situ
3773 The default is taken from .asaprc (False)
3774 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3775 clipniter: maximum number of iteration of 'clipthresh'-sigma
3776 clipping (default is 0)
3777 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3778 plot the fit and the residual. In this each
3779 indivual fit has to be approved, by typing 'y'
3780 or 'n'
3781 getresidual: if False, returns best-fit values instead of
3782 residual. (default is True)
3783 showprogress: show progress status for large data.
3784 default is True.
3785 minnrow: minimum number of input spectra to show.
3786 default is 1000.
3787 outlog: Output the coefficients of the best-fit
3788 function to logger (default is False)
3789 blfile: Name of a text file in which the best-fit
3790 parameter values to be written
3791 (default is "": no file/logger output)
3792 csvformat: if True blfile is csv-formatted, default is False.
3793 bltable: name of a baseline table where fitting results
3794 (coefficients, rms, etc.) are to be written.
3795 if given, fitting results will NOT be output to
3796 scantable (insitu=True) or None will be
3797 returned (insitu=False).
3798 (default is "": no table output)
3799
3800 Example:
3801 # return a scan baselined by a cubic spline consisting of 2 pieces
3802 # (i.e., 1 internal knot),
3803 # also with 3-sigma clipping, iteration up to 4 times
3804 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3805
3806 Note:
3807 The best-fit parameter values output in logger and/or blfile are now
3808 based on specunit of 'channel'.
3809 """
3810
3811 try:
3812 varlist = vars()
3813
3814 if insitu is None: insitu = rcParams['insitu']
3815 if insitu:
3816 workscan = self
3817 else:
3818 workscan = self.copy()
3819
3820 if mask is None: mask = []
3821 if npiece is None: npiece = 2
3822 if clipthresh is None: clipthresh = 3.0
3823 if clipniter is None: clipniter = 0
3824 if plot is None: plot = False
3825 if getresidual is None: getresidual = True
3826 if showprogress is None: showprogress = True
3827 if minnrow is None: minnrow = 1000
3828 if outlog is None: outlog = False
3829 if blfile is None: blfile = ''
3830 if csvformat is None: csvformat = False
3831 if bltable is None: bltable = ''
3832
3833 scsvformat = 'T' if csvformat else 'F'
3834
3835 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3836 workscan._cspline_baseline(mask, npiece,
3837 clipthresh, clipniter,
3838 getresidual,
3839 pack_progress_params(showprogress,
3840 minnrow),
3841 outlog, scsvformat+blfile,
3842 bltable)
3843 workscan._add_history("cspline_baseline", varlist)
3844
3845 if bltable == '':
3846 if insitu:
3847 self._assign(workscan)
3848 else:
3849 return workscan
3850 else:
3851 if not insitu:
3852 return None
3853
3854 except RuntimeError, e:
3855 raise_fitting_failure_exception(e)
3856
3857 @asaplog_post_dec
3858 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3859 clipthresh=None, clipniter=None,
3860 edge=None, threshold=None, chan_avg_limit=None,
3861 getresidual=None, plot=None,
3862 showprogress=None, minnrow=None, outlog=None,
3863 blfile=None, csvformat=None, bltable=None):
3864 """\
3865 Return a scan which has been baselined (all rows) by cubic spline
3866 function (piecewise cubic polynomial).
3867 Spectral lines are detected first using linefinder and masked out
3868 to avoid them affecting the baseline solution.
3869
3870 Parameters:
3871 mask: an optional mask retreived from scantable
3872 npiece: Number of pieces. (default is 2)
3873 insitu: if False a new scantable is returned.
3874 Otherwise, the scaling is done in-situ
3875 The default is taken from .asaprc (False)
3876 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3877 clipniter: maximum number of iteration of 'clipthresh'-sigma
3878 clipping (default is 0)
3879 edge: an optional number of channel to drop at
3880 the edge of spectrum. If only one value is
3881 specified, the same number will be dropped
3882 from both sides of the spectrum. Default
3883 is to keep all channels. Nested tuples
3884 represent individual edge selection for
3885 different IFs (a number of spectral channels
3886 can be different)
3887 threshold: the threshold used by line finder. It is
3888 better to keep it large as only strong lines
3889 affect the baseline solution.
3890 chan_avg_limit: a maximum number of consequtive spectral
3891 channels to average during the search of
3892 weak and broad lines. The default is no
3893 averaging (and no search for weak lines).
3894 If such lines can affect the fitted baseline
3895 (e.g. a high order polynomial is fitted),
3896 increase this parameter (usually values up
3897 to 8 are reasonable). Most users of this
3898 method should find the default value sufficient.
3899 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3900 plot the fit and the residual. In this each
3901 indivual fit has to be approved, by typing 'y'
3902 or 'n'
3903 getresidual: if False, returns best-fit values instead of
3904 residual. (default is True)
3905 showprogress: show progress status for large data.
3906 default is True.
3907 minnrow: minimum number of input spectra to show.
3908 default is 1000.
3909 outlog: Output the coefficients of the best-fit
3910 function to logger (default is False)
3911 blfile: Name of a text file in which the best-fit
3912 parameter values to be written
3913 (default is "": no file/logger output)
3914 csvformat: if True blfile is csv-formatted, default is False.
3915 bltable: name of a baseline table where fitting results
3916 (coefficients, rms, etc.) are to be written.
3917 if given, fitting results will NOT be output to
3918 scantable (insitu=True) or None will be
3919 returned (insitu=False).
3920 (default is "": no table output)
3921
3922 Example:
3923 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3924
3925 Note:
3926 The best-fit parameter values output in logger and/or blfile are now
3927 based on specunit of 'channel'.
3928 """
3929
3930 try:
3931 varlist = vars()
3932
3933 if insitu is None: insitu = rcParams['insitu']
3934 if insitu:
3935 workscan = self
3936 else:
3937 workscan = self.copy()
3938
3939 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3940 if mask is None: mask = []
3941 if npiece is None: npiece = 2
3942 if clipthresh is None: clipthresh = 3.0
3943 if clipniter is None: clipniter = 0
3944 if edge is None: edge = (0, 0)
3945 if threshold is None: threshold = 3
3946 if chan_avg_limit is None: chan_avg_limit = 1
3947 if plot is None: plot = False
3948 if getresidual is None: getresidual = True
3949 if showprogress is None: showprogress = True
3950 if minnrow is None: minnrow = 1000
3951 if outlog is None: outlog = False
3952 if blfile is None: blfile = ''
3953 if csvformat is None: csvformat = False
3954 if bltable is None: bltable = ''
3955
3956 scsvformat = 'T' if csvformat else 'F'
3957
3958 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3959 workscan._auto_cspline_baseline(mask, npiece,
3960 clipthresh, clipniter,
3961 normalise_edge_param(edge),
3962 threshold,
3963 chan_avg_limit, getresidual,
3964 pack_progress_params(showprogress,
3965 minnrow),
3966 outlog,
3967 scsvformat+blfile,
3968 bltable)
3969 workscan._add_history("auto_cspline_baseline", varlist)
3970
3971 if bltable == '':
3972 if insitu:
3973 self._assign(workscan)
3974 else:
3975 return workscan
3976 else:
3977 if not insitu:
3978 return None
3979
3980 except RuntimeError, e:
3981 raise_fitting_failure_exception(e)
3982
3983 @asaplog_post_dec
3984 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
3985 clipthresh=None, clipniter=None, plot=None,
3986 getresidual=None, showprogress=None, minnrow=None,
3987 outlog=None, blfile=None, csvformat=None,
3988 bltable=None):
3989 """\
3990 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3991
3992 Parameters:
3993 mask: An optional mask
3994 order: the maximum order of Chebyshev polynomial (default is 5)
3995 insitu: If False a new scantable is returned.
3996 Otherwise, the scaling is done in-situ
3997 The default is taken from .asaprc (False)
3998 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3999 clipniter: maximum number of iteration of 'clipthresh'-sigma
4000 clipping (default is 0)
4001 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4002 plot the fit and the residual. In this each
4003 indivual fit has to be approved, by typing 'y'
4004 or 'n'
4005 getresidual: if False, returns best-fit values instead of
4006 residual. (default is True)
4007 showprogress: show progress status for large data.
4008 default is True.
4009 minnrow: minimum number of input spectra to show.
4010 default is 1000.
4011 outlog: Output the coefficients of the best-fit
4012 function to logger (default is False)
4013 blfile: Name of a text file in which the best-fit
4014 parameter values to be written
4015 (default is "": no file/logger output)
4016 csvformat: if True blfile is csv-formatted, default is False.
4017 bltable: name of a baseline table where fitting results
4018 (coefficients, rms, etc.) are to be written.
4019 if given, fitting results will NOT be output to
4020 scantable (insitu=True) or None will be
4021 returned (insitu=False).
4022 (default is "": no table output)
4023
4024 Example:
4025 # return a scan baselined by a cubic spline consisting of 2 pieces
4026 # (i.e., 1 internal knot),
4027 # also with 3-sigma clipping, iteration up to 4 times
4028 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4029
4030 Note:
4031 The best-fit parameter values output in logger and/or blfile are now
4032 based on specunit of 'channel'.
4033 """
4034
4035 try:
4036 varlist = vars()
4037
4038 if insitu is None: insitu = rcParams['insitu']
4039 if insitu:
4040 workscan = self
4041 else:
4042 workscan = self.copy()
4043
4044 if mask is None: mask = []
4045 if order is None: order = 5
4046 if clipthresh is None: clipthresh = 3.0
4047 if clipniter is None: clipniter = 0
4048 if plot is None: plot = False
4049 if getresidual is None: getresidual = True
4050 if showprogress is None: showprogress = True
4051 if minnrow is None: minnrow = 1000
4052 if outlog is None: outlog = False
4053 if blfile is None: blfile = ''
4054 if csvformat is None: csvformat = False
4055 if bltable is None: bltable = ''
4056
4057 scsvformat = 'T' if csvformat else 'F'
4058
4059 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4060 workscan._chebyshev_baseline(mask, order,
4061 clipthresh, clipniter,
4062 getresidual,
4063 pack_progress_params(showprogress,
4064 minnrow),
4065 outlog, scsvformat+blfile,
4066 bltable)
4067 workscan._add_history("chebyshev_baseline", varlist)
4068
4069 if bltable == '':
4070 if insitu:
4071 self._assign(workscan)
4072 else:
4073 return workscan
4074 else:
4075 if not insitu:
4076 return None
4077
4078 except RuntimeError, e:
4079 raise_fitting_failure_exception(e)
4080
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve defaults for all optional parameters
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is encoded as a single 'T'/'F' character prepended
            # to the blfile name for the low-level call below
            scsvformat = 'T' if csvformat else 'F'

            # NOTE: plot=True is currently unavailable; the 'plot' argument
            # is accepted but not used below.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table is written, results are not propagated
            # back to a scantable (see the 'bltable' parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4203
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            mask:           an optional mask
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve defaults for all optional parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is encoded as a single 'T'/'F' character prepended
            # to the blfile name for the low-level call below
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive row-by-row fitting with a Python fitter
                # NOTE(review): the text file is written to only if it
                # already exists (append mode) -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # the initial 'H' makes the first verification prompt show
                # the help text (see _get_verify_action)
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # after row 0 the user is prompted without help
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter, #
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable) #

            workscan._add_history("poly_baseline", varlist)

            # NOTE(review): unlike the other *_baseline methods in this
            # class, the return path below does not special-case
            # bltable != '' -- confirm whether that is intended.
            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4353
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve defaults for all optional parameters
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is encoded as a single 'T'/'F' character prepended
            # to the blfile name for the low-level call below
            scsvformat = 'T' if csvformat else 'F'

            # flatten nested per-IF edge specs into a flat list of pairs,
            # addressed below as edge[2*ifno : 2*ifno+2]
            edge = normalise_edge_param(edge)

            if plot:
                # interactive row-by-row fitting with a Python fitter
                # NOTE(review): the text file is written to only if it
                # already exists (append mode) -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # the initial 'H' makes the first verification prompt show
                # the help text (see _get_verify_action)
                action = "H"
                for r in rows:
                    # pick this row's (start, end) edge pair from the
                    # flattened edge list
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by the linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # after row 0 the user is prompted without help
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: delegate the whole fit to the underlying
                # _auto_poly_baseline implementation
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is written, results are not propagated
            # back to a scantable (see the 'bltable' parameter description)
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4538
4539 def _init_blinfo(self):
4540 """\
4541 Initialise the following three auxiliary members:
4542 blpars : parameters of the best-fit baseline,
4543 masklists : mask data (edge positions of masked channels) and
4544 actualmask : mask data (in boolean list),
4545 to keep for use later (including output to logger/text files).
4546 Used by poly_baseline() and auto_poly_baseline() in case of
4547 'plot=True'.
4548 """
4549 self.blpars = []
4550 self.masklists = []
4551 self.actualmask = []
4552 return
4553
4554 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4555 """\
4556 Append baseline-fitting related info to blpars, masklist and
4557 actualmask.
4558 """
4559 self.blpars.append(data_blpars)
4560 self.masklists.append(data_masklists)
4561 self.actualmask.append(data_actualmask)
4562 return
4563
4564 @asaplog_post_dec
4565 def rotate_linpolphase(self, angle):
4566 """\
4567 Rotate the phase of the complex polarization O=Q+iU correlation.
4568 This is always done in situ in the raw data. So if you call this
4569 function more than once then each call rotates the phase further.
4570
4571 Parameters:
4572
4573 angle: The angle (degrees) to rotate (add) by.
4574
4575 Example::
4576
4577 scan.rotate_linpolphase(2.3)
4578
4579 """
4580 varlist = vars()
4581 self._math._rotate_linpolphase(self, angle)
4582 self._add_history("rotate_linpolphase", varlist)
4583 return
4584
4585 @asaplog_post_dec
4586 def rotate_xyphase(self, angle):
4587 """\
4588 Rotate the phase of the XY correlation. This is always done in situ
4589 in the data. So if you call this function more than once
4590 then each call rotates the phase further.
4591
4592 Parameters:
4593
4594 angle: The angle (degrees) to rotate (add) by.
4595
4596 Example::
4597
4598 scan.rotate_xyphase(2.3)
4599
4600 """
4601 varlist = vars()
4602 self._math._rotate_xyphase(self, angle)
4603 self._add_history("rotate_xyphase", varlist)
4604 return
4605
4606 @asaplog_post_dec
4607 def swap_linears(self):
4608 """\
4609 Swap the linear polarisations XX and YY, or better the first two
4610 polarisations as this also works for ciculars.
4611 """
4612 varlist = vars()
4613 self._math._swap_linears(self)
4614 self._add_history("swap_linears", varlist)
4615 return
4616
4617 @asaplog_post_dec
4618 def invert_phase(self):
4619 """\
4620 Invert the phase of the complex polarisation
4621 """
4622 varlist = vars()
4623 self._math._invert_phase(self)
4624 self._add_history("invert_phase", varlist)
4625 return
4626
4627 @asaplog_post_dec
4628 def add(self, offset, insitu=None):
4629 """\
4630 Return a scan where all spectra have the offset added
4631
4632 Parameters:
4633
4634 offset: the offset
4635
4636 insitu: if False a new scantable is returned.
4637 Otherwise, the scaling is done in-situ
4638 The default is taken from .asaprc (False)
4639
4640 """
4641 if insitu is None: insitu = rcParams['insitu']
4642 self._math._setinsitu(insitu)
4643 varlist = vars()
4644 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4645 s._add_history("add", varlist)
4646 if insitu:
4647 self._assign(s)
4648 else:
4649 return s
4650
4651 @asaplog_post_dec
4652 def scale(self, factor, tsys=True, insitu=None):
4653 """\
4654
4655 Return a scan where all spectra are scaled by the given 'factor'
4656
4657 Parameters:
4658
4659 factor: the scaling factor (float or 1D float list)
4660
4661 insitu: if False a new scantable is returned.
4662 Otherwise, the scaling is done in-situ
4663 The default is taken from .asaprc (False)
4664
4665 tsys: if True (default) then apply the operation to Tsys
4666 as well as the data
4667
4668 """
4669 if insitu is None: insitu = rcParams['insitu']
4670 self._math._setinsitu(insitu)
4671 varlist = vars()
4672 s = None
4673 import numpy
4674 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4675 if isinstance(factor[0], list) or isinstance(factor[0],
4676 numpy.ndarray):
4677 from asapmath import _array2dOp
4678 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4679 else:
4680 s = scantable( self._math._arrayop( self, factor,
4681 "MUL", tsys ) )
4682 else:
4683 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4684 s._add_history("scale", varlist)
4685 if insitu:
4686 self._assign(s)
4687 else:
4688 return s
4689
4690 @preserve_selection
4691 def set_sourcetype(self, match, matchtype="pattern",
4692 sourcetype="reference"):
4693 """\
4694 Set the type of the source to be an source or reference scan
4695 using the provided pattern.
4696
4697 Parameters:
4698
4699 match: a Unix style pattern, regular expression or selector
4700
4701 matchtype: 'pattern' (default) UNIX style pattern or
4702 'regex' regular expression
4703
4704 sourcetype: the type of the source to use (source/reference)
4705
4706 """
4707 varlist = vars()
4708 stype = -1
4709 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4710 stype = 1
4711 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4712 stype = 0
4713 else:
4714 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4715 if matchtype.lower().startswith("p"):
4716 matchtype = "pattern"
4717 elif matchtype.lower().startswith("r"):
4718 matchtype = "regex"
4719 else:
4720 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4721 sel = selector()
4722 if isinstance(match, selector):
4723 sel = match
4724 else:
4725 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4726 self.set_selection(sel)
4727 self._setsourcetype(stype)
4728 self._add_history("set_sourcetype", varlist)
4729
4730
4731 def set_sourcename(self, name):
4732 varlist = vars()
4733 self._setsourcename(name)
4734 self._add_history("set_sourcename", varlist)
4735
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it.  The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        # NOTE: 'verify' is currently unused (see the todo above).
        # The original selection is restored by @preserve_selection.
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # select the reference ('off') integrations by source type and
            # copy them, then do the same for the 'on' integrations
            from asap._asap import srctype
            sel = self.get_selection()
            #sel.set_query("SRCTYPE==psoff")
            sel.set_types(srctype.psoff)
            self.set_selection(sel)
            offs = self.copy()
            #sel.set_query("SRCTYPE==pson")
            sel.set_types(srctype.pson)
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # let the math server pair each 'on' with the closest 'off' in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
4787
4788 @asaplog_post_dec
4789 def mx_quotient(self, mask = None, weight='median', preserve=True):
4790 """\
4791 Form a quotient using "off" beams when observing in "MX" mode.
4792
4793 Parameters:
4794
4795 mask: an optional mask to be used when weight == 'stddev'
4796
4797 weight: How to average the off beams. Default is 'median'.
4798
4799 preserve: you can preserve (default) the continuum or
4800 remove it. The equations used are:
4801
4802 preserve: Output = Toff * (on/off) - Toff
4803
4804 remove: Output = Toff * (on/off) - Ton
4805
4806 """
4807 mask = mask or ()
4808 varlist = vars()
4809 on = scantable(self._math._mx_extract(self, 'on'))
4810 preoff = scantable(self._math._mx_extract(self, 'off'))
4811 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4812 from asapmath import quotient
4813 q = quotient(on, off, preserve)
4814 q._add_history("mx_quotient", varlist)
4815 return q
4816
4817 @asaplog_post_dec
4818 def freq_switch(self, insitu=None):
4819 """\
4820 Apply frequency switching to the data.
4821
4822 Parameters:
4823
4824 insitu: if False a new scantable is returned.
4825 Otherwise, the swictching is done in-situ
4826 The default is taken from .asaprc (False)
4827
4828 """
4829 if insitu is None: insitu = rcParams['insitu']
4830 self._math._setinsitu(insitu)
4831 varlist = vars()
4832 s = scantable(self._math._freqswitch(self))
4833 s._add_history("freq_switch", varlist)
4834 if insitu:
4835 self._assign(s)
4836 else:
4837 return s
4838
4839 @asaplog_post_dec
4840 def recalc_azel(self):
4841 """Recalculate the azimuth and elevation for each position."""
4842 varlist = vars()
4843 self._recalcazel()
4844 self._add_history("recalc_azel", varlist)
4845 return
4846
4847 @asaplog_post_dec
4848 def __add__(self, other):
4849 """
4850 implicit on all axes and on Tsys
4851 """
4852 varlist = vars()
4853 s = self.__op( other, "ADD" )
4854 s._add_history("operator +", varlist)
4855 return s
4856
4857 @asaplog_post_dec
4858 def __sub__(self, other):
4859 """
4860 implicit on all axes and on Tsys
4861 """
4862 varlist = vars()
4863 s = self.__op( other, "SUB" )
4864 s._add_history("operator -", varlist)
4865 return s
4866
4867 @asaplog_post_dec
4868 def __mul__(self, other):
4869 """
4870 implicit on all axes and on Tsys
4871 """
4872 varlist = vars()
4873 s = self.__op( other, "MUL" ) ;
4874 s._add_history("operator *", varlist)
4875 return s
4876
4877
4878 @asaplog_post_dec
4879 def __div__(self, other):
4880 """
4881 implicit on all axes and on Tsys
4882 """
4883 varlist = vars()
4884 s = self.__op( other, "DIV" )
4885 s._add_history("operator /", varlist)
4886 return s
4887
4888 @asaplog_post_dec
4889 def __op( self, other, mode ):
4890 s = None
4891 if isinstance(other, scantable):
4892 s = scantable(self._math._binaryop(self, other, mode))
4893 elif isinstance(other, float):
4894 if other == 0.0:
4895 raise ZeroDivisionError("Dividing by zero is not recommended")
4896 s = scantable(self._math._unaryop(self, other, mode, False))
4897 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4898 if isinstance(other[0], list) \
4899 or isinstance(other[0], numpy.ndarray):
4900 from asapmath import _array2dOp
4901 s = _array2dOp( self, other, mode, False )
4902 else:
4903 s = scantable( self._math._arrayop( self, other,
4904 mode, False ) )
4905 else:
4906 raise TypeError("Other input is not a scantable or float value")
4907 return s
4908
4909 @asaplog_post_dec
4910 def get_fit(self, row=0):
4911 """\
4912 Print or return the stored fits for a row in the scantable
4913
4914 Parameters:
4915
4916 row: the row which the fit has been applied to.
4917
4918 """
4919 if row > self.nrow():
4920 return
4921 from asap.asapfit import asapfit
4922 fit = asapfit(self._getfit(row))
4923 asaplog.push( '%s' %(fit) )
4924 return fit.as_dict()
4925
4926 @preserve_selection
4927 def flag_nans(self):
4928 """\
4929 Utility function to flag NaN values in the scantable.
4930 """
4931 import numpy
4932 basesel = self.get_selection()
4933 for i in range(self.nrow()):
4934 sel = self.get_row_selector(i)
4935 self.set_selection(basesel+sel)
4936 nans = numpy.isnan(self._getspectrum(0))
4937 if numpy.any(nans):
4938 bnans = [ bool(v) for v in nans]
4939 self.flag(bnans)
4940
4941 self.set_selection(basesel)
4942
4943 def get_row_selector(self, rowno):
4944 return selector(rows=[rowno])
4945
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable history, recording *funcname* and
        a string rendering of the *parameters* dict (typically vars() taken
        inside the calling method).  Disabled when
        rcParams['scantable.history'] is false.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # 'self' would only add noise to the history record
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nesting: record each item of a dict argument
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid embedding a full scantable representation
                        hist += 'scantable'
                    elif k2 == 'mask':
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            # compress boolean masks to [start, end) segments
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4986
4987
4988 def _zip_mask(self, mask):
4989 mask = list(mask)
4990 i = 0
4991 segments = []
4992 while mask[i:].count(1):
4993 i += mask[i:].index(1)
4994 if mask[i:].count(0):
4995 j = i + mask[i:].index(0)
4996 else:
4997 j = len(mask)
4998 segments.append([i, j])
4999 i = j
5000 return segments
5001
5002 def _get_ordinate_label(self):
5003 fu = "("+self.get_fluxunit()+")"
5004 import re
5005 lbl = "Intensity"
5006 if re.match(".K.", fu):
5007 lbl = "Brightness Temperature "+ fu
5008 elif re.match(".Jy.", fu):
5009 lbl = "Flux density "+ fu
5010 return lbl
5011
5012 def _check_ifs(self):
5013# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5014 nchans = [self.nchan(i) for i in self.getifnos()]
5015 nchans = filter(lambda t: t > 0, nchans)
5016 return (sum(nchans)/len(nchans) == nchans[0])
5017
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import the listed files/directories into this scantable (in place),
        merging them when more than one is given.

        Parameters:
            names:    list of file/directory names to import
            unit:     flux unit to set after filling, or None to leave as-is
            average:  if True, average integrations per scan after filling
            opts:     extra options passed through to the filler's open()
        Raises:
            IOError when one of the names does not exist.

        NOTE(review): 'opts={}' is a shared mutable default; this is safe
        only as long as r.open() does not mutate it -- confirm against the
        filler implementation.
        """
        first = True
        fullnames = []
        # expand environment variables and '~', and validate existence
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type from .asaprc: 1 = disk, 0 = memory
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # measurement sets go through the MS filler, everything else
            # through the generic filler
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge this file's data with what has been read so far
                tbl = self._math._merge([self, tbl])
            # re-initialise self with the (possibly averaged/merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5058
    def _get_verify_action( self, msg, action=None ):
        """\
        Ask the user to verify an interactive step (via raw_input).

        Parameters:
            msg:     prompt text shown to the user
            action:  pre-selected action; when a non-empty string, its
                     first letter is used and the user is not prompted
        Returns one of 'Y', 'N', 'A' or 'R'; empty or unrecognised input
        defaults to 'Y'.  'H' or '?' prints the help text and prompts
        again.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            # show the help text, then re-prompt (Python 2 print statements)
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            return self._get_verify_action(msg)
        else:
            return 'Y'
5077
5078 def __getitem__(self, key):
5079 if key < 0:
5080 key += self.nrow()
5081 if key >= self.nrow():
5082 raise IndexError("Row index out of range.")
5083 return self._getspectrum(key)
5084
5085 def __setitem__(self, key, value):
5086 if key < 0:
5087 key += self.nrow()
5088 if key >= self.nrow():
5089 raise IndexError("Row index out of range.")
5090 if not hasattr(value, "__len__") or \
5091 len(value) > self.nchan(self.getif(key)):
5092 raise ValueError("Spectrum length doesn't match.")
5093 return self._setspectrum(value, key)
5094
5095 def __len__(self):
5096 return self.nrow()
5097
5098 def __iter__(self):
5099 for i in range(len(self)):
5100 yield self[i]
Note: See TracBrowser for help on using the repository browser.