source: trunk/python/scantable.py@ 2885

Last change on this file since 2885 was 2885, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: modified sd.scantable.parse_spw_selection() so that it returns 'all channels of all spws' when '*' is given.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 199.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped method may change the selection freely; the selection that
    was active before the call is reinstated afterwards, even on error.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # reinstate the original selection even if func raised
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    """
    # a scantable is a casacore table directory containing both
    # 'table.info' and 'table.dat'
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    firstline = f.readline()
    f.close()
    # the first line of table.info declares the table type
    match_pattern = '^Type = (Scantable)? *$'
    return bool(re.match(match_pattern, firstline))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    """
    info_file = filename + '/table.info'
    dat_file = filename + '/table.dat'
    # a MeasurementSet is also a casacore table directory; the type is
    # declared on the first line of table.info
    if os.path.isdir(filename) \
            and os.path.exists(info_file) \
            and os.path.exists(dat_file):
        f = open(info_file)
        headline = f.readline()
        f.close()
        return headline.find('Measurement Set') != -1
    return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # ---- validation: every element must be an int or a pair of ints ----
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."


    # ---- canonicalise trivial scalar/short forms to a 2-element list ----
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # 'commonedge' is True when a single pair applies to every IF;
    # any nested sequence (or length > 2) means per-IF values were given
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: flatten, expanding bare ints n to [n, n]
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure.

    In verbose mode the original error and an explanatory message are only
    pushed to the log; otherwise both are re-raised as a RuntimeError.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress options into a 'true/false,minnrow' string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        # an empty string means the row was out of range - drop it
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        packed_list = []
        for info in blinfo:
            packed = do_pack_blinfo(info, maxirow)
            if packed != '':
                packed_list.append(packed)
        return packed_list
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.
    """
    dinfo = {}
    # mandatory keys: row number, baseline function name and mask list;
    # sequence values are flattened into 'a,b,c,...' strings
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        # one nesting level (e.g. masklist pairs) is flattened
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the scantable size are silently skipped
    if irow >= maxirow: return ''

    # optional function-specific parameters (stringified like above)
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    # maps each baseline function to the parameter it requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # clipping parameters must be given together or not at all
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        # defaults: no clipping
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder parameters: all three or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # fields are joined with ':' in the fixed order the consumer expects
    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    parsed = []
    for entry in sres:
        (srow, scoeff, srms) = entry.split(":")
        coeffs = [float(c) for c in scoeff.split(",")]
        parsed.append({'row': int(srow), 'coeff': coeffs, 'rms': float(srms)})
    return parsed
302
def is_number(s):
    """Return True if the string *s* can be parsed as a float number.

    Parameters:

        s:    the string to test (surrounding whitespace is ignored)

    """
    s = s.strip()
    try:
        float(s)
        return True
    except ValueError:
        # not a valid numeric literal.  Catching only ValueError (instead
        # of the previous bare 'except' with 'return' inside 'finally')
        # avoids silently swallowing unrelated exceptions such as
        # KeyboardInterrupt.
        return False
313
def is_frequency(s):
    """Return True if the string ends with 'hz' (case-insensitive)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a pair of strings (value, frequency-with-unit) to Hz values.

    *s1* must be a bare number and *s2* a number ending in a possibly
    SI-prefixed 'Hz' unit; the prefix scaling is applied to both values.
    Returns a (value1, value2) tuple.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    # SI prefixes and their scale factors (the '.' slot covers 'no prefix')
    si_prefixes = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    si_factors = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before 'Hz' is the SI prefix, if any
    prefix = s2[-3:-2]
    if is_number(prefix):
        # plain 'Hz' unit without a prefix
        return (float(s1), float(s2[:-2]))
    scale = si_factors[si_prefixes.index(prefix)]
    return (float(s1) * scale, float(s2[:-3]) * scale)
338
def is_velocity(s):
    """Return True if the string ends with 'm/s' (case-insensitive)."""
    stripped = s.strip()
    return stripped[-3:].lower() == "m/s"
342
def get_velocity_by_string(s1, s2):
    """Convert a pair of strings (value, velocity-with-unit) to km/s values.

    *s1* must be a bare number and *s2* a number ending in 'm/s' or
    'km/s'.  Returns a (value1, value2) tuple in km/s.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    unit_prefixes = [".", "k"]
    unit_factors = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # the character just before 'm/s' is the unit prefix, if any
    prefix = s2[-4:-3]
    if is_number(prefix):
        # unit is plain 'm/s': scale down to km/s
        return (float(s1) * 1e-3, float(s2[:-3]) * 1e-3)
    scale = unit_factors[unit_prefixes.index(prefix)]
    return (float(s1) * scale, float(s2[:-4]) * scale)
364
def get_frequency_by_velocity(restfreq, vel):
    """Convert a radial velocity into a frequency.

    Uses the relativistic Doppler formula
    f = restfreq * sqrt((1 - v/c) / (1 + v/c)).

    Parameters:

        restfreq:    the rest frequency
        vel:         velocity in km/s

    """
    # speed of light in km/s
    vel_c = 299792.458

    import math
    beta = vel / vel_c
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
374
375
376class scantable(Scantable):
377 """\
378 The ASAP container for scans (single-dish data).
379 """
380
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": just reference the existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native ASAP table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file is passed to the filler with
                    # 'nro' options (presumably NRO data - see msfiller/filler)
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list of file names is filled as one scantable
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
467
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the format configured in the rc file
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated MeasurementSet writer
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats are handled by the generic table writer
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
526
527 def copy(self):
528 """Return a copy of this scantable.
529
530 *Note*:
531
532 This makes a full (deep) copy. scan2 = scan1 makes a reference.
533
534 Example::
535
536 copiedscan = scan.copy()
537
538 """
539 sd = scantable(Scantable._copy(self))
540 return sd
541
542 def drop_scan(self, scanid=None):
543 """\
544 Return a new scantable where the specified scan number(s) has(have)
545 been dropped.
546
547 Parameters:
548
549 scanid: a (list of) scan number(s)
550
551 """
552 from asap import _is_sequence_or_number as _is_valid
553 from asap import _to_list
554 from asap import unique
555 if not _is_valid(scanid):
556 raise RuntimeError( 'Please specify a scanno to drop from the'
557 ' scantable' )
558 scanid = _to_list(scanid)
559 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
560 for sid in scanid: allscans.remove(sid)
561 if len(allscans) == 0:
562 raise ValueError("Can't remove all scans")
563 sel = selector(scans=allscans)
564 return self._select_copy(sel)
565
566 def _select_copy(self, selection):
567 orig = self.get_selection()
568 self.set_selection(orig+selection)
569 cp = self.copy()
570 self.set_selection(orig)
571 return cp
572
573 def get_scan(self, scanid=None):
574 """\
575 Return a specific scan (by scanno) or collection of scans (by
576 source name) in a new scantable.
577
578 *Note*:
579
580 See scantable.drop_scan() for the inverse operation.
581
582 Parameters:
583
584 scanid: a (list of) scanno or a source name, unix-style
585 patterns are accepted for source name matching, e.g.
586 '*_R' gets all 'ref scans
587
588 Example::
589
590 # get all scans containing the source '323p459'
591 newscan = scan.get_scan('323p459')
592 # get all 'off' scans
593 refscans = scan.get_scan('*_R')
594 # get a susbset of scans by scanno (as listed in scan.summary())
595 newscan = scan.get_scan([0, 2, 7, 10])
596
597 """
598 if scanid is None:
599 raise RuntimeError( 'Please specify a scan no or name to '
600 'retrieve from the scantable' )
601 try:
602 bsel = self.get_selection()
603 sel = selector()
604 if type(scanid) is str:
605 sel.set_name(scanid)
606 return self._select_copy(sel)
607 elif type(scanid) is int:
608 sel.set_scans([scanid])
609 return self._select_copy(sel)
610 elif type(scanid) is list:
611 sel.set_scans(scanid)
612 return self._select_copy(sel)
613 else:
614 msg = "Illegal scanid type, use 'int' or 'list' if ints."
615 raise TypeError(msg)
616 except RuntimeError:
617 raise
618
619 def __str__(self):
620 tempFile = tempfile.NamedTemporaryFile()
621 Scantable._summary(self, tempFile.name)
622 tempFile.seek(0)
623 asaplog.clear()
624 return tempFile.file.read()
625
626 @asaplog_post_dec
627 def summary(self, filename=None):
628 """\
629 Print a summary of the contents of this scantable.
630
631 Parameters:
632
633 filename: the name of a file to write the putput to
634 Default - no file output
635
636 """
637 if filename is not None:
638 if filename is "":
639 filename = 'scantable_summary.txt'
640 from os.path import expandvars, isdir
641 filename = expandvars(filename)
642 if isdir(filename):
643 msg = "Illegal file name '%s'." % (filename)
644 raise IOError(msg)
645 else:
646 filename = ""
647 Scantable._summary(self, filename)
648
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the spectrum from

        Returns:

            the spectral values of row *rowno* as a list

        """
        return self._getspectrum(rowno)
658
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno:    the row number to retrieve the mask from

        Returns:

            the channel mask of row *rowno* as a list

        """
        return self._getmask(rowno)
668
669 def set_spectrum(self, spec, rowno):
670 """Set the spectrum for the current row in the scantable.
671
672 Parameters:
673
674 spec: the new spectrum
675
676 rowno: the row number to set the spectrum for
677
678 """
679 assert(len(spec) == self.nchan(self.getif(rowno)))
680 return self._setspectrum(spec, rowno)
681
682 def get_coordinate(self, rowno):
683 """Return the (spectral) coordinate for a a given 'rowno'.
684
685 *Note*:
686
687 * This coordinate is only valid until a scantable method modifies
688 the frequency axis.
689 * This coordinate does contain the original frequency set-up
690 NOT the new frame. The conversions however are done using the user
691 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
692 use scantable.freq_align first. Without it there is no closure,
693 i.e.::
694
695 c = myscan.get_coordinate(0)
696 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
697
698 Parameters:
699
700 rowno: the row number for the spectral coordinate
701
702 """
703 return coordinate(Scantable.get_coordinate(self, rowno))
704
705 def get_selection(self):
706 """\
707 Get the selection object currently set on this scantable.
708
709 Example::
710
711 sel = scan.get_selection()
712 sel.set_ifs(0) # select IF 0
713 scan.set_selection(sel) # apply modified selection
714
715 """
716 return selector(self._getselection())
717
718 def set_selection(self, selection=None, **kw):
719 """\
720 Select a subset of the data. All following operations on this scantable
721 are only applied to thi selection.
722
723 Parameters:
724
725 selection: a selector object (default unset the selection), or
726 any combination of 'pols', 'ifs', 'beams', 'scans',
727 'cycles', 'name', 'query'
728
729 Examples::
730
731 sel = selector() # create a selection object
732 self.set_scans([0, 3]) # select SCANNO 0 and 3
733 scan.set_selection(sel) # set the selection
734 scan.summary() # will only print summary of scanno 0 an 3
735 scan.set_selection() # unset the selection
736 # or the equivalent
737 scan.set_selection(scans=[0,3])
738 scan.summary() # will only print summary of scanno 0 an 3
739 scan.set_selection() # unset the selection
740
741 """
742 if selection is None:
743 # reset
744 if len(kw) == 0:
745 selection = selector()
746 else:
747 # try keywords
748 for k in kw:
749 if k not in selector.fields:
750 raise KeyError("Invalid selection key '%s', "
751 "valid keys are %s" % (k,
752 selector.fields))
753 selection = selector(**kw)
754 self._setselection(selection)
755
756 def get_row(self, row=0, insitu=None):
757 """\
758 Select a row in the scantable.
759 Return a scantable with single row.
760
761 Parameters:
762
763 row: row no of integration, default is 0.
764 insitu: if False a new scantable is returned. Otherwise, the
765 scaling is done in-situ. The default is taken from .asaprc
766 (False)
767
768 """
769 if insitu is None:
770 insitu = rcParams['insitu']
771 if not insitu:
772 workscan = self.copy()
773 else:
774 workscan = self
775 # Select a row
776 sel = selector()
777 sel.set_rows([row])
778 workscan.set_selection(sel)
779 if not workscan.nrow() == 1:
780 msg = "Could not identify single row. %d rows selected." \
781 % (workscan.nrow())
782 raise RuntimeError(msg)
783 if insitu:
784 self._assign(workscan)
785 else:
786 return workscan
787
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' variants return the abscissa (channel/freq) of the extremum
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # for min/max also fetch the channel where the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report line per row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa requested: collect it as the result value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # the report is round-tripped through a temp file before logging
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
895
896 def chan2data(self, rowno=0, chan=0):
897 """\
898 Returns channel/frequency/velocity and spectral value
899 at an arbitrary row and channel in the scantable.
900
901 Parameters:
902
903 rowno: a row number in the scantable. Default is the
904 first row, i.e. rowno=0
905
906 chan: a channel in the scantable. Default is the first
907 channel, i.e. pos=0
908
909 """
910 if isinstance(rowno, int) and isinstance(chan, int):
911 qx = {'unit': self.get_unit(),
912 'value': self._getabcissa(rowno)[chan]}
913 qy = {'unit': self.get_fluxunit(),
914 'value': self._getspectrum(rowno)[chan]}
915 return qx, qy
916
917 def stddev(self, mask=None):
918 """\
919 Determine the standard deviation of the current beam/if/pol
920 Takes a 'mask' as an optional parameter to specify which
921 channels should be excluded.
922
923 Parameters:
924
925 mask: an optional mask specifying where the standard
926 deviation should be determined.
927
928 Example::
929
930 scan.set_unit('channel')
931 msk = scan.create_mask([100, 200], [500, 600])
932 scan.stddev(mask=m)
933
934 """
935 return self.stats(stat='stddev', mask=mask);
936
937
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # wrap the C++ return value in a plain Python list
        return list(Scantable.get_column_names(self))
943
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # single-row request: plain column access; otherwise go through
        # the reporting callback which also logs a per-row summary
        if row > -1:
            return self._get_column(self._gettsys, row)
        return self._row_callback(self._gettsys, "Tsys")
960
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys spectra (one value per channel) for the
            current selection

        """
        return self._get_column( self._gettsysspectrum, row )
975
    def set_tsys(self, values, row=-1):
        """\
        Set the Tsys value(s) of the given 'row' or the whole scantable
        (selection).

        Parameters:

            values:    a scalar or list (if Tsys is a vector) of Tsys value(s)
            row:       the row number to apply Tsys values to.
                       (default all rows)

        """

        # wrap scalars so the lower layer always receives a sequence
        if not hasattr(values, "__len__"):
            values = [values]
        self._settsys(values, row)
992
993 def get_weather(self, row=-1):
994 """\
995 Return the weather informations.
996
997 Parameters:
998
999 row: the rowno to get the information for. (default all rows)
1000
1001 Returns:
1002
1003 a dict or list of of dicts of values for the current selection
1004
1005 """
1006
1007 values = self._get_column(self._get_weather, row)
1008 if row > -1:
1009 return {'temperature': values[0],
1010 'pressure': values[1], 'humidity' : values[2],
1011 'windspeed' : values[3], 'windaz' : values[4]
1012 }
1013 else:
1014 out = []
1015 for r in values:
1016
1017 out.append({'temperature': r[0],
1018 'pressure': r[1], 'humidity' : r[2],
1019 'windspeed' : r[3], 'windaz' : r[4]
1020 })
1021 return out
1022
    def _row_callback(self, callback, label):
        # Apply callback(rowno) to every row, push a formatted report
        # titled *label* to the log, and return the per-row results.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
1046
1047 def _get_column(self, callback, row=-1, *args):
1048 """
1049 """
1050 if row == -1:
1051 return [callback(i, *args) for i in range(self.nrow())]
1052 else:
1053 if 0 <= row < self.nrow():
1054 return callback(row, *args)
1055
1056
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the trailing time fields (.%f, %S, %M) that are not
            # present in the string at this precision
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1102
1103
1104 def get_inttime(self, row=-1):
1105 """\
1106 Get a list of integration times for the observations.
1107 Return a time in seconds for each integration in the scantable.
1108
1109 Parameters:
1110
1111 row: row no of integration. Default -1 return all rows.
1112
1113 """
1114 return self._get_column(self._getinttime, row)
1115
1116
1117 def get_sourcename(self, row=-1):
1118 """\
1119 Get a list source names for the observations.
1120 Return a string for each integration in the scantable.
1121 Parameters:
1122
1123 row: row no of integration. Default -1 return all rows.
1124
1125 """
1126 return self._get_column(self._getsourcename, row)
1127
1128 def get_elevation(self, row=-1):
1129 """\
1130 Get a list of elevations for the observations.
1131 Return a float for each integration in the scantable.
1132
1133 Parameters:
1134
1135 row: row no of integration. Default -1 return all rows.
1136
1137 """
1138 return self._get_column(self._getelevation, row)
1139
1140 def get_azimuth(self, row=-1):
1141 """\
1142 Get a list of azimuths for the observations.
1143 Return a float for each integration in the scantable.
1144
1145 Parameters:
1146 row: row no of integration. Default -1 return all rows.
1147
1148 """
1149 return self._get_column(self._getazimuth, row)
1150
1151 def get_parangle(self, row=-1):
1152 """\
1153 Get a list of parallactic angles for the observations.
1154 Return a float for each integration in the scantable.
1155
1156 Parameters:
1157
1158 row: row no of integration. Default -1 return all rows.
1159
1160 """
1161 return self._get_column(self._getparangle, row)
1162
1163 def get_direction(self, row=-1):
1164 """
1165 Get a list of Positions on the sky (direction) for the observations.
1166 Return a string for each integration in the scantable.
1167
1168 Parameters:
1169
1170 row: row no of integration. Default -1 return all rows
1171
1172 """
1173 return self._get_column(self._getdirection, row)
1174
1175 def get_directionval(self, row=-1):
1176 """\
1177 Get a list of Positions on the sky (direction) for the observations.
1178 Return a float for each integration in the scantable.
1179
1180 Parameters:
1181
1182 row: row no of integration. Default -1 return all rows
1183
1184 """
1185 return self._get_column(self._getdirectionvec, row)
1186
1187 @asaplog_post_dec
1188 def set_unit(self, unit='channel'):
1189 """\
1190 Set the unit for all following operations on this scantable
1191
1192 Parameters:
1193
1194 unit: optional unit, default is 'channel'. Use one of '*Hz',
1195 'km/s', 'channel' or equivalent ''
1196
1197 """
1198 varlist = vars()
1199 if unit in ['', 'pixel', 'channel']:
1200 unit = ''
1201 inf = list(self._getcoordinfo())
1202 inf[0] = unit
1203 self._setcoordinfo(inf)
1204 self._add_history("set_unit", varlist)
1205
1206 @asaplog_post_dec
1207 def set_instrument(self, instr):
1208 """\
1209 Set the instrument for subsequent processing.
1210
1211 Parameters:
1212
1213 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1214 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1215
1216 """
1217 self._setInstrument(instr)
1218 self._add_history("set_instument", vars())
1219
1220 @asaplog_post_dec
1221 def set_feedtype(self, feedtype):
1222 """\
1223 Overwrite the feed type, which might not be set correctly.
1224
1225 Parameters:
1226
1227 feedtype: 'linear' or 'circular'
1228
1229 """
1230 self._setfeedtype(feedtype)
1231 self._add_history("set_feedtype", vars())
1232
1233 @asaplog_post_dec
1234 def set_doppler(self, doppler='RADIO'):
1235 """\
1236 Set the doppler for all following operations on this scantable.
1237
1238 Parameters:
1239
1240 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1241
1242 """
1243 varlist = vars()
1244 inf = list(self._getcoordinfo())
1245 inf[2] = doppler
1246 self._setcoordinfo(inf)
1247 self._add_history("set_doppler", vars())
1248
1249 @asaplog_post_dec
1250 def set_freqframe(self, frame=None):
1251 """\
1252 Set the frame type of the Spectral Axis.
1253
1254 Parameters:
1255
1256 frame: an optional frame type, default 'LSRK'. Valid frames are:
1257 'TOPO', 'LSRD', 'LSRK', 'BARY',
1258 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1259
1260 Example::
1261
1262 scan.set_freqframe('BARY')
1263
1264 """
1265 frame = frame or rcParams['scantable.freqframe']
1266 varlist = vars()
1267 # "REST" is not implemented in casacore
1268 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1269 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1270 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1271 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1272
1273 if frame in valid:
1274 inf = list(self._getcoordinfo())
1275 inf[1] = frame
1276 self._setcoordinfo(inf)
1277 self._add_history("set_freqframe", varlist)
1278 else:
1279 msg = "Please specify a valid freq type. Valid types are:\n", valid
1280 raise TypeError(msg)
1281
1282 @asaplog_post_dec
1283 def set_dirframe(self, frame=""):
1284 """\
1285 Set the frame type of the Direction on the sky.
1286
1287 Parameters:
1288
1289 frame: an optional frame type, default ''. Valid frames are:
1290 'J2000', 'B1950', 'GALACTIC'
1291
1292 Example:
1293
1294 scan.set_dirframe('GALACTIC')
1295
1296 """
1297 varlist = vars()
1298 Scantable.set_dirframe(self, frame)
1299 self._add_history("set_dirframe", varlist)
1300
1301 def get_unit(self):
1302 """\
1303 Get the default unit set in this scantable
1304
1305 Returns:
1306
1307 A unit string
1308
1309 """
1310 inf = self._getcoordinfo()
1311 unit = inf[0]
1312 if unit == '': unit = 'channel'
1313 return unit
1314
1315 @asaplog_post_dec
1316 def get_abcissa(self, rowno=0):
1317 """\
1318 Get the abcissa in the current coordinate setup for the currently
1319 selected Beam/IF/Pol
1320
1321 Parameters:
1322
1323 rowno: an optional row number in the scantable. Default is the
1324 first row, i.e. rowno=0
1325
1326 Returns:
1327
1328 The abcissa values and the format string (as a dictionary)
1329
1330 """
1331 abc = self._getabcissa(rowno)
1332 lbl = self._getabcissalabel(rowno)
1333 return abc, lbl
1334
1335 @asaplog_post_dec
1336 def flag(self, mask=None, unflag=False, row=-1):
1337 """\
1338 Flag the selected data using an optional channel mask.
1339
1340 Parameters:
1341
1342 mask: an optional channel mask, created with create_mask. Default
1343 (no mask) is all channels.
1344
1345 unflag: if True, unflag the data
1346
1347 row: an optional row number in the scantable.
1348 Default -1 flags all rows
1349
1350 """
1351 varlist = vars()
1352 mask = mask or []
1353 self._flag(row, mask, unflag)
1354 self._add_history("flag", varlist)
1355
1356 @asaplog_post_dec
1357 def flag_row(self, rows=None, unflag=False):
1358 """\
1359 Flag the selected data in row-based manner.
1360
1361 Parameters:
1362
1363 rows: list of row numbers to be flagged. Default is no row
1364 (must be explicitly specified to execute row-based
1365 flagging).
1366
1367 unflag: if True, unflag the data.
1368
1369 """
1370 varlist = vars()
1371 if rows is None:
1372 rows = []
1373 self._flag_row(rows, unflag)
1374 self._add_history("flag_row", varlist)
1375
1376 @asaplog_post_dec
1377 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1378 """\
1379 Flag the selected data outside a specified range (in channel-base)
1380
1381 Parameters:
1382
1383 uthres: upper threshold.
1384
1385 dthres: lower threshold
1386
1387 clipoutside: True for flagging data outside the range
1388 [dthres:uthres].
1389 False for flagging data inside the range.
1390
1391 unflag: if True, unflag the data.
1392
1393 """
1394 varlist = vars()
1395 self._clip(uthres, dthres, clipoutside, unflag)
1396 self._add_history("clip", varlist)
1397
1398 @asaplog_post_dec
1399 def lag_flag(self, start, end, unit="MHz", insitu=None):
1400 """\
1401 Flag the data in 'lag' space by providing a frequency to remove.
1402 Flagged data in the scantable get interpolated over the region.
1403 No taper is applied.
1404
1405 Parameters:
1406
1407 start: the start frequency (really a period within the
1408 bandwidth) or period to remove
1409
1410 end: the end frequency or period to remove
1411
1412 unit: the frequency unit (default 'MHz') or '' for
1413 explicit lag channels
1414
1415 *Notes*:
1416
1417 It is recommended to flag edges of the band or strong
1418 signals beforehand.
1419
1420 """
1421 if insitu is None: insitu = rcParams['insitu']
1422 self._math._setinsitu(insitu)
1423 varlist = vars()
1424 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1425 if not (unit == "" or base.has_key(unit)):
1426 raise ValueError("%s is not a valid unit." % unit)
1427 if unit == "":
1428 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1429 else:
1430 s = scantable(self._math._lag_flag(self, start*base[unit],
1431 end*base[unit], "frequency"))
1432 s._add_history("lag_flag", varlist)
1433 if insitu:
1434 self._assign(s)
1435 else:
1436 return s
1437
1438 @asaplog_post_dec
1439 def fft(self, rowno=None, mask=None, getrealimag=False):
1440 """\
1441 Apply FFT to the spectra.
1442 Flagged data in the scantable get interpolated over the region.
1443
1444 Parameters:
1445
1446 rowno: The row number(s) to be processed. int, list
1447 and tuple are accepted. By default (None), FFT
1448 is applied to the whole data.
1449
1450 mask: Auxiliary channel mask(s). Given as a boolean
1451 list, it is applied to all specified rows.
1452 A list of boolean lists can also be used to
1453 apply different masks. In the latter case, the
1454 length of 'mask' must be the same as that of
1455 'rowno'. The default is None.
1456
1457 getrealimag: If True, returns the real and imaginary part
1458 values of the complex results.
1459 If False (the default), returns the amplitude
1460 (absolute value) normalised with Ndata/2 and
1461 phase (argument, in unit of radian).
1462
1463 Returns:
1464
1465 A list of dictionaries containing the results for each spectrum.
1466 Each dictionary contains two values, the real and the imaginary
1467 parts when getrealimag = True, or the amplitude(absolute value)
1468 and the phase(argument) when getrealimag = False. The key for
1469 these values are 'real' and 'imag', or 'ampl' and 'phase',
1470 respectively.
1471 """
1472 if rowno is None:
1473 rowno = []
1474 if isinstance(rowno, int):
1475 rowno = [rowno]
1476 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1477 raise TypeError("The row number(s) must be int, list or tuple.")
1478 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1479
1480 usecommonmask = True
1481
1482 if mask is None:
1483 mask = []
1484 if isinstance(mask, list) or isinstance(mask, tuple):
1485 if len(mask) == 0:
1486 mask = [[]]
1487 else:
1488 if isinstance(mask[0], bool):
1489 if len(mask) != self.nchan(self.getif(rowno[0])):
1490 raise ValueError("The spectra and the mask have "
1491 "different length.")
1492 mask = [mask]
1493 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1494 usecommonmask = False
1495 if len(mask) != len(rowno):
1496 raise ValueError("When specifying masks for each "
1497 "spectrum, the numbers of them "
1498 "must be identical.")
1499 for i in xrange(mask):
1500 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1501 raise ValueError("The spectra and the mask have "
1502 "different length.")
1503 else:
1504 raise TypeError("The mask must be a boolean list or "
1505 "a list of boolean list.")
1506 else:
1507 raise TypeError("The mask must be a boolean list or a list of "
1508 "boolean list.")
1509
1510 res = []
1511
1512 imask = 0
1513 for whichrow in rowno:
1514 fspec = self._fft(whichrow, mask[imask], getrealimag)
1515 nspec = len(fspec)
1516
1517 i = 0
1518 v1 = []
1519 v2 = []
1520 reselem = {"real":[],"imag":[]} if getrealimag \
1521 else {"ampl":[],"phase":[]}
1522
1523 while (i < nspec):
1524 v1.append(fspec[i])
1525 v2.append(fspec[i+1])
1526 i += 2
1527
1528 if getrealimag:
1529 reselem["real"] += v1
1530 reselem["imag"] += v2
1531 else:
1532 reselem["ampl"] += v1
1533 reselem["phase"] += v2
1534
1535 res.append(reselem)
1536
1537 if not usecommonmask:
1538 imask += 1
1539
1540 return res
1541
1542 @asaplog_post_dec
1543 def create_mask(self, *args, **kwargs):
1544 """\
1545 Compute and return a mask based on [min, max] windows.
1546 The specified windows are to be INCLUDED, when the mask is
1547 applied.
1548
1549 Parameters:
1550
1551 [min, max], [min2, max2], ...
1552 Pairs of start/end points (inclusive)specifying the regions
1553 to be masked
1554
1555 invert: optional argument. If specified as True,
1556 return an inverted mask, i.e. the regions
1557 specified are EXCLUDED
1558
1559 row: create the mask using the specified row for
1560 unit conversions, default is row=0
1561 only necessary if frequency varies over rows.
1562
1563 Examples::
1564
1565 scan.set_unit('channel')
1566 # a)
1567 msk = scan.create_mask([400, 500], [800, 900])
1568 # masks everything outside 400 and 500
1569 # and 800 and 900 in the unit 'channel'
1570
1571 # b)
1572 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1573 # masks the regions between 400 and 500
1574 # and 800 and 900 in the unit 'channel'
1575
1576 # c)
1577 #mask only channel 400
1578 msk = scan.create_mask([400])
1579
1580 """
1581 row = kwargs.get("row", 0)
1582 data = self._getabcissa(row)
1583 u = self._getcoordinfo()[0]
1584 if u == "":
1585 u = "channel"
1586 msg = "The current mask window unit is %s" % u
1587 i = self._check_ifs()
1588 if not i:
1589 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1590 asaplog.push(msg)
1591 n = len(data)
1592 msk = _n_bools(n, False)
1593 # test if args is a 'list' or a 'normal *args - UGLY!!!
1594
1595 ws = (isinstance(args[-1][-1], int)
1596 or isinstance(args[-1][-1], float)) and args or args[0]
1597 for window in ws:
1598 if len(window) == 1:
1599 window = [window[0], window[0]]
1600 if len(window) == 0 or len(window) > 2:
1601 raise ValueError("A window needs to be defined as "
1602 "[start(, end)]")
1603 if window[0] > window[1]:
1604 tmp = window[0]
1605 window[0] = window[1]
1606 window[1] = tmp
1607 for i in range(n):
1608 if data[i] >= window[0] and data[i] <= window[1]:
1609 msk[i] = True
1610 if kwargs.has_key('invert'):
1611 if kwargs.get('invert'):
1612 msk = mask_not(msk)
1613 return msk
1614
1615 def get_masklist(self, mask=None, row=0, silent=False):
1616 """\
1617 Compute and return a list of mask windows, [min, max].
1618
1619 Parameters:
1620
1621 mask: channel mask, created with create_mask.
1622
1623 row: calcutate the masklist using the specified row
1624 for unit conversions, default is row=0
1625 only necessary if frequency varies over rows.
1626
1627 Returns:
1628
1629 [min, max], [min2, max2], ...
1630 Pairs of start/end points (inclusive)specifying
1631 the masked regions
1632
1633 """
1634 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1635 raise TypeError("The mask should be list or tuple.")
1636 if len(mask) <= 0:
1637 raise TypeError("The mask elements should be > 0")
1638 data = self._getabcissa(row)
1639 if len(data) != len(mask):
1640 msg = "Number of channels in scantable != number of mask elements"
1641 raise TypeError(msg)
1642 u = self._getcoordinfo()[0]
1643 if u == "":
1644 u = "channel"
1645 msg = "The current mask window unit is %s" % u
1646 i = self._check_ifs()
1647 if not i:
1648 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1649 if not silent:
1650 asaplog.push(msg)
1651 masklist = []
1652 ist, ien = None, None
1653 ist, ien=self.get_mask_indices(mask)
1654 if ist is not None and ien is not None:
1655 for i in xrange(len(ist)):
1656 range=[data[ist[i]],data[ien[i]]]
1657 range.sort()
1658 masklist.append([range[0],range[1]])
1659 return masklist
1660
1661 def get_mask_indices(self, mask=None):
1662 """\
1663 Compute and Return lists of mask start indices and mask end indices.
1664
1665 Parameters:
1666
1667 mask: channel mask, created with create_mask.
1668
1669 Returns:
1670
1671 List of mask start indices and that of mask end indices,
1672 i.e., [istart1,istart2,....], [iend1,iend2,....].
1673
1674 """
1675 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1676 raise TypeError("The mask should be list or tuple.")
1677 if len(mask) <= 0:
1678 raise TypeError("The mask elements should be > 0")
1679 istart = []
1680 iend = []
1681 if mask[0]:
1682 istart.append(0)
1683 for i in range(len(mask)-1):
1684 if not mask[i] and mask[i+1]:
1685 istart.append(i+1)
1686 elif mask[i] and not mask[i+1]:
1687 iend.append(i)
1688 if mask[len(mask)-1]:
1689 iend.append(len(mask)-1)
1690 if len(istart) != len(iend):
1691 raise RuntimeError("Numbers of mask start != mask end.")
1692 for i in range(len(istart)):
1693 if istart[i] > iend[i]:
1694 raise RuntimeError("Mask start index > mask end index")
1695 break
1696 return istart,iend
1697
1698 @asaplog_post_dec
1699 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
1700 """
1701 Parse MS type spw/channel selection syntax.
1702
1703 Parameters:
1704 selectstring : A string expression of spw and channel selection.
1705 Comma-separated expressions mean different spw -
1706 channel combinations. Spws and channel selections
1707 are partitioned by a colon ':'. In a single
1708 selection expression, you can put multiple values
1709 separated by semicolons ';'. Both for spw and
1710 channel selection, allowed cases include single
1711 value, blank('') or asterisk('*') to specify all
1712 available values, two values connected with a
1713 tilde ('~') to specify an inclusive range. Unit
1714 strings for frequency or velocity can be added to
1715 the tilde-connected values. For channel selection
1716 expression, placing a '<' or a '>' is possible to
1717 specify a semi-infinite interval as well.
1718
1719 examples:
1720 '' or '*' = all spws (all channels)
1721 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
1722 '3:3~45;60' = channels 3 to 45 and 60 in spw 3
1723 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
1724 all channels in spw8
1725 '1.3~1.5GHz' = all spws that fall in or have at
1726 least some overwrap with frequency
1727 range between 1.3GHz and 1.5GHz.
1728 '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
1729 between the specified
1730 frequency range in spws
1731 that fall in or have
1732 overwrap with the
1733 specified frequency
1734 range.
1735 '1:-200~250km/s' = channels that fall between the
1736 specified velocity range in
1737 spw 1.
1738 Returns:
1739 A dictionary of selected (valid) spw and masklist pairs,
1740 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1741 """
1742 if not isinstance(selectstring, str):
1743 asaplog.post()
1744 asaplog.push("Expression of spw/channel selection must be a string.")
1745 asaplog.post("ERROR")
1746
1747 orig_unit = self.get_unit()
1748 self.set_unit('channel')
1749
1750 orig_restfreq = self.get_restfreqs()
1751 orig_restfreq_list = []
1752 for i in orig_restfreq.keys():
1753 if len(orig_restfreq[i]) == 1:
1754 orig_restfreq_list.append(orig_restfreq[i][0])
1755 else:
1756 orig_restfreq_list.append(orig_restfreq[i])
1757
1758 orig_coord = self._getcoordinfo()
1759 orig_frame = orig_coord[1]
1760 orig_doppler = orig_coord[2]
1761
1762 if restfreq is None: restfreq = orig_restfreq_list
1763 if frame is None: frame = orig_frame
1764 if doppler is None: doppler = orig_doppler
1765
1766 self.set_restfreqs(restfreq)
1767 self.set_freqframe(frame)
1768 self.set_doppler(doppler)
1769
1770 valid_ifs = self.getifnos()
1771
1772 comma_sep = selectstring.split(",")
1773 res = {}
1774
1775 for cms_elem in comma_sep:
1776 colon_sep = cms_elem.split(":")
1777
1778 if (len(colon_sep) > 2):
1779 raise RuntimeError("Invalid selection expression: more than two colons!")
1780
1781 # parse spw expression and store result in spw_list.
1782 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
1783 # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
1784 # 'a~b*m/s' (where * can be '' or 'k') and also
1785 # several of the above expressions connected with ';'.
1786
1787 spw_list = []
1788
1789 semicolon_sep = colon_sep[0].split(";")
1790
1791 for scs_elem in semicolon_sep:
1792 scs_elem = scs_elem.strip()
1793
1794 lt_sep = scs_elem.split("<")
1795 gt_sep = scs_elem.split(">")
1796 ti_sep = scs_elem.split("~")
1797
1798 lt_sep_length = len(lt_sep)
1799 gt_sep_length = len(gt_sep)
1800 ti_sep_length = len(ti_sep)
1801
1802 len_product = lt_sep_length * gt_sep_length * ti_sep_length
1803
1804 if (len_product > 2):
1805 # '<', '>' and '~' must not coexist in a single spw expression
1806
1807 raise RuntimeError("Invalid spw selection.")
1808
1809 elif (len_product == 1):
1810 # '', '*', or single spw number.
1811
1812 if (scs_elem == "") or (scs_elem == "*"):
1813 spw_list = valid_ifs[:] # deep copy
1814
1815 else: # single number
1816 try:
1817 #checking if the given number is valid for spw ID
1818 idx = valid_ifs.index(int(scs_elem))
1819 spw_list.append(valid_ifs[idx])
1820
1821 except:
1822 asaplog.post()
1823 asaplog.push("Wrong spw number (" + scs_elem + ") given. ignored.")
1824 asaplog.post("WARNING")
1825
1826 else: # (len_product == 2)
1827 # namely, one of '<', '>' or '~' appers just once.
1828
1829 if (lt_sep_length == 2): # '<a'
1830 if is_number(lt_sep[1]):
1831 for i in valid_ifs:
1832 if (i < float(lt_sep[1])):
1833 spw_list.append(i)
1834
1835 else:
1836 RuntimeError("Invalid spw selection.")
1837
1838 elif (gt_sep_length == 2): # '>a'
1839 if is_number(gt_sep[1]):
1840 for i in valid_ifs:
1841 if (i > float(gt_sep[1])):
1842 spw_list.append(i)
1843
1844 else:
1845 RuntimeError("Invalid spw selection.")
1846
1847 else: # (ti_sep_length == 2) where both boundaries inclusive
1848 expr0 = ti_sep[0].strip()
1849 expr1 = ti_sep[1].strip()
1850
1851 if is_number(expr0) and is_number(expr1):
1852 # 'a~b'
1853 expr_pmin = min(float(expr0), float(expr1))
1854 expr_pmax = max(float(expr0), float(expr1))
1855 for i in valid_ifs:
1856 if (expr_pmin <= i) and (i <= expr_pmax):
1857 spw_list.append(i)
1858
1859 elif is_number(expr0) and is_frequency(expr1):
1860 # 'a~b*Hz'
1861 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
1862
1863 for coord in self._get_coordinate_list():
1864 expr_p0 = coord['coord'].to_pixel(expr_f0)
1865 expr_p1 = coord['coord'].to_pixel(expr_f1)
1866 expr_pmin = min(expr_p0, expr_p1)
1867 expr_pmax = max(expr_p0, expr_p1)
1868
1869 spw = coord['if']
1870 pmin = 0.0
1871 pmax = float(self.nchan(spw) - 1)
1872
1873 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
1874 spw_list.append(spw)
1875
1876 elif is_number(expr0) and is_velocity(expr1):
1877 # 'a~b*m/s'
1878 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
1879 expr_vmin = min(expr_v0, expr_v1)
1880 expr_vmax = max(expr_v0, expr_v1)
1881
1882 for coord in self._get_coordinate_list():
1883 spw = coord['if']
1884
1885 pmin = 0.0
1886 pmax = float(self.nchan(spw) - 1)
1887
1888 vel0 = coord['coord'].to_velocity(pmin)
1889 vel1 = coord['coord'].to_velocity(pmax)
1890
1891 vmin = min(vel0, vel1)
1892 vmax = max(vel0, vel1)
1893
1894 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
1895 spw_list.append(spw)
1896
1897 else:
1898 # cases such as 'aGHz~bkm/s' are not allowed now
1899 raise RuntimeError("Invalid spw selection.")
1900
1901 # parse channel expression and store the result in crange_list.
1902 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
1903 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
1904 # and also several of the above expressions connected with ';'.
1905
1906 for spw in spw_list:
1907 pmin = 0.0
1908 pmax = float(self.nchan(spw) - 1)
1909
1910 if (len(colon_sep) == 1):
1911 # no expression for channel selection,
1912 # which means all channels are to be selected.
1913 crange_list = [[pmin, pmax]]
1914
1915 else: # (len(colon_sep) == 2)
1916 crange_list = []
1917
1918 found = False
1919 for i in self._get_coordinate_list():
1920 if (i['if'] == spw):
1921 coord = i['coord']
1922 found = True
1923 break
1924
1925 if not found:
1926 raise RuntimeError("Invalid spw value.")
1927
1928 semicolon_sep = colon_sep[1].split(";")
1929 for scs_elem in semicolon_sep:
1930 scs_elem = scs_elem.strip()
1931
1932 ti_sep = scs_elem.split("~")
1933 ti_sep_length = len(ti_sep)
1934
1935 if (ti_sep_length > 2):
1936 raise RuntimeError("Invalid channel selection.")
1937
1938 elif (ti_sep_length == 1):
1939 if (scs_elem == "") or (scs_elem == "*"):
1940 # '' and '*' for all channels
1941 crange_list = [[pmin, pmax]]
1942 break
1943 elif (is_number(scs_elem)):
1944 # single channel given
1945 crange_list.append([float(scs_elem), float(scs_elem)])
1946 else:
1947 raise RuntimeError("Invalid channel selection.")
1948
1949 else: #(ti_sep_length == 2)
1950 expr0 = ti_sep[0].strip()
1951 expr1 = ti_sep[1].strip()
1952
1953 if is_number(expr0) and is_number(expr1):
1954 # 'a~b'
1955 expr_pmin = min(float(expr0), float(expr1))
1956 expr_pmax = max(float(expr0), float(expr1))
1957
1958 elif is_number(expr0) and is_frequency(expr1):
1959 # 'a~b*Hz'
1960 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
1961 expr_p0 = coord.to_pixel(expr_f0)
1962 expr_p1 = coord.to_pixel(expr_f1)
1963 expr_pmin = min(expr_p0, expr_p1)
1964 expr_pmax = max(expr_p0, expr_p1)
1965
1966 elif is_number(expr0) and is_velocity(expr1):
1967 # 'a~b*m/s'
1968 restf = self.get_restfreqs().values()[0][0]
1969 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
1970 expr_f0 = get_frequency_by_velocity(restf, expr_v0)
1971 expr_f1 = get_frequency_by_velocity(restf, expr_v1)
1972 expr_p0 = coord.to_pixel(expr_f0)
1973 expr_p1 = coord.to_pixel(expr_f1)
1974 expr_pmin = min(expr_p0, expr_p1)
1975 expr_pmax = max(expr_p0, expr_p1)
1976
1977 else:
1978 # cases such as 'aGHz~bkm/s' are not allowed now
1979 raise RuntimeError("Invalid channel selection.")
1980
1981 cmin = max(pmin, expr_pmin)
1982 cmax = min(pmax, expr_pmax)
1983 # if the given range of channel selection has overwrap with
1984 # that of current spw, output the overwrap area.
1985 if (cmin <= cmax):
1986 cmin = float(int(cmin + 0.5))
1987 cmax = float(int(cmax + 0.5))
1988 crange_list.append([cmin, cmax])
1989
1990 if (len(crange_list) == 0):
1991 crange_list.append([])
1992
1993 if res.has_key(spw):
1994 res[spw].extend(crange_list)
1995 else:
1996 res[spw] = crange_list
1997
1998 # restore original values
1999 self.set_restfreqs(orig_restfreq_list)
2000 self.set_freqframe(orig_frame)
2001 self.set_doppler(orig_doppler)
2002 self.set_unit(orig_unit)
2003
2004 return res
2005
2006 @asaplog_post_dec
2007 def get_first_rowno_by_if(self, ifno):
2008 found = False
2009 for irow in xrange(self.nrow()):
2010 if (self.getif(irow) == ifno):
2011 res = irow
2012 found = True
2013 break
2014
2015 if not found: raise RuntimeError("Invalid IF value.")
2016
2017 return res
2018
2019 @asaplog_post_dec
2020 def _get_coordinate_list(self):
2021 res = []
2022 spws = self.getifnos()
2023 for spw in spws:
2024 elem = {}
2025 elem['if'] = spw
2026 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2027 res.append(elem)
2028
2029 return res
2030
2031##################################
2032
2033 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels);
        # str(list)[1:-1] turns e.g. [0, 1, 2] into "0, 1, 2"
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row of this IF; note the loop
                        # variable 'ifrow' is reused after the break
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in the current unit
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                    del spwstr
            del freqmasklist
        del valid_ifs
        # nothing survived the selection: warn and return None
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "   IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2170
    @asaplog_post_dec
    def parse_idx_selection(self, mode, selexpr):
        """
        Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
        BEAMNO, and row number

        Parameters:
            mode : which column to select.
                   ['scan',|'if'|'pol'|'beam'|'row']
            selexpr : A comma separated selection expression.
                     examples:
                         '' = all (returns [])
                         '<2,4~6,9' = indices less than 2, 4 to 6 and 9
                                      (returns [0,1,4,5,6,9])
        Returns:
        A List of selected indices
        """
        # An empty expression means "select all"; callers interpret an
        # empty index list as no restriction.
        if selexpr == "":
            return []
        # Only the first letter of 'mode' is significant ('scan' == 's').
        valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
                       'b': 'beam', 'r': 'row'}
        smode =  mode.lower()[0]
        if not (smode in valid_modes.keys()):
            msg = "Invalid mode '%s'. Valid modes are %s" %\
                  (mode, str(valid_modes.values()))
            asaplog.post()
            asaplog.push(msg)
            asaplog.post("ERROR")
            # NOTE(review): execution continues after posting ERROR, so the
            # lookup below raises KeyError for an invalid mode unless
            # asaplog.post("ERROR") itself raises -- confirm intended flow.
        mode = valid_modes[smode]
        # Bounds used to expand open-ended selections such as '<2' or '>5'.
        minidx = None
        maxidx = None
        if smode == 'r':
            minidx = 0
            maxidx = self.nrow()
            # NOTE(review): row indices run 0..nrow()-1; using nrow() as the
            # inclusive maximum looks like an off-by-one -- confirm.
        else:
            # e.g. mode 'if' -> self.getifnos()
            idx = getattr(self,"get"+mode+"nos")()
            minidx = min(idx)
            maxidx = max(idx)
            del idx
        # split selexpr by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(selexpr)
        idxlist = []
        for currselstr in sellist:
            # single range (may include ~, < or >)
            currlist = self._parse_selection(currselstr, typestr='integer',
                                             minval=minidx,maxval=maxidx)
            # expand each [min, max] pair into explicit indices (inclusive)
            for thelist in currlist:
                idxlist += range(thelist[0],thelist[1]+1)
        msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
        asaplog.push(msg)
        return idxlist
2223
2224 def _parse_selection(self, selstr, typestr='float', offset=0.,
2225 minval=None, maxval=None):
2226 """
2227 Parameters:
2228 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2229 typestr : The type of the values in returned list
2230 ('integer' or 'float')
2231 offset : The offset value to subtract from or add to
2232 the boundary value if the selection string
2233 includes '<' or '>' [Valid only for typestr='float']
2234 minval, maxval : The minimum/maximum values to set if the
2235 selection string includes '<' or '>'.
2236 The list element is filled with None by default.
2237 Returns:
2238 A list of min/max pair of selections.
2239 Example:
2240 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2241 --> returns [[0,2],[5,7],[9,9]]
2242 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2243 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2244 """
2245 # split selstr by '<spaces>;<spaces>'
2246 semi_sep = re.compile('\s*;\s*')
2247 selgroups = semi_sep.split(selstr)
2248 sellists = []
2249 if typestr.lower().startswith('int'):
2250 formatfunc = int
2251 offset = 1
2252 else:
2253 formatfunc = float
2254
2255 for currsel in selgroups:
2256 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2257 minsel = minval
2258 maxsel = maxval
2259 if currsel.find('~') > 0:
2260 # val0 <= x <= val1
2261 minsel = formatfunc(currsel.split('~')[0].strip())
2262 maxsel = formatfunc(currsel.split('~')[1].strip())
2263 elif currsel.strip().find('<=') > -1:
2264 bound = currsel.split('<=')
2265 try: # try "x <= val"
2266 minsel = minval
2267 maxsel = formatfunc(bound[1].strip())
2268 except ValueError: # now "val <= x"
2269 minsel = formatfunc(bound[0].strip())
2270 maxsel = maxval
2271 elif currsel.strip().find('>=') > -1:
2272 bound = currsel.split('>=')
2273 try: # try "x >= val"
2274 minsel = formatfunc(bound[1].strip())
2275 maxsel = maxval
2276 except ValueError: # now "val >= x"
2277 minsel = minval
2278 maxsel = formatfunc(bound[0].strip())
2279 elif currsel.strip().find('<') > -1:
2280 bound = currsel.split('<')
2281 try: # try "x < val"
2282 minsel = minval
2283 maxsel = formatfunc(bound[1].strip()) \
2284 - formatfunc(offset)
2285 except ValueError: # now "val < x"
2286 minsel = formatfunc(bound[0].strip()) \
2287 + formatfunc(offset)
2288 maxsel = maxval
2289 elif currsel.strip().find('>') > -1:
2290 bound = currsel.split('>')
2291 try: # try "x > val"
2292 minsel = formatfunc(bound[1].strip()) \
2293 + formatfunc(offset)
2294 maxsel = maxval
2295 except ValueError: # now "val > x"
2296 minsel = minval
2297 maxsel = formatfunc(bound[0].strip()) \
2298 - formatfunc(offset)
2299 else:
2300 minsel = formatfunc(currsel)
2301 maxsel = formatfunc(currsel)
2302 sellists.append([minsel,maxsel])
2303 return sellists
2304
2305# def get_restfreqs(self):
2306# """
2307# Get the restfrequency(s) stored in this scantable.
2308# The return value(s) are always of unit 'Hz'
2309# Parameters:
2310# none
2311# Returns:
2312# a list of doubles
2313# """
2314# return list(self._getrestfreqs())
2315
2316 def get_restfreqs(self, ids=None):
2317 """\
2318 Get the restfrequency(s) stored in this scantable.
2319 The return value(s) are always of unit 'Hz'
2320
2321 Parameters:
2322
2323 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2324 be retrieved
2325
2326 Returns:
2327
2328 dictionary containing ids and a list of doubles for each id
2329
2330 """
2331 if ids is None:
2332 rfreqs = {}
2333 idlist = self.getmolnos()
2334 for i in idlist:
2335 rfreqs[i] = list(self._getrestfreqs(i))
2336 return rfreqs
2337 else:
2338 if type(ids) == list or type(ids) == tuple:
2339 rfreqs = {}
2340 for i in ids:
2341 rfreqs[i] = list(self._getrestfreqs(i))
2342 return rfreqs
2343 else:
2344 return list(self._getrestfreqs(ids))
2345
2346 @asaplog_post_dec
2347 def set_restfreqs(self, freqs=None, unit='Hz'):
2348 """\
2349 Set or replace the restfrequency specified and
2350 if the 'freqs' argument holds a scalar,
2351 then that rest frequency will be applied to all the selected
2352 data. If the 'freqs' argument holds
2353 a vector, then it MUST be of equal or smaller length than
2354 the number of IFs (and the available restfrequencies will be
2355 replaced by this vector). In this case, *all* data have
2356 the restfrequency set per IF according
2357 to the corresponding value you give in the 'freqs' vector.
2358 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2359 IF 1 gets restfreq 2e9.
2360
2361 You can also specify the frequencies via a linecatalog.
2362
2363 Parameters:
2364
2365 freqs: list of rest frequency values or string idenitfiers
2366
2367 unit: unit for rest frequency (default 'Hz')
2368
2369
2370 Example::
2371
2372 # set the given restfrequency for the all currently selected IFs
2373 scan.set_restfreqs(freqs=1.4e9)
2374 # set restfrequencies for the n IFs (n > 1) in the order of the
2375 # list, i.e
2376 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2377 # len(list_of_restfreqs) == nIF
2378 # for nIF == 1 the following will set multiple restfrequency for
2379 # that IF
2380 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2381 # set multiple restfrequencies per IF. as a list of lists where
2382 # the outer list has nIF elements, the inner s arbitrary
2383 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2384
2385 *Note*:
2386
2387 To do more sophisticate Restfrequency setting, e.g. on a
2388 source and IF basis, use scantable.set_selection() before using
2389 this function::
2390
2391 # provided your scantable is called scan
2392 selection = selector()
2393 selection.set_name('ORION*')
2394 selection.set_ifs([1])
2395 scan.set_selection(selection)
2396 scan.set_restfreqs(freqs=86.6e9)
2397
2398 """
2399 varlist = vars()
2400 from asap import linecatalog
2401 # simple value
2402 if isinstance(freqs, int) or isinstance(freqs, float):
2403 self._setrestfreqs([freqs], [""], unit)
2404 # list of values
2405 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2406 # list values are scalars
2407 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2408 if len(freqs) == 1:
2409 self._setrestfreqs(freqs, [""], unit)
2410 else:
2411 # allow the 'old' mode of setting mulitple IFs
2412 savesel = self._getselection()
2413 sel = self.get_selection()
2414 iflist = self.getifnos()
2415 if len(freqs)>len(iflist):
2416 raise ValueError("number of elements in list of list "
2417 "exeeds the current IF selections")
2418 iflist = self.getifnos()
2419 for i, fval in enumerate(freqs):
2420 sel.set_ifs(iflist[i])
2421 self._setselection(sel)
2422 self._setrestfreqs([fval], [""], unit)
2423 self._setselection(savesel)
2424
2425 # list values are dict, {'value'=, 'name'=)
2426 elif isinstance(freqs[-1], dict):
2427 values = []
2428 names = []
2429 for d in freqs:
2430 values.append(d["value"])
2431 names.append(d["name"])
2432 self._setrestfreqs(values, names, unit)
2433 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2434 savesel = self._getselection()
2435 sel = self.get_selection()
2436 iflist = self.getifnos()
2437 if len(freqs)>len(iflist):
2438 raise ValueError("number of elements in list of list exeeds"
2439 " the current IF selections")
2440 for i, fval in enumerate(freqs):
2441 sel.set_ifs(iflist[i])
2442 self._setselection(sel)
2443 self._setrestfreqs(fval, [""], unit)
2444 self._setselection(savesel)
2445 # freqs are to be taken from a linecatalog
2446 elif isinstance(freqs, linecatalog):
2447 savesel = self._getselection()
2448 sel = self.get_selection()
2449 for i in xrange(freqs.nrow()):
2450 sel.set_ifs(iflist[i])
2451 self._setselection(sel)
2452 self._setrestfreqs([freqs.get_frequency(i)],
2453 [freqs.get_name(i)], "MHz")
2454 # ensure that we are not iterating past nIF
2455 if i == self.nif()-1: break
2456 self._setselection(savesel)
2457 else:
2458 return
2459 self._add_history("set_restfreqs", varlist)
2460
2461 @asaplog_post_dec
2462 def shift_refpix(self, delta):
2463 """\
2464 Shift the reference pixel of the Spectra Coordinate by an
2465 integer amount.
2466
2467 Parameters:
2468
2469 delta: the amount to shift by
2470
2471 *Note*:
2472
2473 Be careful using this with broadband data.
2474
2475 """
2476 varlist = vars()
2477 Scantable.shift_refpix(self, delta)
2478 s._add_history("shift_refpix", varlist)
2479
2480 @asaplog_post_dec
2481 def history(self, filename=None, nrows=-1, start=0):
2482 """\
2483 Print the history. Optionally to a file.
2484
2485 Parameters:
2486
2487 filename: The name of the file to save the history to.
2488
2489 """
2490 n = self._historylength()
2491 if nrows == -1:
2492 nrows = n
2493 if start+nrows > n:
2494 nrows = nrows-start
2495 if n > 1000 and nrows == n:
2496 nrows = 1000
2497 start = n-1000
2498 asaplog.push("Warning: History has {0} entries. Displaying last "
2499 "1000".format(n))
2500 hist = list(self._gethistory(nrows, start))
2501 out = "-"*80
2502 for h in hist:
2503 if not h.strip():
2504 continue
2505 if h.find("---") >-1:
2506 continue
2507 else:
2508 items = h.split("##")
2509 date = items[0]
2510 func = items[1]
2511 items = items[2:]
2512 out += "\n"+date+"\n"
2513 out += "Function: %s\n Parameters:" % (func)
2514 for i in items:
2515 if i == '':
2516 continue
2517 s = i.split("=")
2518 out += "\n %s = %s" % (s[0], s[1])
2519 out = "\n".join([out, "*"*80])
2520 if filename is not None:
2521 if filename is "":
2522 filename = 'scantable_history.txt'
2523 filename = os.path.expandvars(os.path.expanduser(filename))
2524 if not os.path.isdir(filename):
2525 data = open(filename, 'w')
2526 data.write(out)
2527 data.close()
2528 else:
2529 msg = "Illegal file name '%s'." % (filename)
2530 raise IOError(msg)
2531 return page(out)
2532
2533 #
2534 # Maths business
2535 #
2536 @asaplog_post_dec
2537 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2538 avmode="NONE"):
2539 """\
2540 Return the (time) weighted average of a scan. Scans will be averaged
2541 only if the source direction (RA/DEC) is within 1' otherwise
2542
2543 *Note*:
2544
2545 in channels only - align if necessary
2546
2547 Parameters:
2548
2549 mask: an optional mask (only used for 'var' and 'tsys'
2550 weighting)
2551
2552 scanav: True averages each scan separately
2553 False (default) averages all scans together,
2554
2555 weight: Weighting scheme.
2556 'none' (mean no weight)
2557 'var' (1/var(spec) weighted)
2558 'tsys' (1/Tsys**2 weighted)
2559 'tint' (integration time weighted)
2560 'tintsys' (Tint/Tsys**2)
2561 'median' ( median averaging)
2562 The default is 'tint'
2563
2564 align: align the spectra in velocity before averaging. It takes
2565 the time of the first spectrum as reference time.
2566 avmode: 'SOURCE' - also select by source name - or
2567 'NONE' (default). Not applicable for scanav=True or
2568 weight=median
2569
2570 Example::
2571
2572 # time average the scantable without using a mask
2573 newscan = scan.average_time()
2574
2575 """
2576 varlist = vars()
2577 weight = weight or 'TINT'
2578 mask = mask or ()
2579 scanav = (scanav and 'SCAN') or avmode.upper()
2580 scan = (self, )
2581
2582 if align:
2583 scan = (self.freq_align(insitu=False), )
2584 asaplog.push("Note: Alignment is don on a source-by-source basis")
2585 asaplog.push("Note: Averaging (by default) is not")
2586 # we need to set it to SOURCE averaging here
2587 s = None
2588 if weight.upper() == 'MEDIAN':
2589 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2590 scanav))
2591 else:
2592 s = scantable(self._math._average(scan, mask, weight.upper(),
2593 scanav))
2594 s._add_history("average_time", varlist)
2595 return s
2596
2597 @asaplog_post_dec
2598 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2599 """\
2600 Return a scan where all spectra are converted to either
2601 Jansky or Kelvin depending upon the flux units of the scan table.
2602 By default the function tries to look the values up internally.
2603 If it can't find them (or if you want to over-ride), you must
2604 specify EITHER jyperk OR eta (and D which it will try to look up
2605 also if you don't set it). jyperk takes precedence if you set both.
2606
2607 Parameters:
2608
2609 jyperk: the Jy / K conversion factor
2610
2611 eta: the aperture efficiency
2612
2613 d: the geometric diameter (metres)
2614
2615 insitu: if False a new scantable is returned.
2616 Otherwise, the scaling is done in-situ
2617 The default is taken from .asaprc (False)
2618
2619 """
2620 if insitu is None: insitu = rcParams['insitu']
2621 self._math._setinsitu(insitu)
2622 varlist = vars()
2623 jyperk = jyperk or -1.0
2624 d = d or -1.0
2625 eta = eta or -1.0
2626 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2627 s._add_history("convert_flux", varlist)
2628 if insitu: self._assign(s)
2629 else: return s
2630
2631 @asaplog_post_dec
2632 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2633 """\
2634 Return a scan after applying a gain-elevation correction.
2635 The correction can be made via either a polynomial or a
2636 table-based interpolation (and extrapolation if necessary).
2637 You specify polynomial coefficients, an ascii table or neither.
2638 If you specify neither, then a polynomial correction will be made
2639 with built in coefficients known for certain telescopes (an error
2640 will occur if the instrument is not known).
2641 The data and Tsys are *divided* by the scaling factors.
2642
2643 Parameters:
2644
2645 poly: Polynomial coefficients (default None) to compute a
2646 gain-elevation correction as a function of
2647 elevation (in degrees).
2648
2649 filename: The name of an ascii file holding correction factors.
2650 The first row of the ascii file must give the column
2651 names and these MUST include columns
2652 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2653 by this) somewhere.
2654 The second row must give the data type of the
2655 column. Use 'R' for Real and 'I' for Integer.
2656 An example file would be
2657 (actual factors are arbitrary) :
2658
2659 TIME ELEVATION FACTOR
2660 R R R
2661 0.1 0 0.8
2662 0.2 20 0.85
2663 0.3 40 0.9
2664 0.4 60 0.85
2665 0.5 80 0.8
2666 0.6 90 0.75
2667
2668 method: Interpolation method when correcting from a table.
2669 Values are 'nearest', 'linear' (default), 'cubic'
2670 and 'spline'
2671
2672 insitu: if False a new scantable is returned.
2673 Otherwise, the scaling is done in-situ
2674 The default is taken from .asaprc (False)
2675
2676 """
2677
2678 if insitu is None: insitu = rcParams['insitu']
2679 self._math._setinsitu(insitu)
2680 varlist = vars()
2681 poly = poly or ()
2682 from os.path import expandvars
2683 filename = expandvars(filename)
2684 s = scantable(self._math._gainel(self, poly, filename, method))
2685 s._add_history("gain_el", varlist)
2686 if insitu:
2687 self._assign(s)
2688 else:
2689 return s
2690
2691 @asaplog_post_dec
2692 def freq_align(self, reftime=None, method='cubic', insitu=None):
2693 """\
2694 Return a scan where all rows have been aligned in frequency/velocity.
2695 The alignment frequency frame (e.g. LSRK) is that set by function
2696 set_freqframe.
2697
2698 Parameters:
2699
2700 reftime: reference time to align at. By default, the time of
2701 the first row of data is used.
2702
2703 method: Interpolation method for regridding the spectra.
2704 Choose from 'nearest', 'linear', 'cubic' (default)
2705 and 'spline'
2706
2707 insitu: if False a new scantable is returned.
2708 Otherwise, the scaling is done in-situ
2709 The default is taken from .asaprc (False)
2710
2711 """
2712 if insitu is None: insitu = rcParams["insitu"]
2713 oldInsitu = self._math._insitu()
2714 self._math._setinsitu(insitu)
2715 varlist = vars()
2716 reftime = reftime or ""
2717 s = scantable(self._math._freq_align(self, reftime, method))
2718 s._add_history("freq_align", varlist)
2719 self._math._setinsitu(oldInsitu)
2720 if insitu:
2721 self._assign(s)
2722 else:
2723 return s
2724
2725 @asaplog_post_dec
2726 def opacity(self, tau=None, insitu=None):
2727 """\
2728 Apply an opacity correction. The data
2729 and Tsys are multiplied by the correction factor.
2730
2731 Parameters:
2732
2733 tau: (list of) opacity from which the correction factor is
2734 exp(tau*ZD)
2735 where ZD is the zenith-distance.
2736 If a list is provided, it has to be of length nIF,
2737 nIF*nPol or 1 and in order of IF/POL, e.g.
2738 [opif0pol0, opif0pol1, opif1pol0 ...]
2739 if tau is `None` the opacities are determined from a
2740 model.
2741
2742 insitu: if False a new scantable is returned.
2743 Otherwise, the scaling is done in-situ
2744 The default is taken from .asaprc (False)
2745
2746 """
2747 if insitu is None:
2748 insitu = rcParams['insitu']
2749 self._math._setinsitu(insitu)
2750 varlist = vars()
2751 if not hasattr(tau, "__len__"):
2752 tau = [tau]
2753 s = scantable(self._math._opacity(self, tau))
2754 s._add_history("opacity", varlist)
2755 if insitu:
2756 self._assign(s)
2757 else:
2758 return s
2759
2760 @asaplog_post_dec
2761 def bin(self, width=5, insitu=None):
2762 """\
2763 Return a scan where all spectra have been binned up.
2764
2765 Parameters:
2766
2767 width: The bin width (default=5) in pixels
2768
2769 insitu: if False a new scantable is returned.
2770 Otherwise, the scaling is done in-situ
2771 The default is taken from .asaprc (False)
2772
2773 """
2774 if insitu is None:
2775 insitu = rcParams['insitu']
2776 self._math._setinsitu(insitu)
2777 varlist = vars()
2778 s = scantable(self._math._bin(self, width))
2779 s._add_history("bin", varlist)
2780 if insitu:
2781 self._assign(s)
2782 else:
2783 return s
2784
2785 @asaplog_post_dec
2786 def reshape(self, first, last, insitu=None):
2787 """Resize the band by providing first and last channel.
2788 This will cut off all channels outside [first, last].
2789 """
2790 if insitu is None:
2791 insitu = rcParams['insitu']
2792 varlist = vars()
2793 if last < 0:
2794 last = self.nchan()-1 + last
2795 s = None
2796 if insitu:
2797 s = self
2798 else:
2799 s = self.copy()
2800 s._reshape(first,last)
2801 s._add_history("reshape", varlist)
2802 if not insitu:
2803 return s
2804
2805 @asaplog_post_dec
2806 def resample(self, width=5, method='cubic', insitu=None):
2807 """\
2808 Return a scan where all spectra have been binned up.
2809
2810 Parameters:
2811
2812 width: The bin width (default=5) in pixels
2813
2814 method: Interpolation method when correcting from a table.
2815 Values are 'nearest', 'linear', 'cubic' (default)
2816 and 'spline'
2817
2818 insitu: if False a new scantable is returned.
2819 Otherwise, the scaling is done in-situ
2820 The default is taken from .asaprc (False)
2821
2822 """
2823 if insitu is None:
2824 insitu = rcParams['insitu']
2825 self._math._setinsitu(insitu)
2826 varlist = vars()
2827 s = scantable(self._math._resample(self, method, width))
2828 s._add_history("resample", varlist)
2829 if insitu:
2830 self._assign(s)
2831 else:
2832 return s
2833
2834 @asaplog_post_dec
2835 def average_pol(self, mask=None, weight='none'):
2836 """\
2837 Average the Polarisations together.
2838
2839 Parameters:
2840
2841 mask: An optional mask defining the region, where the
2842 averaging will be applied. The output will have all
2843 specified points masked.
2844
2845 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2846 weighted), or 'tsys' (1/Tsys**2 weighted)
2847
2848 """
2849 varlist = vars()
2850 mask = mask or ()
2851 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2852 s._add_history("average_pol", varlist)
2853 return s
2854
2855 @asaplog_post_dec
2856 def average_beam(self, mask=None, weight='none'):
2857 """\
2858 Average the Beams together.
2859
2860 Parameters:
2861 mask: An optional mask defining the region, where the
2862 averaging will be applied. The output will have all
2863 specified points masked.
2864
2865 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2866 weighted), or 'tsys' (1/Tsys**2 weighted)
2867
2868 """
2869 varlist = vars()
2870 mask = mask or ()
2871 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2872 s._add_history("average_beam", varlist)
2873 return s
2874
2875 def parallactify(self, pflag):
2876 """\
2877 Set a flag to indicate whether this data should be treated as having
2878 been 'parallactified' (total phase == 0.0)
2879
2880 Parameters:
2881
2882 pflag: Bool indicating whether to turn this on (True) or
2883 off (False)
2884
2885 """
2886 varlist = vars()
2887 self._parallactify(pflag)
2888 self._add_history("parallactify", varlist)
2889
2890 @asaplog_post_dec
2891 def convert_pol(self, poltype=None):
2892 """\
2893 Convert the data to a different polarisation type.
2894 Note that you will need cross-polarisation terms for most conversions.
2895
2896 Parameters:
2897
2898 poltype: The new polarisation type. Valid types are:
2899 'linear', 'circular', 'stokes' and 'linpol'
2900
2901 """
2902 varlist = vars()
2903 s = scantable(self._math._convertpol(self, poltype))
2904 s._add_history("convert_pol", varlist)
2905 return s
2906
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive accept/reject loop below; it is
        # passed to _get_verify_action and set to "N" once the user rejects
        # all remaining rows (which also suppresses further plotting).
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed and original data for this row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row the prompt default is reset
                # (presumably so the 'H'elp default is shown only once --
                # confirm against _get_verify_action)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2999
3000 @asaplog_post_dec
3001 def regrid_channel(self, width=5, plot=False, insitu=None):
3002 """\
3003 Regrid the spectra by the specified channel width
3004
3005 Parameters:
3006
3007 width: The channel width (float) of regridded spectra
3008 in the current spectral unit.
3009
3010 plot: [NOT IMPLEMENTED YET]
3011 plot the original and the regridded spectra.
3012 In this each indivual fit has to be approved, by
3013 typing 'y' or 'n'
3014
3015 insitu: if False a new scantable is returned.
3016 Otherwise, the scaling is done in-situ
3017 The default is taken from .asaprc (False)
3018
3019 """
3020 if insitu is None: insitu = rcParams['insitu']
3021 varlist = vars()
3022
3023 if plot:
3024 asaplog.post()
3025 asaplog.push("Verification plot is not implemtnetd yet.")
3026 asaplog.post("WARN")
3027
3028 s = self.copy()
3029 s._regrid_specchan(width)
3030
3031 s._add_history("regrid_channel", varlist)
3032
3033# if plot:
3034# from asap.asapplotter import new_asaplot
3035# theplot = new_asaplot(rcParams['plotter.gui'])
3036# from matplotlib import rc as rcp
3037# rcp('lines', linewidth=1)
3038# theplot.set_panels()
3039# ylab=s._get_ordinate_label()
3040# #theplot.palette(0,["#777777","red"])
3041# for r in xrange(s.nrow()):
3042# xsm=s._getabcissa(r)
3043# ysm=s._getspectrum(r)
3044# xorg=orgscan._getabcissa(r)
3045# yorg=orgscan._getspectrum(r)
3046# theplot.clear()
3047# theplot.hold()
3048# theplot.set_axes('ylabel',ylab)
3049# theplot.set_axes('xlabel',s._getabcissalabel(r))
3050# theplot.set_axes('title',s._getsourcename(r))
3051# theplot.set_line(label='Original',color="#777777")
3052# theplot.plot(xorg,yorg)
3053# theplot.set_line(label='Smoothed',color="red")
3054# theplot.plot(xsm,ysm)
3055# ### Ugly part for legend
3056# for i in [0,1]:
3057# theplot.subplots[0]['lines'].append(
3058# [theplot.subplots[0]['axes'].lines[i]]
3059# )
3060# theplot.release()
3061# ### Ugly part for legend
3062# theplot.subplots[0]['lines']=[]
3063# res = raw_input("Accept smoothing ([y]/n): ")
3064# if res.upper() == 'N':
3065# s._setspectrum(yorg, r)
3066# theplot.quit()
3067# del theplot
3068# del orgscan
3069
3070 if insitu: self._assign(s)
3071 else: return s
3072
3073 @asaplog_post_dec
3074 def _parse_wn(self, wn):
3075 if isinstance(wn, list) or isinstance(wn, tuple):
3076 return wn
3077 elif isinstance(wn, int):
3078 return [ wn ]
3079 elif isinstance(wn, str):
3080 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3081 val = wn.split('-')
3082 val = [int(val[0]), int(val[1])]
3083 val.sort()
3084 res = [i for i in xrange(val[0], val[1]+1)]
3085 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3086 val = int(wn[2:])+1
3087 res = [i for i in xrange(val)]
3088 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3089 val = int(wn[:-2])+1
3090 res = [i for i in xrange(val)]
3091 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3092 val = int(wn[1:])
3093 res = [i for i in xrange(val)]
3094 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3095 val = int(wn[:-1])
3096 res = [i for i in xrange(val)]
3097 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3098 # then interpreted in C++
3099 # side as [a,a+1,...,a_nyq]
3100 # (CAS-3759)
3101 val = int(wn[2:])
3102 res = [val, -999]
3103 #res = [i for i in xrange(val, self.nchan()/2+1)]
3104 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3105 # then interpreted in C++
3106 # side as [a,a+1,...,a_nyq]
3107 # (CAS-3759)
3108 val = int(wn[:-2])
3109 res = [val, -999]
3110 #res = [i for i in xrange(val, self.nchan()/2+1)]
3111 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3112 # then interpreted in C++
3113 # side as [a+1,a+2,...,a_nyq]
3114 # (CAS-3759)
3115 val = int(wn[1:])+1
3116 res = [val, -999]
3117 #res = [i for i in xrange(val, self.nchan()/2+1)]
3118 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3119 # then interpreted in C++
3120 # side as [a+1,a+2,...,a_nyq]
3121 # (CAS-3759)
3122 val = int(wn[:-1])+1
3123 res = [val, -999]
3124 #res = [i for i in xrange(val, self.nchan()/2+1)]
3125
3126 return res
3127 else:
3128 msg = 'wrong value given for addwn/rejwn'
3129 raise RuntimeError(msg)
3130
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.
        """

        try:
            varlist = vars()
            # normalise unset keyword arguments to their documented defaults
            if retfitres      is None: retfitres      = False
            if inbltable      is None: raise ValueError("bltable missing.")
            if outbltable     is None: outbltable     = ''
            if overwrite      is None: overwrite      = False

            if insitu is None: insitu = rcParams['insitu']
            # for insitu=False, fit/subtract on a copy and leave self intact
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # NOTE(review): the fourth argument flags whether outbltable
            # already exists on disk -- presumably so the C++ layer knows
            # to honour 'overwrite'; confirm against _apply_bltable.
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # convert the raw (string) fit results only when requested
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            # return shape follows the (insitu, retfitres) matrix documented
            # in the docstring above
            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3206
3207 @asaplog_post_dec
3208 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3209 """\
3210 Subtract baseline based on parameters written in the input list.
3211
3212 Parameters:
3213 insitu: if True, baseline fitting/subtraction is done
3214 in-situ. If False, a new scantable with
3215 baseline subtracted is returned. Actually,
3216 format of the returned value depends on both
3217 insitu and retfitres (see below).
3218 The default is taken from .asaprc (False)
3219 retfitres: if True, the results of baseline fitting (i.e.,
3220 coefficients and rms) are returned.
3221 default is False.
3222 The format of the returned value of this
3223 function varies as follows:
3224 (1) in case insitu=True and retfitres=True:
3225 fitting result.
3226 (2) in case insitu=True and retfitres=False:
3227 None.
3228 (3) in case insitu=False and retfitres=True:
3229 a dictionary containing a new scantable
3230 (with baseline subtracted) and the fitting
3231 results.
3232 (4) in case insitu=False and retfitres=False:
3233 a new scantable (with baseline subtracted).
3234 blinfo: baseline parameter set stored in a dictionary
3235 or a list of dictionary. Each dictionary
3236 corresponds to each spectrum and must contain
3237 the following keys and values:
3238 'row': row number,
3239 'blfunc': function name. available ones include
3240 'poly', 'chebyshev', 'cspline' and
3241 'sinusoid',
3242 'order': maximum order of polynomial. needed
3243 if blfunc='poly' or 'chebyshev',
3244 'npiece': number or piecewise polynomial.
3245 needed if blfunc='cspline',
3246 'nwave': a list of sinusoidal wave numbers.
3247 needed if blfunc='sinusoid', and
3248 'masklist': min-max windows for channel mask.
3249 the specified ranges will be used
3250 for fitting.
3251 bltable: name of output baseline table where baseline
3252 parameters and fitting results recorded.
3253 default is ''(no output).
3254 overwrite: if True when an existing baseline table is
3255 specified for bltable, overwrites it.
3256 Otherwise there is no harm.
3257 default is False.
3258
3259 Example:
3260 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3261 'masklist':[[10,350],[352,510]]},
3262 {'row':1, 'blfunc':'cspline', 'npiece':3,
3263 'masklist':[[3,16],[19,404],[407,511]]}
3264 ])
3265
3266 the first spectrum (row=0) will be fitted with polynomial
3267 of order=5 and the next one (row=1) will be fitted with cubic
3268 spline consisting of 3 pieces.
3269 """
3270
3271 try:
3272 varlist = vars()
3273 if retfitres is None: retfitres = False
3274 if blinfo is None: blinfo = []
3275 if bltable is None: bltable = ''
3276 if overwrite is None: overwrite = False
3277
3278 if insitu is None: insitu = rcParams['insitu']
3279 if insitu:
3280 workscan = self
3281 else:
3282 workscan = self.copy()
3283
3284 nrow = workscan.nrow()
3285
3286 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3287
3288 print "in_blinfo=< "+ str(in_blinfo)+" >"
3289
3290 sres = workscan._sub_baseline(in_blinfo,
3291 retfitres,
3292 bltable,
3293 os.path.exists(bltable),
3294 overwrite)
3295 if retfitres: res = parse_fitresult(sres)
3296
3297 workscan._add_history('sub_baseline', varlist)
3298
3299 if insitu:
3300 self._assign(workscan)
3301 if retfitres:
3302 return res
3303 else:
3304 return None
3305 else:
3306 if retfitres:
3307 return {'scantable': workscan, 'fitresults': res}
3308 else:
3309 return workscan
3310
3311 except RuntimeError, e:
3312 raise_fitting_failure_exception(e)
3313
3314 @asaplog_post_dec
3315 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3316 whichrow=None, uselinefinder=None, edge=None,
3317 threshold=None, chan_avg_limit=None):
3318 """\
3319 Calculates and returns model selection criteria for a specified
3320 baseline model and a given spectrum data.
3321 Available values include Akaike Information Criterion (AIC), the
3322 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3323 Bayesian Information Criterion (BIC) and the Generalised Cross
3324 Validation (GCV).
3325
3326 Parameters:
3327 value: name of model selection criteria to calculate.
3328 available ones include 'aic', 'aicc', 'bic' and
3329 'gcv'. default is 'aicc'.
3330 blfunc: baseline function name. available ones include
3331 'chebyshev', 'cspline' and 'sinusoid'.
3332 default is 'chebyshev'.
3333 order: parameter for basline function. actually stands for
3334 order of polynomial (order) for 'chebyshev',
3335 number of spline pieces (npiece) for 'cspline' and
3336 maximum wave number for 'sinusoid', respectively.
3337 default is 5 (which is also the default order value
3338 for [auto_]chebyshev_baseline()).
3339 mask: an optional mask. default is [].
3340 whichrow: row number. default is 0 (the first row)
3341 uselinefinder: use sd.linefinder() to flag out line regions
3342 default is True.
3343 edge: an optional number of channel to drop at
3344 the edge of spectrum. If only one value is
3345 specified, the same number will be dropped
3346 from both sides of the spectrum. Default
3347 is to keep all channels. Nested tuples
3348 represent individual edge selection for
3349 different IFs (a number of spectral channels
3350 can be different)
3351 default is (0, 0).
3352 threshold: the threshold used by line finder. It is
3353 better to keep it large as only strong lines
3354 affect the baseline solution.
3355 default is 3.
3356 chan_avg_limit: a maximum number of consequtive spectral
3357 channels to average during the search of
3358 weak and broad lines. The default is no
3359 averaging (and no search for weak lines).
3360 If such lines can affect the fitted baseline
3361 (e.g. a high order polynomial is fitted),
3362 increase this parameter (usually values up
3363 to 8 are reasonable). Most users of this
3364 method should find the default value sufficient.
3365 default is 1.
3366
3367 Example:
3368 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3369 """
3370
3371 try:
3372 varlist = vars()
3373
3374 if value is None: value = 'aicc'
3375 if blfunc is None: blfunc = 'chebyshev'
3376 if order is None: order = 5
3377 if mask is None: mask = []
3378 if whichrow is None: whichrow = 0
3379 if uselinefinder is None: uselinefinder = True
3380 if edge is None: edge = (0, 0)
3381 if threshold is None: threshold = 3
3382 if chan_avg_limit is None: chan_avg_limit = 1
3383
3384 return self._calc_aic(value, blfunc, order, mask,
3385 whichrow, uselinefinder, edge,
3386 threshold, chan_avg_limit)
3387
3388 except RuntimeError, e:
3389 raise_fitting_failure_exception(e)
3390
3391 @asaplog_post_dec
3392 def sinusoid_baseline(self, mask=None, applyfft=None,
3393 fftmethod=None, fftthresh=None,
3394 addwn=None, rejwn=None,
3395 insitu=None,
3396 clipthresh=None, clipniter=None,
3397 plot=None,
3398 getresidual=None,
3399 showprogress=None, minnrow=None,
3400 outlog=None,
3401 blfile=None, csvformat=None,
3402 bltable=None):
3403 """\
3404 Return a scan which has been baselined (all rows) with sinusoidal
3405 functions.
3406
3407 Parameters:
3408 mask: an optional mask
3409 applyfft: if True use some method, such as FFT, to find
3410 strongest sinusoidal components in the wavenumber
3411 domain to be used for baseline fitting.
3412 default is True.
3413 fftmethod: method to find the strong sinusoidal components.
3414 now only 'fft' is available and it is the default.
3415 fftthresh: the threshold to select wave numbers to be used for
3416 fitting from the distribution of amplitudes in the
3417 wavenumber domain.
3418 both float and string values accepted.
3419 given a float value, the unit is set to sigma.
3420 for string values, allowed formats include:
3421 'xsigma' or 'x' (= x-sigma level. e.g.,
3422 '3sigma'), or
3423 'topx' (= the x strongest ones, e.g. 'top5').
3424 default is 3.0 (unit: sigma).
3425 addwn: the additional wave numbers to be used for fitting.
3426 list or integer value is accepted to specify every
3427 wave numbers. also string value can be used in case
3428 you need to specify wave numbers in a certain range,
3429 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3430 '<a' (= 0,1,...,a-2,a-1),
3431 '>=a' (= a, a+1, ... up to the maximum wave
3432 number corresponding to the Nyquist
3433 frequency for the case of FFT).
3434 default is [0].
3435 rejwn: the wave numbers NOT to be used for fitting.
3436 can be set just as addwn but has higher priority:
3437 wave numbers which are specified both in addwn
3438 and rejwn will NOT be used. default is [].
3439 insitu: if False a new scantable is returned.
3440 Otherwise, the scaling is done in-situ
3441 The default is taken from .asaprc (False)
3442 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3443 clipniter: maximum number of iteration of 'clipthresh'-sigma
3444 clipping (default is 0)
3445 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3446 plot the fit and the residual. In this each
3447 indivual fit has to be approved, by typing 'y'
3448 or 'n'
3449 getresidual: if False, returns best-fit values instead of
3450 residual. (default is True)
3451 showprogress: show progress status for large data.
3452 default is True.
3453 minnrow: minimum number of input spectra to show.
3454 default is 1000.
3455 outlog: Output the coefficients of the best-fit
3456 function to logger (default is False)
3457 blfile: Name of a text file in which the best-fit
3458 parameter values to be written
3459 (default is '': no file/logger output)
3460 csvformat: if True blfile is csv-formatted, default is False.
3461 bltable: name of a baseline table where fitting results
3462 (coefficients, rms, etc.) are to be written.
3463 if given, fitting results will NOT be output to
3464 scantable (insitu=True) or None will be
3465 returned (insitu=False).
3466 (default is "": no table output)
3467
3468 Example:
3469 # return a scan baselined by a combination of sinusoidal curves
3470 # having wave numbers in spectral window up to 10,
3471 # also with 3-sigma clipping, iteration up to 4 times
3472 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3473
3474 Note:
3475 The best-fit parameter values output in logger and/or blfile are now
3476 based on specunit of 'channel'.
3477 """
3478
3479 try:
3480 varlist = vars()
3481
3482 if insitu is None: insitu = rcParams['insitu']
3483 if insitu:
3484 workscan = self
3485 else:
3486 workscan = self.copy()
3487
3488 if mask is None: mask = []
3489 if applyfft is None: applyfft = True
3490 if fftmethod is None: fftmethod = 'fft'
3491 if fftthresh is None: fftthresh = 3.0
3492 if addwn is None: addwn = [0]
3493 if rejwn is None: rejwn = []
3494 if clipthresh is None: clipthresh = 3.0
3495 if clipniter is None: clipniter = 0
3496 if plot is None: plot = False
3497 if getresidual is None: getresidual = True
3498 if showprogress is None: showprogress = True
3499 if minnrow is None: minnrow = 1000
3500 if outlog is None: outlog = False
3501 if blfile is None: blfile = ''
3502 if csvformat is None: csvformat = False
3503 if bltable is None: bltable = ''
3504
3505 sapplyfft = 'true' if applyfft else 'false'
3506 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3507
3508 scsvformat = 'T' if csvformat else 'F'
3509
3510 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3511 workscan._sinusoid_baseline(mask,
3512 fftinfo,
3513 #applyfft, fftmethod.lower(),
3514 #str(fftthresh).lower(),
3515 workscan._parse_wn(addwn),
3516 workscan._parse_wn(rejwn),
3517 clipthresh, clipniter,
3518 getresidual,
3519 pack_progress_params(showprogress,
3520 minnrow),
3521 outlog, scsvformat+blfile,
3522 bltable)
3523 workscan._add_history('sinusoid_baseline', varlist)
3524
3525 if bltable == '':
3526 if insitu:
3527 self._assign(workscan)
3528 else:
3529 return workscan
3530 else:
3531 if not insitu:
3532 return None
3533
3534 except RuntimeError, e:
3535 raise_fitting_failure_exception(e)
3536
3537
3538 @asaplog_post_dec
3539 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3540 fftmethod=None, fftthresh=None,
3541 addwn=None, rejwn=None,
3542 insitu=None,
3543 clipthresh=None, clipniter=None,
3544 edge=None, threshold=None, chan_avg_limit=None,
3545 plot=None,
3546 getresidual=None,
3547 showprogress=None, minnrow=None,
3548 outlog=None,
3549 blfile=None, csvformat=None,
3550 bltable=None):
3551 """\
3552 Return a scan which has been baselined (all rows) with sinusoidal
3553 functions.
3554 Spectral lines are detected first using linefinder and masked out
3555 to avoid them affecting the baseline solution.
3556
3557 Parameters:
3558 mask: an optional mask retreived from scantable
3559 applyfft: if True use some method, such as FFT, to find
3560 strongest sinusoidal components in the wavenumber
3561 domain to be used for baseline fitting.
3562 default is True.
3563 fftmethod: method to find the strong sinusoidal components.
3564 now only 'fft' is available and it is the default.
3565 fftthresh: the threshold to select wave numbers to be used for
3566 fitting from the distribution of amplitudes in the
3567 wavenumber domain.
3568 both float and string values accepted.
3569 given a float value, the unit is set to sigma.
3570 for string values, allowed formats include:
3571 'xsigma' or 'x' (= x-sigma level. e.g.,
3572 '3sigma'), or
3573 'topx' (= the x strongest ones, e.g. 'top5').
3574 default is 3.0 (unit: sigma).
3575 addwn: the additional wave numbers to be used for fitting.
3576 list or integer value is accepted to specify every
3577 wave numbers. also string value can be used in case
3578 you need to specify wave numbers in a certain range,
3579 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3580 '<a' (= 0,1,...,a-2,a-1),
3581 '>=a' (= a, a+1, ... up to the maximum wave
3582 number corresponding to the Nyquist
3583 frequency for the case of FFT).
3584 default is [0].
3585 rejwn: the wave numbers NOT to be used for fitting.
3586 can be set just as addwn but has higher priority:
3587 wave numbers which are specified both in addwn
3588 and rejwn will NOT be used. default is [].
3589 insitu: if False a new scantable is returned.
3590 Otherwise, the scaling is done in-situ
3591 The default is taken from .asaprc (False)
3592 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3593 clipniter: maximum number of iteration of 'clipthresh'-sigma
3594 clipping (default is 0)
3595 edge: an optional number of channel to drop at
3596 the edge of spectrum. If only one value is
3597 specified, the same number will be dropped
3598 from both sides of the spectrum. Default
3599 is to keep all channels. Nested tuples
3600 represent individual edge selection for
3601 different IFs (a number of spectral channels
3602 can be different)
3603 threshold: the threshold used by line finder. It is
3604 better to keep it large as only strong lines
3605 affect the baseline solution.
3606 chan_avg_limit: a maximum number of consequtive spectral
3607 channels to average during the search of
3608 weak and broad lines. The default is no
3609 averaging (and no search for weak lines).
3610 If such lines can affect the fitted baseline
3611 (e.g. a high order polynomial is fitted),
3612 increase this parameter (usually values up
3613 to 8 are reasonable). Most users of this
3614 method should find the default value sufficient.
3615 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3616 plot the fit and the residual. In this each
3617 indivual fit has to be approved, by typing 'y'
3618 or 'n'
3619 getresidual: if False, returns best-fit values instead of
3620 residual. (default is True)
3621 showprogress: show progress status for large data.
3622 default is True.
3623 minnrow: minimum number of input spectra to show.
3624 default is 1000.
3625 outlog: Output the coefficients of the best-fit
3626 function to logger (default is False)
3627 blfile: Name of a text file in which the best-fit
3628 parameter values to be written
3629 (default is "": no file/logger output)
3630 csvformat: if True blfile is csv-formatted, default is False.
3631 bltable: name of a baseline table where fitting results
3632 (coefficients, rms, etc.) are to be written.
3633 if given, fitting results will NOT be output to
3634 scantable (insitu=True) or None will be
3635 returned (insitu=False).
3636 (default is "": no table output)
3637
3638 Example:
3639 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3640
3641 Note:
3642 The best-fit parameter values output in logger and/or blfile are now
3643 based on specunit of 'channel'.
3644 """
3645
3646 try:
3647 varlist = vars()
3648
3649 if insitu is None: insitu = rcParams['insitu']
3650 if insitu:
3651 workscan = self
3652 else:
3653 workscan = self.copy()
3654
3655 if mask is None: mask = []
3656 if applyfft is None: applyfft = True
3657 if fftmethod is None: fftmethod = 'fft'
3658 if fftthresh is None: fftthresh = 3.0
3659 if addwn is None: addwn = [0]
3660 if rejwn is None: rejwn = []
3661 if clipthresh is None: clipthresh = 3.0
3662 if clipniter is None: clipniter = 0
3663 if edge is None: edge = (0,0)
3664 if threshold is None: threshold = 3
3665 if chan_avg_limit is None: chan_avg_limit = 1
3666 if plot is None: plot = False
3667 if getresidual is None: getresidual = True
3668 if showprogress is None: showprogress = True
3669 if minnrow is None: minnrow = 1000
3670 if outlog is None: outlog = False
3671 if blfile is None: blfile = ''
3672 if csvformat is None: csvformat = False
3673 if bltable is None: bltable = ''
3674
3675 sapplyfft = 'true' if applyfft else 'false'
3676 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3677
3678 scsvformat = 'T' if csvformat else 'F'
3679
3680 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3681 workscan._auto_sinusoid_baseline(mask,
3682 fftinfo,
3683 workscan._parse_wn(addwn),
3684 workscan._parse_wn(rejwn),
3685 clipthresh, clipniter,
3686 normalise_edge_param(edge),
3687 threshold, chan_avg_limit,
3688 getresidual,
3689 pack_progress_params(showprogress,
3690 minnrow),
3691 outlog, scsvformat+blfile, bltable)
3692 workscan._add_history("auto_sinusoid_baseline", varlist)
3693
3694 if bltable == '':
3695 if insitu:
3696 self._assign(workscan)
3697 else:
3698 return workscan
3699 else:
3700 if not insitu:
3701 return None
3702
3703 except RuntimeError, e:
3704 raise_fitting_failure_exception(e)
3705
3706 @asaplog_post_dec
3707 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3708 clipthresh=None, clipniter=None, plot=None,
3709 getresidual=None, showprogress=None, minnrow=None,
3710 outlog=None, blfile=None, csvformat=None,
3711 bltable=None):
3712 """\
3713 Return a scan which has been baselined (all rows) by cubic spline
3714 function (piecewise cubic polynomial).
3715
3716 Parameters:
3717 mask: An optional mask
3718 npiece: Number of pieces. (default is 2)
3719 insitu: If False a new scantable is returned.
3720 Otherwise, the scaling is done in-situ
3721 The default is taken from .asaprc (False)
3722 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3723 clipniter: maximum number of iteration of 'clipthresh'-sigma
3724 clipping (default is 0)
3725 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3726 plot the fit and the residual. In this each
3727 indivual fit has to be approved, by typing 'y'
3728 or 'n'
3729 getresidual: if False, returns best-fit values instead of
3730 residual. (default is True)
3731 showprogress: show progress status for large data.
3732 default is True.
3733 minnrow: minimum number of input spectra to show.
3734 default is 1000.
3735 outlog: Output the coefficients of the best-fit
3736 function to logger (default is False)
3737 blfile: Name of a text file in which the best-fit
3738 parameter values to be written
3739 (default is "": no file/logger output)
3740 csvformat: if True blfile is csv-formatted, default is False.
3741 bltable: name of a baseline table where fitting results
3742 (coefficients, rms, etc.) are to be written.
3743 if given, fitting results will NOT be output to
3744 scantable (insitu=True) or None will be
3745 returned (insitu=False).
3746 (default is "": no table output)
3747
3748 Example:
3749 # return a scan baselined by a cubic spline consisting of 2 pieces
3750 # (i.e., 1 internal knot),
3751 # also with 3-sigma clipping, iteration up to 4 times
3752 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3753
3754 Note:
3755 The best-fit parameter values output in logger and/or blfile are now
3756 based on specunit of 'channel'.
3757 """
3758
3759 try:
3760 varlist = vars()
3761
3762 if insitu is None: insitu = rcParams['insitu']
3763 if insitu:
3764 workscan = self
3765 else:
3766 workscan = self.copy()
3767
3768 if mask is None: mask = []
3769 if npiece is None: npiece = 2
3770 if clipthresh is None: clipthresh = 3.0
3771 if clipniter is None: clipniter = 0
3772 if plot is None: plot = False
3773 if getresidual is None: getresidual = True
3774 if showprogress is None: showprogress = True
3775 if minnrow is None: minnrow = 1000
3776 if outlog is None: outlog = False
3777 if blfile is None: blfile = ''
3778 if csvformat is None: csvformat = False
3779 if bltable is None: bltable = ''
3780
3781 scsvformat = 'T' if csvformat else 'F'
3782
3783 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3784 workscan._cspline_baseline(mask, npiece,
3785 clipthresh, clipniter,
3786 getresidual,
3787 pack_progress_params(showprogress,
3788 minnrow),
3789 outlog, scsvformat+blfile,
3790 bltable)
3791 workscan._add_history("cspline_baseline", varlist)
3792
3793 if bltable == '':
3794 if insitu:
3795 self._assign(workscan)
3796 else:
3797 return workscan
3798 else:
3799 if not insitu:
3800 return None
3801
3802 except RuntimeError, e:
3803 raise_fitting_failure_exception(e)
3804
3805 @asaplog_post_dec
3806 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3807 clipthresh=None, clipniter=None,
3808 edge=None, threshold=None, chan_avg_limit=None,
3809 getresidual=None, plot=None,
3810 showprogress=None, minnrow=None, outlog=None,
3811 blfile=None, csvformat=None, bltable=None):
3812 """\
3813 Return a scan which has been baselined (all rows) by cubic spline
3814 function (piecewise cubic polynomial).
3815 Spectral lines are detected first using linefinder and masked out
3816 to avoid them affecting the baseline solution.
3817
3818 Parameters:
3819 mask: an optional mask retreived from scantable
3820 npiece: Number of pieces. (default is 2)
3821 insitu: if False a new scantable is returned.
3822 Otherwise, the scaling is done in-situ
3823 The default is taken from .asaprc (False)
3824 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3825 clipniter: maximum number of iteration of 'clipthresh'-sigma
3826 clipping (default is 0)
3827 edge: an optional number of channel to drop at
3828 the edge of spectrum. If only one value is
3829 specified, the same number will be dropped
3830 from both sides of the spectrum. Default
3831 is to keep all channels. Nested tuples
3832 represent individual edge selection for
3833 different IFs (a number of spectral channels
3834 can be different)
3835 threshold: the threshold used by line finder. It is
3836 better to keep it large as only strong lines
3837 affect the baseline solution.
3838 chan_avg_limit: a maximum number of consequtive spectral
3839 channels to average during the search of
3840 weak and broad lines. The default is no
3841 averaging (and no search for weak lines).
3842 If such lines can affect the fitted baseline
3843 (e.g. a high order polynomial is fitted),
3844 increase this parameter (usually values up
3845 to 8 are reasonable). Most users of this
3846 method should find the default value sufficient.
3847 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3848 plot the fit and the residual. In this each
3849 indivual fit has to be approved, by typing 'y'
3850 or 'n'
3851 getresidual: if False, returns best-fit values instead of
3852 residual. (default is True)
3853 showprogress: show progress status for large data.
3854 default is True.
3855 minnrow: minimum number of input spectra to show.
3856 default is 1000.
3857 outlog: Output the coefficients of the best-fit
3858 function to logger (default is False)
3859 blfile: Name of a text file in which the best-fit
3860 parameter values to be written
3861 (default is "": no file/logger output)
3862 csvformat: if True blfile is csv-formatted, default is False.
3863 bltable: name of a baseline table where fitting results
3864 (coefficients, rms, etc.) are to be written.
3865 if given, fitting results will NOT be output to
3866 scantable (insitu=True) or None will be
3867 returned (insitu=False).
3868 (default is "": no table output)
3869
3870 Example:
3871 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3872
3873 Note:
3874 The best-fit parameter values output in logger and/or blfile are now
3875 based on specunit of 'channel'.
3876 """
3877
3878 try:
3879 varlist = vars()
3880
3881 if insitu is None: insitu = rcParams['insitu']
3882 if insitu:
3883 workscan = self
3884 else:
3885 workscan = self.copy()
3886
3887 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3888 if mask is None: mask = []
3889 if npiece is None: npiece = 2
3890 if clipthresh is None: clipthresh = 3.0
3891 if clipniter is None: clipniter = 0
3892 if edge is None: edge = (0, 0)
3893 if threshold is None: threshold = 3
3894 if chan_avg_limit is None: chan_avg_limit = 1
3895 if plot is None: plot = False
3896 if getresidual is None: getresidual = True
3897 if showprogress is None: showprogress = True
3898 if minnrow is None: minnrow = 1000
3899 if outlog is None: outlog = False
3900 if blfile is None: blfile = ''
3901 if csvformat is None: csvformat = False
3902 if bltable is None: bltable = ''
3903
3904 scsvformat = 'T' if csvformat else 'F'
3905
3906 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3907 workscan._auto_cspline_baseline(mask, npiece,
3908 clipthresh, clipniter,
3909 normalise_edge_param(edge),
3910 threshold,
3911 chan_avg_limit, getresidual,
3912 pack_progress_params(showprogress,
3913 minnrow),
3914 outlog,
3915 scsvformat+blfile,
3916 bltable)
3917 workscan._add_history("auto_cspline_baseline", varlist)
3918
3919 if bltable == '':
3920 if insitu:
3921 self._assign(workscan)
3922 else:
3923 return workscan
3924 else:
3925 if not insitu:
3926 return None
3927
3928 except RuntimeError, e:
3929 raise_fitting_failure_exception(e)
3930
3931 @asaplog_post_dec
3932 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
3933 clipthresh=None, clipniter=None, plot=None,
3934 getresidual=None, showprogress=None, minnrow=None,
3935 outlog=None, blfile=None, csvformat=None,
3936 bltable=None):
3937 """\
3938 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3939
3940 Parameters:
3941 mask: An optional mask
3942 order: the maximum order of Chebyshev polynomial (default is 5)
3943 insitu: If False a new scantable is returned.
3944 Otherwise, the scaling is done in-situ
3945 The default is taken from .asaprc (False)
3946 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3947 clipniter: maximum number of iteration of 'clipthresh'-sigma
3948 clipping (default is 0)
3949 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3950 plot the fit and the residual. In this each
3951 indivual fit has to be approved, by typing 'y'
3952 or 'n'
3953 getresidual: if False, returns best-fit values instead of
3954 residual. (default is True)
3955 showprogress: show progress status for large data.
3956 default is True.
3957 minnrow: minimum number of input spectra to show.
3958 default is 1000.
3959 outlog: Output the coefficients of the best-fit
3960 function to logger (default is False)
3961 blfile: Name of a text file in which the best-fit
3962 parameter values to be written
3963 (default is "": no file/logger output)
3964 csvformat: if True blfile is csv-formatted, default is False.
3965 bltable: name of a baseline table where fitting results
3966 (coefficients, rms, etc.) are to be written.
3967 if given, fitting results will NOT be output to
3968 scantable (insitu=True) or None will be
3969 returned (insitu=False).
3970 (default is "": no table output)
3971
3972 Example:
3973 # return a scan baselined by a cubic spline consisting of 2 pieces
3974 # (i.e., 1 internal knot),
3975 # also with 3-sigma clipping, iteration up to 4 times
3976 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3977
3978 Note:
3979 The best-fit parameter values output in logger and/or blfile are now
3980 based on specunit of 'channel'.
3981 """
3982
3983 try:
3984 varlist = vars()
3985
3986 if insitu is None: insitu = rcParams['insitu']
3987 if insitu:
3988 workscan = self
3989 else:
3990 workscan = self.copy()
3991
3992 if mask is None: mask = []
3993 if order is None: order = 5
3994 if clipthresh is None: clipthresh = 3.0
3995 if clipniter is None: clipniter = 0
3996 if plot is None: plot = False
3997 if getresidual is None: getresidual = True
3998 if showprogress is None: showprogress = True
3999 if minnrow is None: minnrow = 1000
4000 if outlog is None: outlog = False
4001 if blfile is None: blfile = ''
4002 if csvformat is None: csvformat = False
4003 if bltable is None: bltable = ''
4004
4005 scsvformat = 'T' if csvformat else 'F'
4006
4007 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4008 workscan._chebyshev_baseline(mask, order,
4009 clipthresh, clipniter,
4010 getresidual,
4011 pack_progress_params(showprogress,
4012 minnrow),
4013 outlog, scsvformat+blfile,
4014 bltable)
4015 workscan._add_history("chebyshev_baseline", varlist)
4016
4017 if bltable == '':
4018 if insitu:
4019 self._assign(workscan)
4020 else:
4021 return workscan
4022 else:
4023 if not insitu:
4024 return None
4025
4026 except RuntimeError, e:
4027 raise_fitting_failure_exception(e)
4028
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask: an optional mask retreived from scantable
            order: the maximum order of Chebyshev polynomial (default is 5)
            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter: maximum number of iteration of 'clipthresh'-sigma
                       clipping (default is 0)
            edge: an optional number of channel to drop at
                  the edge of spectrum. If only one value is
                  specified, the same number will be dropped
                  from both sides of the spectrum. Default
                  is to keep all channels. Nested tuples
                  represent individual edge selection for
                  different IFs (a number of spectral channels
                  can be different)
            threshold: the threshold used by line finder. It is
                       better to keep it large as only strong lines
                       affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                  plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow: minimum number of input spectra to show.
                     default is 1000.
            outlog: Output the coefficients of the best-fit
                    function to logger (default is False)
            blfile: Name of a text file in which the best-fit
                    parameter values to be written
                    (default is "": no file/logger output)
            csvformat: if True blfile is csv-formatted, default is False.
            bltable: name of a baseline table where fitting results
                     (coefficients, rms, etc.) are to be written.
                     if given, fitting results will NOT be output to
                     scantable (insitu=True) or None will be
                     returned (insitu=False).
                     (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            # resolve all defaults before recording history / calling C++
            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()
            
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes 'T'/'F' prepended to the blfile name
            scsvformat = 'T' if csvformat else 'F'

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # results written to a baseline table are not also returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4151
4152 @asaplog_post_dec
4153 def poly_baseline(self, mask=None, order=None, insitu=None,
4154 clipthresh=None, clipniter=None, plot=None,
4155 getresidual=None, showprogress=None, minnrow=None,
4156 outlog=None, blfile=None, csvformat=None,
4157 bltable=None):
4158 """\
4159 Return a scan which has been baselined (all rows) by a polynomial.
4160 Parameters:
4161 mask: an optional mask
4162 order: the order of the polynomial (default is 0)
4163 insitu: if False a new scantable is returned.
4164 Otherwise, the scaling is done in-situ
4165 The default is taken from .asaprc (False)
4166 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4167 clipniter: maximum number of iteration of 'clipthresh'-sigma
4168 clipping (default is 0)
4169 plot: plot the fit and the residual. In this each
4170 indivual fit has to be approved, by typing 'y'
4171 or 'n'
4172 getresidual: if False, returns best-fit values instead of
4173 residual. (default is True)
4174 showprogress: show progress status for large data.
4175 default is True.
4176 minnrow: minimum number of input spectra to show.
4177 default is 1000.
4178 outlog: Output the coefficients of the best-fit
4179 function to logger (default is False)
4180 blfile: Name of a text file in which the best-fit
4181 parameter values to be written
4182 (default is "": no file/logger output)
4183 csvformat: if True blfile is csv-formatted, default is False.
4184 bltable: name of a baseline table where fitting results
4185 (coefficients, rms, etc.) are to be written.
4186 if given, fitting results will NOT be output to
4187 scantable (insitu=True) or None will be
4188 returned (insitu=False).
4189 (default is "": no table output)
4190
4191 Example:
4192 # return a scan baselined by a third order polynomial,
4193 # not using a mask
4194 bscan = scan.poly_baseline(order=3)
4195 """
4196
4197 try:
4198 varlist = vars()
4199
4200 if insitu is None:
4201 insitu = rcParams["insitu"]
4202 if insitu:
4203 workscan = self
4204 else:
4205 workscan = self.copy()
4206
4207 if mask is None: mask = []
4208 if order is None: order = 0
4209 if clipthresh is None: clipthresh = 3.0
4210 if clipniter is None: clipniter = 0
4211 if plot is None: plot = False
4212 if getresidual is None: getresidual = True
4213 if showprogress is None: showprogress = True
4214 if minnrow is None: minnrow = 1000
4215 if outlog is None: outlog = False
4216 if blfile is None: blfile = ''
4217 if csvformat is None: csvformat = False
4218 if bltable is None: bltable = ''
4219
4220 scsvformat = 'T' if csvformat else 'F'
4221
4222 if plot:
4223 outblfile = (blfile != "") and \
4224 os.path.exists(os.path.expanduser(
4225 os.path.expandvars(blfile))
4226 )
4227 if outblfile:
4228 blf = open(blfile, "a")
4229
4230 f = fitter()
4231 f.set_function(lpoly=order)
4232
4233 rows = xrange(workscan.nrow())
4234 #if len(rows) > 0: workscan._init_blinfo()
4235
4236 action = "H"
4237 for r in rows:
4238 f.x = workscan._getabcissa(r)
4239 f.y = workscan._getspectrum(r)
4240 if mask:
4241 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4242 else: # mask=None
4243 f.mask = workscan._getmask(r)
4244
4245 f.data = None
4246 f.fit()
4247
4248 if action != "Y": # skip plotting when accepting all
4249 f.plot(residual=True)
4250 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4251 #if accept_fit.upper() == "N":
4252 # #workscan._append_blinfo(None, None, None)
4253 # continue
4254 accept_fit = self._get_verify_action("Accept fit?",action)
4255 if r == 0: action = None
4256 if accept_fit.upper() == "N":
4257 continue
4258 elif accept_fit.upper() == "R":
4259 break
4260 elif accept_fit.upper() == "A":
4261 action = "Y"
4262
4263 blpars = f.get_parameters()
4264 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4265 #workscan._append_blinfo(blpars, masklist, f.mask)
4266 workscan._setspectrum((f.fitter.getresidual()
4267 if getresidual else f.fitter.getfit()), r)
4268
4269 if outblfile:
4270 rms = workscan.get_rms(f.mask, r)
4271 dataout = \
4272 workscan.format_blparams_row(blpars["params"],
4273 blpars["fixed"],
4274 rms, str(masklist),
4275 r, True, csvformat)
4276 blf.write(dataout)
4277
4278 f._p.unmap()
4279 f._p = None
4280
4281 if outblfile:
4282 blf.close()
4283 else:
4284 workscan._poly_baseline(mask, order,
4285 clipthresh, clipniter, #
4286 getresidual,
4287 pack_progress_params(showprogress,
4288 minnrow),
4289 outlog, scsvformat+blfile,
4290 bltable) #
4291
4292 workscan._add_history("poly_baseline", varlist)
4293
4294 if insitu:
4295 self._assign(workscan)
4296 else:
4297 return workscan
4298
4299 except RuntimeError, e:
4300 raise_fitting_failure_exception(e)
4301
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask: an optional mask retreived from scantable
            order: the order of the polynomial (default is 0)
            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter: maximum number of iteration of 'clipthresh'-sigma
                       clipping (default is 0)
            edge: an optional number of channel to drop at
                  the edge of spectrum. If only one value is
                  specified, the same number will be dropped
                  from both sides of the spectrum. Default
                  is to keep all channels. Nested tuples
                  represent individual edge selection for
                  different IFs (a number of spectral channels
                  can be different)
            threshold: the threshold used by line finder. It is
                       better to keep it large as only strong lines
                       affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow: minimum number of input spectra to show.
                     default is 1000.
            outlog: Output the coefficients of the best-fit
                    function to logger (default is False)
            blfile: Name of a text file in which the best-fit
                    parameter values to be written
                    (default is "": no file/logger output)
            csvformat: if True blfile is csv-formatted, default is False.
            bltable: name of a baseline table where fitting results
                     (coefficients, rms, etc.) are to be written.
                     if given, fitting results will NOT be output to
                     scantable (insitu=True) or None will be
                     returned (insitu=False).
                     (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            # resolve all defaults before recording history / fitting
            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # the C++ layer takes 'T'/'F' prepended to the blfile name
            scsvformat = 'T' if csvformat else 'F'

            edge = normalise_edge_param(edge)

            if plot:
                # interactive (python-side) fitting path; blfile output
                # requires the file to already exist (opened in append mode)
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                action = "H"
                for r in rows:
                    # normalised edge params are flat (lo, hi) pairs per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # only the very first row uses the preset 'H' action
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: line finding + fitting done in C++
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # results written to a baseline table are not also returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4486
4487 def _init_blinfo(self):
4488 """\
4489 Initialise the following three auxiliary members:
4490 blpars : parameters of the best-fit baseline,
4491 masklists : mask data (edge positions of masked channels) and
4492 actualmask : mask data (in boolean list),
4493 to keep for use later (including output to logger/text files).
4494 Used by poly_baseline() and auto_poly_baseline() in case of
4495 'plot=True'.
4496 """
4497 self.blpars = []
4498 self.masklists = []
4499 self.actualmask = []
4500 return
4501
4502 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4503 """\
4504 Append baseline-fitting related info to blpars, masklist and
4505 actualmask.
4506 """
4507 self.blpars.append(data_blpars)
4508 self.masklists.append(data_masklists)
4509 self.actualmask.append(data_actualmask)
4510 return
4511
    @asaplog_post_dec
    def rotate_linpolphase(self, angle):
        """\
        Rotate the phase of the complex polarization O=Q+iU correlation.
        This is always done in situ in the raw data. So if you call this
        function more than once then each call rotates the phase further.

        Parameters:

            angle: The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_linpolphase(2.3)

        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._rotate_linpolphase(self, angle)
        self._add_history("rotate_linpolphase", varlist)
        return
4532
    @asaplog_post_dec
    def rotate_xyphase(self, angle):
        """\
        Rotate the phase of the XY correlation. This is always done in situ
        in the data. So if you call this function more than once
        then each call rotates the phase further.

        Parameters:

            angle: The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_xyphase(2.3)

        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._rotate_xyphase(self, angle)
        self._add_history("rotate_xyphase", varlist)
        return
4553
    @asaplog_post_dec
    def swap_linears(self):
        """\
        Swap the linear polarisations XX and YY, or better the first two
        polarisations as this also works for ciculars.

        The swap is done in situ via the underlying math engine.
        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._swap_linears(self)
        self._add_history("swap_linears", varlist)
        return
4564
    @asaplog_post_dec
    def invert_phase(self):
        """\
        Invert the phase of the complex polarisation.

        The inversion is done in situ via the underlying math engine.
        """
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._math._invert_phase(self)
        self._add_history("invert_phase", varlist)
        return
4574
4575 @asaplog_post_dec
4576 def add(self, offset, insitu=None):
4577 """\
4578 Return a scan where all spectra have the offset added
4579
4580 Parameters:
4581
4582 offset: the offset
4583
4584 insitu: if False a new scantable is returned.
4585 Otherwise, the scaling is done in-situ
4586 The default is taken from .asaprc (False)
4587
4588 """
4589 if insitu is None: insitu = rcParams['insitu']
4590 self._math._setinsitu(insitu)
4591 varlist = vars()
4592 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4593 s._add_history("add", varlist)
4594 if insitu:
4595 self._assign(s)
4596 else:
4597 return s
4598
4599 @asaplog_post_dec
4600 def scale(self, factor, tsys=True, insitu=None):
4601 """\
4602
4603 Return a scan where all spectra are scaled by the given 'factor'
4604
4605 Parameters:
4606
4607 factor: the scaling factor (float or 1D float list)
4608
4609 insitu: if False a new scantable is returned.
4610 Otherwise, the scaling is done in-situ
4611 The default is taken from .asaprc (False)
4612
4613 tsys: if True (default) then apply the operation to Tsys
4614 as well as the data
4615
4616 """
4617 if insitu is None: insitu = rcParams['insitu']
4618 self._math._setinsitu(insitu)
4619 varlist = vars()
4620 s = None
4621 import numpy
4622 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4623 if isinstance(factor[0], list) or isinstance(factor[0],
4624 numpy.ndarray):
4625 from asapmath import _array2dOp
4626 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4627 else:
4628 s = scantable( self._math._arrayop( self, factor,
4629 "MUL", tsys ) )
4630 else:
4631 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4632 s._add_history("scale", varlist)
4633 if insitu:
4634 self._assign(s)
4635 else:
4636 return s
4637
4638 @preserve_selection
4639 def set_sourcetype(self, match, matchtype="pattern",
4640 sourcetype="reference"):
4641 """\
4642 Set the type of the source to be an source or reference scan
4643 using the provided pattern.
4644
4645 Parameters:
4646
4647 match: a Unix style pattern, regular expression or selector
4648
4649 matchtype: 'pattern' (default) UNIX style pattern or
4650 'regex' regular expression
4651
4652 sourcetype: the type of the source to use (source/reference)
4653
4654 """
4655 varlist = vars()
4656 stype = -1
4657 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4658 stype = 1
4659 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4660 stype = 0
4661 else:
4662 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4663 if matchtype.lower().startswith("p"):
4664 matchtype = "pattern"
4665 elif matchtype.lower().startswith("r"):
4666 matchtype = "regex"
4667 else:
4668 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4669 sel = selector()
4670 if isinstance(match, selector):
4671 sel = match
4672 else:
4673 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4674 self.set_selection(sel)
4675 self._setsourcetype(stype)
4676 self._add_history("set_sourcetype", varlist)
4677
4678
    def set_sourcename(self, name):
        """Set the source name via _setsourcename and record it in history."""
        varlist = vars()
        self._setsourcename(name)
        self._add_history("set_sourcename", varlist)
4683
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode: the on/off detection mode
                  'paired' (default)
                  identifies 'off' scans by the
                  trailing '_R' (Mopra/Parkes) or
                  '_e'/'_w' (Tid) and matches
                  on/off pairs from the observing pattern
                  'time'
                  finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # narrow the selection to off rows, copy them out, then to on
            # rows; @preserve_selection restores the caller's selection
            from asap._asap import srctype
            sel = self.get_selection()
            #sel.set_query("SRCTYPE==psoff")
            sel.set_types(srctype.psoff)
            self.set_selection(sel)
            offs = self.copy()
            #sel.set_query("SRCTYPE==pson")
            sel.set_types(srctype.pson)
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # C++ engine pairs each 'on' with the closest 'off' in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
4735
4736 @asaplog_post_dec
4737 def mx_quotient(self, mask = None, weight='median', preserve=True):
4738 """\
4739 Form a quotient using "off" beams when observing in "MX" mode.
4740
4741 Parameters:
4742
4743 mask: an optional mask to be used when weight == 'stddev'
4744
4745 weight: How to average the off beams. Default is 'median'.
4746
4747 preserve: you can preserve (default) the continuum or
4748 remove it. The equations used are:
4749
4750 preserve: Output = Toff * (on/off) - Toff
4751
4752 remove: Output = Toff * (on/off) - Ton
4753
4754 """
4755 mask = mask or ()
4756 varlist = vars()
4757 on = scantable(self._math._mx_extract(self, 'on'))
4758 preoff = scantable(self._math._mx_extract(self, 'off'))
4759 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4760 from asapmath import quotient
4761 q = quotient(on, off, preserve)
4762 q._add_history("mx_quotient", varlist)
4763 return q
4764
4765 @asaplog_post_dec
4766 def freq_switch(self, insitu=None):
4767 """\
4768 Apply frequency switching to the data.
4769
4770 Parameters:
4771
4772 insitu: if False a new scantable is returned.
4773 Otherwise, the swictching is done in-situ
4774 The default is taken from .asaprc (False)
4775
4776 """
4777 if insitu is None: insitu = rcParams['insitu']
4778 self._math._setinsitu(insitu)
4779 varlist = vars()
4780 s = scantable(self._math._freqswitch(self))
4781 s._add_history("freq_switch", varlist)
4782 if insitu:
4783 self._assign(s)
4784 else:
4785 return s
4786
    @asaplog_post_dec
    def recalc_azel(self):
        """Recalculate the azimuth and elevation for each position."""
        # snapshot the call arguments for the history entry
        varlist = vars()
        self._recalcazel()
        self._add_history("recalc_azel", varlist)
        return
4794
4795 @asaplog_post_dec
4796 def __add__(self, other):
4797 """
4798 implicit on all axes and on Tsys
4799 """
4800 varlist = vars()
4801 s = self.__op( other, "ADD" )
4802 s._add_history("operator +", varlist)
4803 return s
4804
4805 @asaplog_post_dec
4806 def __sub__(self, other):
4807 """
4808 implicit on all axes and on Tsys
4809 """
4810 varlist = vars()
4811 s = self.__op( other, "SUB" )
4812 s._add_history("operator -", varlist)
4813 return s
4814
4815 @asaplog_post_dec
4816 def __mul__(self, other):
4817 """
4818 implicit on all axes and on Tsys
4819 """
4820 varlist = vars()
4821 s = self.__op( other, "MUL" ) ;
4822 s._add_history("operator *", varlist)
4823 return s
4824
4825
4826 @asaplog_post_dec
4827 def __div__(self, other):
4828 """
4829 implicit on all axes and on Tsys
4830 """
4831 varlist = vars()
4832 s = self.__op( other, "DIV" )
4833 s._add_history("operator /", varlist)
4834 return s
4835
4836 @asaplog_post_dec
4837 def __op( self, other, mode ):
4838 s = None
4839 if isinstance(other, scantable):
4840 s = scantable(self._math._binaryop(self, other, mode))
4841 elif isinstance(other, float):
4842 if other == 0.0:
4843 raise ZeroDivisionError("Dividing by zero is not recommended")
4844 s = scantable(self._math._unaryop(self, other, mode, False))
4845 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4846 if isinstance(other[0], list) \
4847 or isinstance(other[0], numpy.ndarray):
4848 from asapmath import _array2dOp
4849 s = _array2dOp( self, other, mode, False )
4850 else:
4851 s = scantable( self._math._arrayop( self, other,
4852 mode, False ) )
4853 else:
4854 raise TypeError("Other input is not a scantable or float value")
4855 return s
4856
4857 @asaplog_post_dec
4858 def get_fit(self, row=0):
4859 """\
4860 Print or return the stored fits for a row in the scantable
4861
4862 Parameters:
4863
4864 row: the row which the fit has been applied to.
4865
4866 """
4867 if row > self.nrow():
4868 return
4869 from asap.asapfit import asapfit
4870 fit = asapfit(self._getfit(row))
4871 asaplog.push( '%s' %(fit) )
4872 return fit.as_dict()
4873
    @preserve_selection
    def flag_nans(self):
        """\
        Utility function to flag NaN values in the scantable.
        """
        import numpy
        basesel = self.get_selection()
        for i in range(self.nrow()):
            # narrow selection to a single row so _getspectrum(0) reads row i
            sel = self.get_row_selector(i)
            self.set_selection(basesel+sel)
            nans = numpy.isnan(self._getspectrum(0))
            if numpy.any(nans):
                # convert numpy bool_ values to plain Python bools for flag()
                bnans = [ bool(v) for v in nans]
                self.flag(bnans)

        # NOTE(review): @preserve_selection restores the selection anyway,
        # so this explicit restore looks redundant - confirm before removing
        self.set_selection(basesel)
4890
    def get_row_selector(self, rowno):
        """Return a selector restricted to the single row *rowno*."""
        return selector(rows=[rowno])
4893
4894 def _add_history(self, funcname, parameters):
4895 if not rcParams['scantable.history']:
4896 return
4897 # create date
4898 sep = "##"
4899 from datetime import datetime
4900 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
4901 hist = dstr+sep
4902 hist += funcname+sep#cdate+sep
4903 if parameters.has_key('self'):
4904 del parameters['self']
4905 for k, v in parameters.iteritems():
4906 if type(v) is dict:
4907 for k2, v2 in v.iteritems():
4908 hist += k2
4909 hist += "="
4910 if isinstance(v2, scantable):
4911 hist += 'scantable'
4912 elif k2 == 'mask':
4913 if isinstance(v2, list) or isinstance(v2, tuple):
4914 hist += str(self._zip_mask(v2))
4915 else:
4916 hist += str(v2)
4917 else:
4918 hist += str(v2)
4919 else:
4920 hist += k
4921 hist += "="
4922 if isinstance(v, scantable):
4923 hist += 'scantable'
4924 elif k == 'mask':
4925 if isinstance(v, list) or isinstance(v, tuple):
4926 hist += str(self._zip_mask(v))
4927 else:
4928 hist += str(v)
4929 else:
4930 hist += str(v)
4931 hist += sep
4932 hist = hist[:-2] # remove trailing '##'
4933 self._addhistory(hist)
4934
4935
4936 def _zip_mask(self, mask):
4937 mask = list(mask)
4938 i = 0
4939 segments = []
4940 while mask[i:].count(1):
4941 i += mask[i:].index(1)
4942 if mask[i:].count(0):
4943 j = i + mask[i:].index(0)
4944 else:
4945 j = len(mask)
4946 segments.append([i, j])
4947 i = j
4948 return segments
4949
4950 def _get_ordinate_label(self):
4951 fu = "("+self.get_fluxunit()+")"
4952 import re
4953 lbl = "Intensity"
4954 if re.match(".K.", fu):
4955 lbl = "Brightness Temperature "+ fu
4956 elif re.match(".Jy.", fu):
4957 lbl = "Flux density "+ fu
4958 return lbl
4959
4960 def _check_ifs(self):
4961# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4962 nchans = [self.nchan(i) for i in self.getifnos()]
4963 nchans = filter(lambda t: t > 0, nchans)
4964 return (sum(nchans)/len(nchans) == nchans[0])
4965
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable in place.

        Parameters:

            names: list of file names; '~' and environment variables are
                   expanded, and IOError is raised for missing files

            unit: if not None, flux unit to set after filling

            average: if True, time-average the integrations of each file

            opts: extra options forwarded to the filler's open()
                  (read-only here, so the mutable default is safe)
        """
        first = True
        fullnames = []
        # validate and expand every name before touching any data
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 1 for disk-based tables, 0 for memory tables
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # measurement sets need the MS-specific filler
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge every file after the first into the accumulated table
            if not first:
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5006
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve an interactive verification answer to one of 'Y', 'N',
        'A' (accept all) or 'R' (reject all).

        Parameters:

            msg: the prompt message

            action: optional preset answer; when empty or not a string
                    the user is prompted via raw_input

        An empty reply and any unrecognised input both default to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # after showing help, re-prompt (no preset action this time)
            return self._get_verify_action(msg)
        else:
            return 'Y'
5025
5026 def __getitem__(self, key):
5027 if key < 0:
5028 key += self.nrow()
5029 if key >= self.nrow():
5030 raise IndexError("Row index out of range.")
5031 return self._getspectrum(key)
5032
5033 def __setitem__(self, key, value):
5034 if key < 0:
5035 key += self.nrow()
5036 if key >= self.nrow():
5037 raise IndexError("Row index out of range.")
5038 if not hasattr(value, "__len__") or \
5039 len(value) > self.nchan(self.getif(key)):
5040 raise ValueError("Spectrum length doesn't match.")
5041 return self._setspectrum(value, key)
5042
    def __len__(self):
        """Return the number of rows in the scantable."""
        return self.nrow()
5045
5046 def __iter__(self):
5047 for i in range(len(self)):
5048 yield self[i]
Note: See TracBrowser for help on using the repository browser.