source: trunk/python/scantable.py@ 2883

Last change on this file since 2883 was 2882, checked in by WataruKawasaki, 11 years ago

New Development: Yes

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: -

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: implemented a scantable method to parse MS-style spw/channel selection syntax.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 199.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even if *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    if not (os.path.isdir(filename)
            and os.path.exists(info)
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(info)
    first_line = f.readline()
    f.close()
    # a scantable's table.info begins with "Type = Scantable"; the
    # pattern makes the type word optional (historical table format)
    match_pattern = '^Type = (Scantable)? *$'
    return re.match(match_pattern, first_line) is not None
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    info = filename + '/table.info'
    if not (os.path.isdir(filename)
            and os.path.exists(info)
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(info)
    first_line = f.readline()
    f.close()
    # an MS identifies itself by the words "Measurement Set" in the
    # first line of table.info
    return first_line.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

            If an element contains the same integer values, the input 'edge'
            parameter can be given in a simpler shape in the following cases:
                ** when len(edge)!=2
                    any elements containing the same values can be replaced
                    to single integers.
                    e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                    e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                ** when len(edge)=2
                    care is needed for this case: ONLY ONE of the
                    elements can be a single integer,
                    e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                         or [[5,5],10], but can NOT be simplified to [5,10].
                         when [5,10] given, it is interpreted as
                         [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -----------------------------------------------------
    # NOTE: the original used the Python-2-only statement form
    # "raise ValueError, msg"; the call form below is equivalent and
    # matches the raise style used elsewhere in this module.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise trivial scalar/short forms ------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # a 'common' edge is a single [left, right] pair applied to all IFs;
    # any nested list/tuple element means per-IF values were given
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: expand bare integers n into the pair [n, n]
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when rcParams['verbose'] is set,
    otherwise re-raise as a RuntimeError carrying the original message.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-display options into the 'flag,minnrow' string
    form expected by the underlying C++ layer.
    """
    return ','.join([str(showprogress).lower(), str(minnrow)])
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        # drop entries that packed to the empty string (row >= maxirow)
        packed_all = [do_pack_blinfo(item, maxirow) for item in blinfo]
        return [s for s in packed_all if s != '']
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    The result is a ':'-joined record of the form
    row:blfunc:funcparam:masklist:clipthresh:clipniter:use_linefinder:thresh:edge:chan_avg_limit
    where each field may itself be a comma-separated list.
    Returns '' when blinfo['row'] >= maxirow (row out of range).
    Raises ValueError when mandatory keys are missing or the clipping /
    linefinder parameters are only partially given.
    """
    dinfo = {}
    # mandatory keys; 'row' is also remembered as irow for the range check
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                # flatten a (possibly nested, one level deep) sequence
                # into a comma-separated string
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the table size pack to the empty string
    if irow >= maxirow: return ''

    # optional function-order keys; only the one matching blfunc is used
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    # which order-parameter key each baseline function requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        # defaults: no clipping
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    parsed = []
    for item in sres:
        srow, scoeff, srms = item.split(":")
        parsed.append({'row': int(srow),
                       'coeff': [float(c) for c in scoeff.split(",")],
                       'rms': float(srms)})
    return parsed
302
def is_number(s):
    """Return True if the string *s* (surrounding whitespace ignored)
    parses as a float, else False.
    """
    s = s.strip()
    # The original used a bare 'except' with 'return' inside 'finally',
    # which silently swallowed *every* exception (including
    # KeyboardInterrupt/SystemExit). float() only raises ValueError for
    # an unparsable string, so catch exactly that.
    try:
        float(s)
    except ValueError:
        return False
    return True
313
def is_frequency(s):
    """Return True when *s* looks like a frequency, i.e. ends in 'Hz'
    (case-insensitive) after stripping surrounding whitespace.
    """
    return s.strip()[-2:].lower() == "hz"
317
def get_freq_by_string(s):
    """Convert a frequency string such as '1.42GHz' or '100Hz' into a
    value in Hz, honouring the metric prefix letter (if any) in front
    of 'Hz'.

    Raises RuntimeError when *s* does not end in 'Hz'.
    """
    if not is_frequency(s):
        raise RuntimeError("Invalid input string.")

    # metric prefixes from atto (1e-18) to exa (1e+18); '.' is a
    # placeholder for "no prefix" (factor 1.0)
    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s = s.strip()
    # the character just before 'Hz' is either part of the number
    # (no prefix) or a metric prefix letter
    # (the unused local 'factor' from the original has been removed)
    prefix = s[-3:-2]
    if is_number(prefix):
        res = float(s[:-2])
    else:
        res = float(s[:-3]) * factor_list[prefix_list.index(prefix)]

    return res
335
def is_velocity(s):
    """Return True when *s* looks like a velocity, i.e. ends in 'm/s'
    (case-insensitive; 'km/s' matches too) after stripping whitespace.
    """
    return s.strip()[-3:].lower() == "m/s"
339
def get_velocity_by_string(s):
    """Convert a velocity string such as '10km/s' or '300m/s' into a
    value in km/s.

    Raises RuntimeError when *s* does not end in 'm/s'.
    """
    if not is_velocity(s):
        raise RuntimeError("Invalid input string.")

    # '.' is a placeholder for "no prefix" (plain m/s -> factor 1e-3
    # to express the result in km/s); 'k' means the input already is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s = s.strip()
    # the character just before 'm/s' is either part of the number
    # (plain m/s) or the 'k' prefix
    # (the unused local 'factor' from the original has been removed)
    prefix = s[-4:-3]
    if is_number(prefix):
        res = float(s[:-3]) * 1e-3
    else:
        res = float(s[:-4]) * factor_list[prefix_list.index(prefix)]

    return res # in km/s
357
def get_frequency_by_velocity(restfreq, vel):
    """Return the frequency corresponding to *restfreq* Doppler-shifted
    by *vel* (km/s, relativistic formula; positive velocity lowers the
    frequency).
    """
    import math
    # speed of light in km/s
    vel_c = 299792.458
    beta = vel / vel_c
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
367
368
369class scantable(Scantable):
370 """\
371 The ASAP container for scans (single-dish data).
372 """
373
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the argument values for the history record at the end
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # open an existing asap table, optionally on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume NRO data and forward the
                    # NRO-specific keyword arguments
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                   and isinstance(filename[-1], str):
                # a list of file names: fill them all into one scantable
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
        self._add_history("scantable", varlist)
460
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the configured default format
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            # native table format is written directly
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has a dedicated writer with its own overwrite handling
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
519
520 def copy(self):
521 """Return a copy of this scantable.
522
523 *Note*:
524
525 This makes a full (deep) copy. scan2 = scan1 makes a reference.
526
527 Example::
528
529 copiedscan = scan.copy()
530
531 """
532 sd = scantable(Scantable._copy(self))
533 return sd
534
535 def drop_scan(self, scanid=None):
536 """\
537 Return a new scantable where the specified scan number(s) has(have)
538 been dropped.
539
540 Parameters:
541
542 scanid: a (list of) scan number(s)
543
544 """
545 from asap import _is_sequence_or_number as _is_valid
546 from asap import _to_list
547 from asap import unique
548 if not _is_valid(scanid):
549 raise RuntimeError( 'Please specify a scanno to drop from the'
550 ' scantable' )
551 scanid = _to_list(scanid)
552 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
553 for sid in scanid: allscans.remove(sid)
554 if len(allscans) == 0:
555 raise ValueError("Can't remove all scans")
556 sel = selector(scans=allscans)
557 return self._select_copy(sel)
558
559 def _select_copy(self, selection):
560 orig = self.get_selection()
561 self.set_selection(orig+selection)
562 cp = self.copy()
563 self.set_selection(orig)
564 return cp
565
566 def get_scan(self, scanid=None):
567 """\
568 Return a specific scan (by scanno) or collection of scans (by
569 source name) in a new scantable.
570
571 *Note*:
572
573 See scantable.drop_scan() for the inverse operation.
574
575 Parameters:
576
577 scanid: a (list of) scanno or a source name, unix-style
578 patterns are accepted for source name matching, e.g.
579 '*_R' gets all 'ref scans
580
581 Example::
582
583 # get all scans containing the source '323p459'
584 newscan = scan.get_scan('323p459')
585 # get all 'off' scans
586 refscans = scan.get_scan('*_R')
587 # get a susbset of scans by scanno (as listed in scan.summary())
588 newscan = scan.get_scan([0, 2, 7, 10])
589
590 """
591 if scanid is None:
592 raise RuntimeError( 'Please specify a scan no or name to '
593 'retrieve from the scantable' )
594 try:
595 bsel = self.get_selection()
596 sel = selector()
597 if type(scanid) is str:
598 sel.set_name(scanid)
599 return self._select_copy(sel)
600 elif type(scanid) is int:
601 sel.set_scans([scanid])
602 return self._select_copy(sel)
603 elif type(scanid) is list:
604 sel.set_scans(scanid)
605 return self._select_copy(sel)
606 else:
607 msg = "Illegal scanid type, use 'int' or 'list' if ints."
608 raise TypeError(msg)
609 except RuntimeError:
610 raise
611
612 def __str__(self):
613 tempFile = tempfile.NamedTemporaryFile()
614 Scantable._summary(self, tempFile.name)
615 tempFile.seek(0)
616 asaplog.clear()
617 return tempFile.file.read()
618
619 @asaplog_post_dec
620 def summary(self, filename=None):
621 """\
622 Print a summary of the contents of this scantable.
623
624 Parameters:
625
626 filename: the name of a file to write the putput to
627 Default - no file output
628
629 """
630 if filename is not None:
631 if filename is "":
632 filename = 'scantable_summary.txt'
633 from os.path import expandvars, isdir
634 filename = expandvars(filename)
635 if isdir(filename):
636 msg = "Illegal file name '%s'." % (filename)
637 raise IOError(msg)
638 else:
639 filename = ""
640 Scantable._summary(self, filename)
641
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        Returns:

             the spectral values of row *rowno* as a list

        """
        # thin delegation to the C++ layer
        return self._getspectrum(rowno)
651
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        Returns:

             the channel mask of row *rowno* as a list

        """
        # thin delegation to the C++ layer
        return self._getmask(rowno)
661
662 def set_spectrum(self, spec, rowno):
663 """Set the spectrum for the current row in the scantable.
664
665 Parameters:
666
667 spec: the new spectrum
668
669 rowno: the row number to set the spectrum for
670
671 """
672 assert(len(spec) == self.nchan(self.getif(rowno)))
673 return self._setspectrum(spec, rowno)
674
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the C++ coordinate object in the python-side helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
697
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Returns a new selector wrapping the current selection, so
        modifying it has no effect until it is applied again with
        set_selection().

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)               # select IF 0
            scan.set_selection(sel)      # apply modified selection

        """
        return selector(self._getselection())
710
711 def set_selection(self, selection=None, **kw):
712 """\
713 Select a subset of the data. All following operations on this scantable
714 are only applied to thi selection.
715
716 Parameters:
717
718 selection: a selector object (default unset the selection), or
719 any combination of 'pols', 'ifs', 'beams', 'scans',
720 'cycles', 'name', 'query'
721
722 Examples::
723
724 sel = selector() # create a selection object
725 self.set_scans([0, 3]) # select SCANNO 0 and 3
726 scan.set_selection(sel) # set the selection
727 scan.summary() # will only print summary of scanno 0 an 3
728 scan.set_selection() # unset the selection
729 # or the equivalent
730 scan.set_selection(scans=[0,3])
731 scan.summary() # will only print summary of scanno 0 an 3
732 scan.set_selection() # unset the selection
733
734 """
735 if selection is None:
736 # reset
737 if len(kw) == 0:
738 selection = selector()
739 else:
740 # try keywords
741 for k in kw:
742 if k not in selector.fields:
743 raise KeyError("Invalid selection key '%s', "
744 "valid keys are %s" % (k,
745 selector.fields))
746 selection = selector(**kw)
747 self._setselection(selection)
748
749 def get_row(self, row=0, insitu=None):
750 """\
751 Select a row in the scantable.
752 Return a scantable with single row.
753
754 Parameters:
755
756 row: row no of integration, default is 0.
757 insitu: if False a new scantable is returned. Otherwise, the
758 scaling is done in-situ. The default is taken from .asaprc
759 (False)
760
761 """
762 if insitu is None:
763 insitu = rcParams['insitu']
764 if not insitu:
765 workscan = self.copy()
766 else:
767 workscan = self
768 # Select a row
769 sel = selector()
770 sel.set_rows([row])
771 workscan.set_selection(sel)
772 if not workscan.nrow() == 1:
773 msg = "Could not identify single row. %d rows selected." \
774 % (workscan.nrow())
775 raise RuntimeError(msg)
776 if insitu:
777 self._assign(workscan)
778 else:
779 return workscan
780
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq) of the
        # extremum rather than the value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max variants also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): if 'row' is neither None nor an int, 'rows' is
        # left unbound and the loop below raises NameError
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                # look up the abscissa/value at the extremum's channel
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa requested: collect it and annotate with
                    # the spectral value at that position
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the report through a scratch file so it reaches the log
        # NOTE(review): predictable file name under /tmp — a symlink
        # placed by another local user could be overwritten; consider
        # tempfile.NamedTemporaryFile instead
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
888
889 def chan2data(self, rowno=0, chan=0):
890 """\
891 Returns channel/frequency/velocity and spectral value
892 at an arbitrary row and channel in the scantable.
893
894 Parameters:
895
896 rowno: a row number in the scantable. Default is the
897 first row, i.e. rowno=0
898
899 chan: a channel in the scantable. Default is the first
900 channel, i.e. pos=0
901
902 """
903 if isinstance(rowno, int) and isinstance(chan, int):
904 qx = {'unit': self.get_unit(),
905 'value': self._getabcissa(rowno)[chan]}
906 qy = {'unit': self.get_fluxunit(),
907 'value': self._getspectrum(rowno)[chan]}
908 return qx, qy
909
910 def stddev(self, mask=None):
911 """\
912 Determine the standard deviation of the current beam/if/pol
913 Takes a 'mask' as an optional parameter to specify which
914 channels should be excluded.
915
916 Parameters:
917
918 mask: an optional mask specifying where the standard
919 deviation should be determined.
920
921 Example::
922
923 scan.set_unit('channel')
924 msk = scan.create_mask([100, 200], [500, 600])
925 scan.stddev(mask=m)
926
927 """
928 return self.stats(stat='stddev', mask=mask);
929
930
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the C++-side container into a plain python list
        return list(Scantable.get_column_names(self))
936
937 def get_tsys(self, row=-1):
938 """\
939 Return the System temperatures.
940
941 Parameters:
942
943 row: the rowno to get the information for. (default all rows)
944
945 Returns:
946
947 a list of Tsys values for the current selection
948
949 """
950 if row > -1:
951 return self._get_column(self._gettsys, row)
952 return self._row_callback(self._gettsys, "Tsys")
953
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # delegate per-row extraction to the generic column getter
        return self._get_column( self._gettsysspectrum, row )
968
969 def set_tsys(self, values, row=-1):
970 """\
971 Set the Tsys value(s) of the given 'row' or the whole scantable
972 (selection).
973
974 Parameters:
975
976 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
977 row: the row number to apply Tsys values to.
978 (default all rows)
979
980 """
981
982 if not hasattr(values, "__len__"):
983 values = [values]
984 self._settsys(values, row)
985
986 def get_weather(self, row=-1):
987 """\
988 Return the weather informations.
989
990 Parameters:
991
992 row: the rowno to get the information for. (default all rows)
993
994 Returns:
995
996 a dict or list of of dicts of values for the current selection
997
998 """
999
1000 values = self._get_column(self._get_weather, row)
1001 if row > -1:
1002 return {'temperature': values[0],
1003 'pressure': values[1], 'humidity' : values[2],
1004 'windspeed' : values[3], 'windaz' : values[4]
1005 }
1006 else:
1007 out = []
1008 for r in values:
1009
1010 out.append({'temperature': r[0],
1011 'pressure': r[1], 'humidity' : r[2],
1012 'windspeed' : r[3], 'windaz' : r[4]
1013 })
1014 return out
1015
1016 def _row_callback(self, callback, label):
1017 out = ""
1018 outvec = []
1019 sep = '-'*50
1020 for i in range(self.nrow()):
1021 tm = self._gettime(i)
1022 src = self._getsourcename(i)
1023 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1024 out += 'Time[%s]:\n' % (tm)
1025 if self.nbeam(-1) > 1:
1026 out += ' Beam[%d] ' % (self.getbeam(i))
1027 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1028 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1029 outvec.append(callback(i))
1030 out += '= %3.3f\n' % (outvec[i])
1031 out += sep+'\n'
1032
1033 asaplog.push(sep)
1034 asaplog.push(" %s" % (label))
1035 asaplog.push(sep)
1036 asaplog.push(out)
1037 asaplog.post()
1038 return outvec
1039
1040 def _get_column(self, callback, row=-1, *args):
1041 """
1042 """
1043 if row == -1:
1044 return [callback(i, *args) for i in range(self.nrow())]
1045 else:
1046 if 0 <= row < self.nrow():
1047 return callback(row, *args)
1048
1049
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1: derive the digit
            # count from the shortest integration time so sub-second
            # stamps are not truncated
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            # precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip trailing time fields (.%f, then %S, then %M) that the
            # requested precision cannot represent.
            # NOTE: '/' is integer (floor) division here under Python 2
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1095
1096
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        # delegate per-row extraction to the generic column getter
        return self._get_column(self._getinttime, row)
1108
1109
1110 def get_sourcename(self, row=-1):
1111 """\
1112 Get a list source names for the observations.
1113 Return a string for each integration in the scantable.
1114 Parameters:
1115
1116 row: row no of integration. Default -1 return all rows.
1117
1118 """
1119 return self._get_column(self._getsourcename, row)
1120
1121 def get_elevation(self, row=-1):
1122 """\
1123 Get a list of elevations for the observations.
1124 Return a float for each integration in the scantable.
1125
1126 Parameters:
1127
1128 row: row no of integration. Default -1 return all rows.
1129
1130 """
1131 return self._get_column(self._getelevation, row)
1132
1133 def get_azimuth(self, row=-1):
1134 """\
1135 Get a list of azimuths for the observations.
1136 Return a float for each integration in the scantable.
1137
1138 Parameters:
1139 row: row no of integration. Default -1 return all rows.
1140
1141 """
1142 return self._get_column(self._getazimuth, row)
1143
1144 def get_parangle(self, row=-1):
1145 """\
1146 Get a list of parallactic angles for the observations.
1147 Return a float for each integration in the scantable.
1148
1149 Parameters:
1150
1151 row: row no of integration. Default -1 return all rows.
1152
1153 """
1154 return self._get_column(self._getparangle, row)
1155
1156 def get_direction(self, row=-1):
1157 """
1158 Get a list of Positions on the sky (direction) for the observations.
1159 Return a string for each integration in the scantable.
1160
1161 Parameters:
1162
1163 row: row no of integration. Default -1 return all rows
1164
1165 """
1166 return self._get_column(self._getdirection, row)
1167
1168 def get_directionval(self, row=-1):
1169 """\
1170 Get a list of Positions on the sky (direction) for the observations.
1171 Return a float for each integration in the scantable.
1172
1173 Parameters:
1174
1175 row: row no of integration. Default -1 return all rows
1176
1177 """
1178 return self._get_column(self._getdirectionvec, row)
1179
1180 @asaplog_post_dec
1181 def set_unit(self, unit='channel'):
1182 """\
1183 Set the unit for all following operations on this scantable
1184
1185 Parameters:
1186
1187 unit: optional unit, default is 'channel'. Use one of '*Hz',
1188 'km/s', 'channel' or equivalent ''
1189
1190 """
1191 varlist = vars()
1192 if unit in ['', 'pixel', 'channel']:
1193 unit = ''
1194 inf = list(self._getcoordinfo())
1195 inf[0] = unit
1196 self._setcoordinfo(inf)
1197 self._add_history("set_unit", varlist)
1198
1199 @asaplog_post_dec
1200 def set_instrument(self, instr):
1201 """\
1202 Set the instrument for subsequent processing.
1203
1204 Parameters:
1205
1206 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1207 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1208
1209 """
1210 self._setInstrument(instr)
1211 self._add_history("set_instument", vars())
1212
1213 @asaplog_post_dec
1214 def set_feedtype(self, feedtype):
1215 """\
1216 Overwrite the feed type, which might not be set correctly.
1217
1218 Parameters:
1219
1220 feedtype: 'linear' or 'circular'
1221
1222 """
1223 self._setfeedtype(feedtype)
1224 self._add_history("set_feedtype", vars())
1225
1226 @asaplog_post_dec
1227 def set_doppler(self, doppler='RADIO'):
1228 """\
1229 Set the doppler for all following operations on this scantable.
1230
1231 Parameters:
1232
1233 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1234
1235 """
1236 varlist = vars()
1237 inf = list(self._getcoordinfo())
1238 inf[2] = doppler
1239 self._setcoordinfo(inf)
1240 self._add_history("set_doppler", vars())
1241
1242 @asaplog_post_dec
1243 def set_freqframe(self, frame=None):
1244 """\
1245 Set the frame type of the Spectral Axis.
1246
1247 Parameters:
1248
1249 frame: an optional frame type, default 'LSRK'. Valid frames are:
1250 'TOPO', 'LSRD', 'LSRK', 'BARY',
1251 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1252
1253 Example::
1254
1255 scan.set_freqframe('BARY')
1256
1257 """
1258 frame = frame or rcParams['scantable.freqframe']
1259 varlist = vars()
1260 # "REST" is not implemented in casacore
1261 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1262 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1263 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1264 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1265
1266 if frame in valid:
1267 inf = list(self._getcoordinfo())
1268 inf[1] = frame
1269 self._setcoordinfo(inf)
1270 self._add_history("set_freqframe", varlist)
1271 else:
1272 msg = "Please specify a valid freq type. Valid types are:\n", valid
1273 raise TypeError(msg)
1274
1275 @asaplog_post_dec
1276 def set_dirframe(self, frame=""):
1277 """\
1278 Set the frame type of the Direction on the sky.
1279
1280 Parameters:
1281
1282 frame: an optional frame type, default ''. Valid frames are:
1283 'J2000', 'B1950', 'GALACTIC'
1284
1285 Example:
1286
1287 scan.set_dirframe('GALACTIC')
1288
1289 """
1290 varlist = vars()
1291 Scantable.set_dirframe(self, frame)
1292 self._add_history("set_dirframe", varlist)
1293
1294 def get_unit(self):
1295 """\
1296 Get the default unit set in this scantable
1297
1298 Returns:
1299
1300 A unit string
1301
1302 """
1303 inf = self._getcoordinfo()
1304 unit = inf[0]
1305 if unit == '': unit = 'channel'
1306 return unit
1307
1308 @asaplog_post_dec
1309 def get_abcissa(self, rowno=0):
1310 """\
1311 Get the abcissa in the current coordinate setup for the currently
1312 selected Beam/IF/Pol
1313
1314 Parameters:
1315
1316 rowno: an optional row number in the scantable. Default is the
1317 first row, i.e. rowno=0
1318
1319 Returns:
1320
1321 The abcissa values and the format string (as a dictionary)
1322
1323 """
1324 abc = self._getabcissa(rowno)
1325 lbl = self._getabcissalabel(rowno)
1326 return abc, lbl
1327
1328 @asaplog_post_dec
1329 def flag(self, mask=None, unflag=False, row=-1):
1330 """\
1331 Flag the selected data using an optional channel mask.
1332
1333 Parameters:
1334
1335 mask: an optional channel mask, created with create_mask. Default
1336 (no mask) is all channels.
1337
1338 unflag: if True, unflag the data
1339
1340 row: an optional row number in the scantable.
1341 Default -1 flags all rows
1342
1343 """
1344 varlist = vars()
1345 mask = mask or []
1346 self._flag(row, mask, unflag)
1347 self._add_history("flag", varlist)
1348
1349 @asaplog_post_dec
1350 def flag_row(self, rows=None, unflag=False):
1351 """\
1352 Flag the selected data in row-based manner.
1353
1354 Parameters:
1355
1356 rows: list of row numbers to be flagged. Default is no row
1357 (must be explicitly specified to execute row-based
1358 flagging).
1359
1360 unflag: if True, unflag the data.
1361
1362 """
1363 varlist = vars()
1364 if rows is None:
1365 rows = []
1366 self._flag_row(rows, unflag)
1367 self._add_history("flag_row", varlist)
1368
1369 @asaplog_post_dec
1370 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1371 """\
1372 Flag the selected data outside a specified range (in channel-base)
1373
1374 Parameters:
1375
1376 uthres: upper threshold.
1377
1378 dthres: lower threshold
1379
1380 clipoutside: True for flagging data outside the range
1381 [dthres:uthres].
1382 False for flagging data inside the range.
1383
1384 unflag: if True, unflag the data.
1385
1386 """
1387 varlist = vars()
1388 self._clip(uthres, dthres, clipoutside, unflag)
1389 self._add_history("clip", varlist)
1390
1391 @asaplog_post_dec
1392 def lag_flag(self, start, end, unit="MHz", insitu=None):
1393 """\
1394 Flag the data in 'lag' space by providing a frequency to remove.
1395 Flagged data in the scantable get interpolated over the region.
1396 No taper is applied.
1397
1398 Parameters:
1399
1400 start: the start frequency (really a period within the
1401 bandwidth) or period to remove
1402
1403 end: the end frequency or period to remove
1404
1405 unit: the frequency unit (default 'MHz') or '' for
1406 explicit lag channels
1407
1408 *Notes*:
1409
1410 It is recommended to flag edges of the band or strong
1411 signals beforehand.
1412
1413 """
1414 if insitu is None: insitu = rcParams['insitu']
1415 self._math._setinsitu(insitu)
1416 varlist = vars()
1417 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1418 if not (unit == "" or base.has_key(unit)):
1419 raise ValueError("%s is not a valid unit." % unit)
1420 if unit == "":
1421 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1422 else:
1423 s = scantable(self._math._lag_flag(self, start*base[unit],
1424 end*base[unit], "frequency"))
1425 s._add_history("lag_flag", varlist)
1426 if insitu:
1427 self._assign(s)
1428 else:
1429 return s
1430
1431 @asaplog_post_dec
1432 def fft(self, rowno=None, mask=None, getrealimag=False):
1433 """\
1434 Apply FFT to the spectra.
1435 Flagged data in the scantable get interpolated over the region.
1436
1437 Parameters:
1438
1439 rowno: The row number(s) to be processed. int, list
1440 and tuple are accepted. By default (None), FFT
1441 is applied to the whole data.
1442
1443 mask: Auxiliary channel mask(s). Given as a boolean
1444 list, it is applied to all specified rows.
1445 A list of boolean lists can also be used to
1446 apply different masks. In the latter case, the
1447 length of 'mask' must be the same as that of
1448 'rowno'. The default is None.
1449
1450 getrealimag: If True, returns the real and imaginary part
1451 values of the complex results.
1452 If False (the default), returns the amplitude
1453 (absolute value) normalised with Ndata/2 and
1454 phase (argument, in unit of radian).
1455
1456 Returns:
1457
1458 A list of dictionaries containing the results for each spectrum.
1459 Each dictionary contains two values, the real and the imaginary
1460 parts when getrealimag = True, or the amplitude(absolute value)
1461 and the phase(argument) when getrealimag = False. The key for
1462 these values are 'real' and 'imag', or 'ampl' and 'phase',
1463 respectively.
1464 """
1465 if rowno is None:
1466 rowno = []
1467 if isinstance(rowno, int):
1468 rowno = [rowno]
1469 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1470 raise TypeError("The row number(s) must be int, list or tuple.")
1471 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1472
1473 usecommonmask = True
1474
1475 if mask is None:
1476 mask = []
1477 if isinstance(mask, list) or isinstance(mask, tuple):
1478 if len(mask) == 0:
1479 mask = [[]]
1480 else:
1481 if isinstance(mask[0], bool):
1482 if len(mask) != self.nchan(self.getif(rowno[0])):
1483 raise ValueError("The spectra and the mask have "
1484 "different length.")
1485 mask = [mask]
1486 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1487 usecommonmask = False
1488 if len(mask) != len(rowno):
1489 raise ValueError("When specifying masks for each "
1490 "spectrum, the numbers of them "
1491 "must be identical.")
1492 for i in xrange(mask):
1493 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1494 raise ValueError("The spectra and the mask have "
1495 "different length.")
1496 else:
1497 raise TypeError("The mask must be a boolean list or "
1498 "a list of boolean list.")
1499 else:
1500 raise TypeError("The mask must be a boolean list or a list of "
1501 "boolean list.")
1502
1503 res = []
1504
1505 imask = 0
1506 for whichrow in rowno:
1507 fspec = self._fft(whichrow, mask[imask], getrealimag)
1508 nspec = len(fspec)
1509
1510 i = 0
1511 v1 = []
1512 v2 = []
1513 reselem = {"real":[],"imag":[]} if getrealimag \
1514 else {"ampl":[],"phase":[]}
1515
1516 while (i < nspec):
1517 v1.append(fspec[i])
1518 v2.append(fspec[i+1])
1519 i += 2
1520
1521 if getrealimag:
1522 reselem["real"] += v1
1523 reselem["imag"] += v2
1524 else:
1525 reselem["ampl"] += v1
1526 reselem["phase"] += v2
1527
1528 res.append(reselem)
1529
1530 if not usecommonmask:
1531 imask += 1
1532
1533 return res
1534
1535 @asaplog_post_dec
1536 def create_mask(self, *args, **kwargs):
1537 """\
1538 Compute and return a mask based on [min, max] windows.
1539 The specified windows are to be INCLUDED, when the mask is
1540 applied.
1541
1542 Parameters:
1543
1544 [min, max], [min2, max2], ...
1545 Pairs of start/end points (inclusive)specifying the regions
1546 to be masked
1547
1548 invert: optional argument. If specified as True,
1549 return an inverted mask, i.e. the regions
1550 specified are EXCLUDED
1551
1552 row: create the mask using the specified row for
1553 unit conversions, default is row=0
1554 only necessary if frequency varies over rows.
1555
1556 Examples::
1557
1558 scan.set_unit('channel')
1559 # a)
1560 msk = scan.create_mask([400, 500], [800, 900])
1561 # masks everything outside 400 and 500
1562 # and 800 and 900 in the unit 'channel'
1563
1564 # b)
1565 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1566 # masks the regions between 400 and 500
1567 # and 800 and 900 in the unit 'channel'
1568
1569 # c)
1570 #mask only channel 400
1571 msk = scan.create_mask([400])
1572
1573 """
1574 row = kwargs.get("row", 0)
1575 data = self._getabcissa(row)
1576 u = self._getcoordinfo()[0]
1577 if u == "":
1578 u = "channel"
1579 msg = "The current mask window unit is %s" % u
1580 i = self._check_ifs()
1581 if not i:
1582 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1583 asaplog.push(msg)
1584 n = len(data)
1585 msk = _n_bools(n, False)
1586 # test if args is a 'list' or a 'normal *args - UGLY!!!
1587
1588 ws = (isinstance(args[-1][-1], int)
1589 or isinstance(args[-1][-1], float)) and args or args[0]
1590 for window in ws:
1591 if len(window) == 1:
1592 window = [window[0], window[0]]
1593 if len(window) == 0 or len(window) > 2:
1594 raise ValueError("A window needs to be defined as "
1595 "[start(, end)]")
1596 if window[0] > window[1]:
1597 tmp = window[0]
1598 window[0] = window[1]
1599 window[1] = tmp
1600 for i in range(n):
1601 if data[i] >= window[0] and data[i] <= window[1]:
1602 msk[i] = True
1603 if kwargs.has_key('invert'):
1604 if kwargs.get('invert'):
1605 msk = mask_not(msk)
1606 return msk
1607
1608 def get_masklist(self, mask=None, row=0, silent=False):
1609 """\
1610 Compute and return a list of mask windows, [min, max].
1611
1612 Parameters:
1613
1614 mask: channel mask, created with create_mask.
1615
1616 row: calcutate the masklist using the specified row
1617 for unit conversions, default is row=0
1618 only necessary if frequency varies over rows.
1619
1620 Returns:
1621
1622 [min, max], [min2, max2], ...
1623 Pairs of start/end points (inclusive)specifying
1624 the masked regions
1625
1626 """
1627 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1628 raise TypeError("The mask should be list or tuple.")
1629 if len(mask) <= 0:
1630 raise TypeError("The mask elements should be > 0")
1631 data = self._getabcissa(row)
1632 if len(data) != len(mask):
1633 msg = "Number of channels in scantable != number of mask elements"
1634 raise TypeError(msg)
1635 u = self._getcoordinfo()[0]
1636 if u == "":
1637 u = "channel"
1638 msg = "The current mask window unit is %s" % u
1639 i = self._check_ifs()
1640 if not i:
1641 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1642 if not silent:
1643 asaplog.push(msg)
1644 masklist = []
1645 ist, ien = None, None
1646 ist, ien=self.get_mask_indices(mask)
1647 if ist is not None and ien is not None:
1648 for i in xrange(len(ist)):
1649 range=[data[ist[i]],data[ien[i]]]
1650 range.sort()
1651 masklist.append([range[0],range[1]])
1652 return masklist
1653
1654 def get_mask_indices(self, mask=None):
1655 """\
1656 Compute and Return lists of mask start indices and mask end indices.
1657
1658 Parameters:
1659
1660 mask: channel mask, created with create_mask.
1661
1662 Returns:
1663
1664 List of mask start indices and that of mask end indices,
1665 i.e., [istart1,istart2,....], [iend1,iend2,....].
1666
1667 """
1668 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1669 raise TypeError("The mask should be list or tuple.")
1670 if len(mask) <= 0:
1671 raise TypeError("The mask elements should be > 0")
1672 istart = []
1673 iend = []
1674 if mask[0]:
1675 istart.append(0)
1676 for i in range(len(mask)-1):
1677 if not mask[i] and mask[i+1]:
1678 istart.append(i+1)
1679 elif mask[i] and not mask[i+1]:
1680 iend.append(i)
1681 if mask[len(mask)-1]:
1682 iend.append(len(mask)-1)
1683 if len(istart) != len(iend):
1684 raise RuntimeError("Numbers of mask start != mask end.")
1685 for i in range(len(istart)):
1686 if istart[i] > iend[i]:
1687 raise RuntimeError("Mask start index > mask end index")
1688 break
1689 return istart,iend
1690
1691 @asaplog_post_dec
1692 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
1693 """
1694 Parse MS type spw/channel selection syntax.
1695
1696 Parameters:
1697 selectstring : A string expression of spw and channel selection.
1698 Comma-separated expressions mean different spw -
1699 channel combinations. Spws and channel selections
1700 are partitioned by a colon ':'. In a single
1701 selection expression, you can put multiple values
1702 separated by semicolons ';'. Both for spw and
1703 channel selection, allowed cases include single
1704 value, blank('') or asterisk('*') to specify all
1705 available values, two values connected with a
1706 tilde ('~') to specify an inclusive range. Unit
1707 strings for frequency or velocity can be added to
1708 the tilde-connected values. For channel selection
1709 expression, placing a '<' or a '>' is possible to
1710 specify a semi-infinite interval as well.
1711
1712 examples:
1713 '' or '*' = all spws (all channels)
1714 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
1715 '3:3~45;60' = channels 3 to 45 and 60 in spw 3
1716 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
1717 all channels in spw8
1718 '1.3GHz~1.5GHz' = all spws that fall in or have
1719 at least some overwrap with
1720 frequency range between 1.3GHz
1721 and 1.5GHz.
1722 '1.3GHz~1.5GHz:1.3GHz~1.5GHz' = channels that
1723 fall between the
1724 specified frequency
1725 range in spws that
1726 fall in or have
1727 overwrap with the
1728 specified frequency
1729 range.
1730 '1:-200km/s~250km/s' = channels that fall between
1731 the specified velocity range
1732 in spw 1.
1733 Returns:
1734 A dictionary of selected (valid) spw and masklist pairs,
1735 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1736 """
1737 if not isinstance(selectstring, str):
1738 asaplog.post()
1739 asaplog.push("Expression of spw/channel selection must be a string.")
1740 asaplog.post("ERROR")
1741
1742 orig_unit = self.get_unit()
1743 self.set_unit('channel')
1744
1745 orig_restfreq = self.get_restfreqs()
1746 orig_restfreq_list = []
1747 for i in orig_restfreq.keys():
1748 if len(orig_restfreq[i]) == 1:
1749 orig_restfreq_list.append(orig_restfreq[i][0])
1750 else:
1751 orig_restfreq_list.append(orig_restfreq[i])
1752
1753 orig_coord = self._getcoordinfo()
1754 orig_frame = orig_coord[1]
1755 orig_doppler = orig_coord[2]
1756
1757 if restfreq is None: restfreq = orig_restfreq_list
1758 if frame is None: frame = orig_frame
1759 if doppler is None: doppler = orig_doppler
1760
1761 self.set_restfreqs(restfreq)
1762 self.set_freqframe(frame)
1763 self.set_doppler(doppler)
1764
1765 valid_ifs = self.getifnos()
1766
1767 comma_sep = selectstring.split(",")
1768 res = {}
1769
1770 for cms_elem in comma_sep:
1771 if (cms_elem.strip() == ""): continue
1772
1773 colon_sep = cms_elem.split(":")
1774
1775 if (len(colon_sep) > 2):
1776 raise RuntimeError("Invalid selection expression: more than two colons!")
1777
1778 # parse spw expression and store result in spw_list.
1779 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
1780 # 'a*Hz~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
1781 # 'a*m/s~b*m/s' (where * can be '' or 'k') and also
1782 # several of the above expressions connected with ';'.
1783
1784 spw_list = []
1785
1786 semicolon_sep = colon_sep[0].split(";")
1787
1788 for scs_elem in semicolon_sep:
1789 scs_elem = scs_elem.strip()
1790
1791 lt_sep = scs_elem.split("<")
1792 gt_sep = scs_elem.split(">")
1793 ti_sep = scs_elem.split("~")
1794
1795 lt_sep_length = len(lt_sep)
1796 gt_sep_length = len(gt_sep)
1797 ti_sep_length = len(ti_sep)
1798
1799 len_product = lt_sep_length * gt_sep_length * ti_sep_length
1800
1801 if (len_product > 2):
1802 # '<', '>' and '~' must not coexist in a single spw expression
1803
1804 raise RuntimeError("Invalid spw selection.")
1805
1806 elif (len_product == 1):
1807 # '', '*', or single spw number.
1808
1809 if (scs_elem == "") or (scs_elem == "*"):
1810 spw_list = valid_ifs[:] # deep copy
1811
1812 else: # single number
1813 try:
1814 #checking if the given number is valid for spw ID
1815 idx = valid_ifs.index(int(scs_elem))
1816 spw_list.append(valid_ifs[idx])
1817
1818 except:
1819 asaplog.post()
1820 asaplog.push("Wrong spw number (" + scs_elem + ") given. ignored.")
1821 asaplog.post("WARNING")
1822
1823 else: # (len_product == 2)
1824 # namely, one of '<', '>' or '~' appers just once.
1825
1826 if (lt_sep_length == 2): # '<a'
1827 if is_number(lt_sep[1]):
1828 for i in valid_ifs:
1829 if (i < float(lt_sep[1])):
1830 spw_list.append(i)
1831
1832 else:
1833 RuntimeError("Invalid spw selection.")
1834
1835 elif (gt_sep_length == 2): # '>a'
1836 if is_number(gt_sep[1]):
1837 for i in valid_ifs:
1838 if (i > float(gt_sep[1])):
1839 spw_list.append(i)
1840
1841 else:
1842 RuntimeError("Invalid spw selection.")
1843
1844 else: # (ti_sep_length == 2) where both boundaries inclusive
1845 expr0 = ti_sep[0].strip()
1846 expr1 = ti_sep[1].strip()
1847
1848 if is_number(expr0) and is_number(expr1):
1849 # 'a~b'
1850 expr_pmin = min(float(expr0), float(expr1))
1851 expr_pmax = max(float(expr0), float(expr1))
1852 for i in valid_ifs:
1853 if (expr_pmin <= i) and (i <= expr_pmax):
1854 spw_list.append(i)
1855
1856 elif is_frequency(expr0) and is_frequency(expr1):
1857 # 'a*Hz~b*Hz'
1858 expr_f0 = get_freq_by_string(expr0)
1859 expr_f1 = get_freq_by_string(expr1)
1860
1861 for coord in self._get_coordinate_list():
1862 expr_p0 = coord['coord'].to_pixel(expr_f0)
1863 expr_p1 = coord['coord'].to_pixel(expr_f1)
1864 expr_pmin = min(expr_p0, expr_p1)
1865 expr_pmax = max(expr_p0, expr_p1)
1866
1867 spw = coord['if']
1868 pmin = 0.0
1869 pmax = float(self.nchan(spw) - 1)
1870
1871 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
1872 spw_list.append(spw)
1873
1874 elif is_velocity(expr0) and is_velocity(expr1):
1875 # 'a*m/s~b*m/s'
1876 expr_v0 = get_velocity_by_string(expr0)
1877 expr_v1 = get_velocity_by_string(expr1)
1878 expr_vmin = min(expr_v0, expr_v1)
1879 expr_vmax = max(expr_v0, expr_v1)
1880
1881 for coord in self._get_coordinate_list():
1882 spw = coord['if']
1883
1884 pmin = 0.0
1885 pmax = float(self.nchan(spw) - 1)
1886
1887 vel0 = coord['coord'].to_velocity(pmin)
1888 vel1 = coord['coord'].to_velocity(pmax)
1889
1890 vmin = min(vel0, vel1)
1891 vmax = max(vel0, vel1)
1892
1893 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
1894 spw_list.append(spw)
1895
1896 else:
1897 # cases such as 'aGHz~bkm/s' are not allowed now
1898 raise RuntimeError("Invalid spw selection.")
1899
1900 # parse channel expression and store the result in crange_list.
1901 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
1902 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
1903 # and also several of the above expressions connected with ';'.
1904
1905 for spw in spw_list:
1906 pmin = 0.0
1907 pmax = float(self.nchan(spw) - 1)
1908
1909 if (len(colon_sep) == 1):
1910 # no expression for channel selection,
1911 # which means all channels are to be selected.
1912 crange_list = [[pmin, pmax]]
1913
1914 else: # (len(colon_sep) == 2)
1915 crange_list = []
1916
1917 found = False
1918 for i in self._get_coordinate_list():
1919 if (i['if'] == spw):
1920 coord = i['coord']
1921 found = True
1922 break
1923
1924 if not found:
1925 raise RuntimeError("Invalid spw value.")
1926
1927 semicolon_sep = colon_sep[1].split(";")
1928 for scs_elem in semicolon_sep:
1929 scs_elem = scs_elem.strip()
1930
1931 ti_sep = scs_elem.split("~")
1932 ti_sep_length = len(ti_sep)
1933
1934 if (ti_sep_length > 2):
1935 raise RuntimeError("Invalid channel selection.")
1936
1937 elif (ti_sep_length == 1):
1938 if (scs_elem == "") or (scs_elem == "*"):
1939 # '' and '*' for all channels
1940 crange_list = [[pmin, pmax]]
1941 break
1942 elif (is_number(scs_elem)):
1943 # single channel given
1944 crange_list.append([float(scs_elem), float(scs_elem)])
1945 else:
1946 raise RuntimeError("Invalid channel selection.")
1947
1948 else: #(ti_sep_length == 2)
1949 expr0 = ti_sep[0].strip()
1950 expr1 = ti_sep[1].strip()
1951
1952 if is_number(expr0) and is_number(expr1):
1953 # 'a~b'
1954 expr_pmin = min(float(expr0), float(expr1))
1955 expr_pmax = max(float(expr0), float(expr1))
1956
1957 elif is_frequency(expr0) and is_frequency(expr1):
1958 # 'a*Hz~b*Hz'
1959 expr_p0 = coord.to_pixel(get_freq_by_string(expr0))
1960 expr_p1 = coord.to_pixel(get_freq_by_string(expr1))
1961 expr_pmin = min(expr_p0, expr_p1)
1962 expr_pmax = max(expr_p0, expr_p1)
1963
1964 elif is_velocity(expr0) and is_velocity(expr1):
1965 # 'a*m/s~b*m/s'
1966 restf = self.get_restfreqs().values()[0][0]
1967 expr_f0 = get_frequency_by_velocity(restf, get_velocity_by_string(expr0))
1968 expr_f1 = get_frequency_by_velocity(restf, get_velocity_by_string(expr1))
1969 expr_p0 = coord.to_pixel(expr_f0)
1970 expr_p1 = coord.to_pixel(expr_f1)
1971 expr_pmin = min(expr_p0, expr_p1)
1972 expr_pmax = max(expr_p0, expr_p1)
1973
1974 else:
1975 # cases such as 'aGHz~bkm/s' are not allowed now
1976 raise RuntimeError("Invalid channel selection.")
1977
1978 cmin = max(pmin, expr_pmin)
1979 cmax = min(pmax, expr_pmax)
1980 # if the given range of channel selection has overwrap with
1981 # that of current spw, output the overwrap area.
1982 if (cmin <= cmax):
1983 cmin = float(int(cmin + 0.5))
1984 cmax = float(int(cmax + 0.5))
1985 crange_list.append([cmin, cmax])
1986
1987 if (len(crange_list) == 0):
1988 crange_list.append([])
1989
1990 if res.has_key(spw):
1991 res[spw].extend(crange_list)
1992 else:
1993 res[spw] = crange_list
1994
1995 # restore original values
1996 self.set_restfreqs(orig_restfreq_list)
1997 self.set_freqframe(orig_frame)
1998 self.set_doppler(orig_doppler)
1999 self.set_unit(orig_unit)
2000
2001 return res
2002
2003 @asaplog_post_dec
2004 def get_first_rowno_by_if(self, ifno):
2005 found = False
2006 for irow in xrange(self.nrow()):
2007 if (self.getif(irow) == ifno):
2008 res = irow
2009 found = True
2010 break
2011
2012 if not found: raise RuntimeError("Invalid IF value.")
2013
2014 return res
2015
2016 @asaplog_post_dec
2017 def _get_coordinate_list(self):
2018 res = []
2019 spws = self.getifnos()
2020 for spw in spws:
2021 elem = {}
2022 elem['if'] = spw
2023 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2024 res.append(elem)
2025
2026 return res
2027
2028##################################
2029
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty selection defaults to all valid IFs,
        # e.g. "0, 1, 2" via str(list)[1:-1]
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # expand each [start, end] IF range, keeping only existing IFs
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # linear scan for the first row of this IF; ispw is
                        # guaranteed to be found since it comes from valid_ifs
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset: half of one channel's width in the current
                        # unit, used below to make '<'/'>' bounds exclusive
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None for a whole-spectrum selection
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                    del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2167
2168 @asaplog_post_dec
2169 def parse_idx_selection(self, mode, selexpr):
2170 """
2171 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2172 BEAMNO, and row number
2173
2174 Parameters:
2175 mode : which column to select.
2176 ['scan',|'if'|'pol'|'beam'|'row']
2177 selexpr : A comma separated selection expression.
2178 examples:
2179 '' = all (returns [])
2180 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2181 (returns [0,1,4,5,6,9])
2182 Returns:
2183 A List of selected indices
2184 """
2185 if selexpr == "":
2186 return []
2187 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2188 'b': 'beam', 'r': 'row'}
2189 smode = mode.lower()[0]
2190 if not (smode in valid_modes.keys()):
2191 msg = "Invalid mode '%s'. Valid modes are %s" %\
2192 (mode, str(valid_modes.values()))
2193 asaplog.post()
2194 asaplog.push(msg)
2195 asaplog.post("ERROR")
2196 mode = valid_modes[smode]
2197 minidx = None
2198 maxidx = None
2199 if smode == 'r':
2200 minidx = 0
2201 maxidx = self.nrow()
2202 else:
2203 idx = getattr(self,"get"+mode+"nos")()
2204 minidx = min(idx)
2205 maxidx = max(idx)
2206 del idx
2207 # split selexpr by "<spaces>,<spaces>"
2208 comma_sep = re.compile('\s*,\s*')
2209 sellist = comma_sep.split(selexpr)
2210 idxlist = []
2211 for currselstr in sellist:
2212 # single range (may include ~, < or >)
2213 currlist = self._parse_selection(currselstr, typestr='integer',
2214 minval=minidx,maxval=maxidx)
2215 for thelist in currlist:
2216 idxlist += range(thelist[0],thelist[1]+1)
2217 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2218 asaplog.push(msg)
2219 return idxlist
2220
2221 def _parse_selection(self, selstr, typestr='float', offset=0.,
2222 minval=None, maxval=None):
2223 """
2224 Parameters:
2225 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2226 typestr : The type of the values in returned list
2227 ('integer' or 'float')
2228 offset : The offset value to subtract from or add to
2229 the boundary value if the selection string
2230 includes '<' or '>' [Valid only for typestr='float']
2231 minval, maxval : The minimum/maximum values to set if the
2232 selection string includes '<' or '>'.
2233 The list element is filled with None by default.
2234 Returns:
2235 A list of min/max pair of selections.
2236 Example:
2237 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2238 --> returns [[0,2],[5,7],[9,9]]
2239 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2240 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2241 """
2242 # split selstr by '<spaces>;<spaces>'
2243 semi_sep = re.compile('\s*;\s*')
2244 selgroups = semi_sep.split(selstr)
2245 sellists = []
2246 if typestr.lower().startswith('int'):
2247 formatfunc = int
2248 offset = 1
2249 else:
2250 formatfunc = float
2251
2252 for currsel in selgroups:
2253 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2254 minsel = minval
2255 maxsel = maxval
2256 if currsel.find('~') > 0:
2257 # val0 <= x <= val1
2258 minsel = formatfunc(currsel.split('~')[0].strip())
2259 maxsel = formatfunc(currsel.split('~')[1].strip())
2260 elif currsel.strip().find('<=') > -1:
2261 bound = currsel.split('<=')
2262 try: # try "x <= val"
2263 minsel = minval
2264 maxsel = formatfunc(bound[1].strip())
2265 except ValueError: # now "val <= x"
2266 minsel = formatfunc(bound[0].strip())
2267 maxsel = maxval
2268 elif currsel.strip().find('>=') > -1:
2269 bound = currsel.split('>=')
2270 try: # try "x >= val"
2271 minsel = formatfunc(bound[1].strip())
2272 maxsel = maxval
2273 except ValueError: # now "val >= x"
2274 minsel = minval
2275 maxsel = formatfunc(bound[0].strip())
2276 elif currsel.strip().find('<') > -1:
2277 bound = currsel.split('<')
2278 try: # try "x < val"
2279 minsel = minval
2280 maxsel = formatfunc(bound[1].strip()) \
2281 - formatfunc(offset)
2282 except ValueError: # now "val < x"
2283 minsel = formatfunc(bound[0].strip()) \
2284 + formatfunc(offset)
2285 maxsel = maxval
2286 elif currsel.strip().find('>') > -1:
2287 bound = currsel.split('>')
2288 try: # try "x > val"
2289 minsel = formatfunc(bound[1].strip()) \
2290 + formatfunc(offset)
2291 maxsel = maxval
2292 except ValueError: # now "val > x"
2293 minsel = minval
2294 maxsel = formatfunc(bound[0].strip()) \
2295 - formatfunc(offset)
2296 else:
2297 minsel = formatfunc(currsel)
2298 maxsel = formatfunc(currsel)
2299 sellists.append([minsel,maxsel])
2300 return sellists
2301
2302# def get_restfreqs(self):
2303# """
2304# Get the restfrequency(s) stored in this scantable.
2305# The return value(s) are always of unit 'Hz'
2306# Parameters:
2307# none
2308# Returns:
2309# a list of doubles
2310# """
2311# return list(self._getrestfreqs())
2312
2313 def get_restfreqs(self, ids=None):
2314 """\
2315 Get the restfrequency(s) stored in this scantable.
2316 The return value(s) are always of unit 'Hz'
2317
2318 Parameters:
2319
2320 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2321 be retrieved
2322
2323 Returns:
2324
2325 dictionary containing ids and a list of doubles for each id
2326
2327 """
2328 if ids is None:
2329 rfreqs = {}
2330 idlist = self.getmolnos()
2331 for i in idlist:
2332 rfreqs[i] = list(self._getrestfreqs(i))
2333 return rfreqs
2334 else:
2335 if type(ids) == list or type(ids) == tuple:
2336 rfreqs = {}
2337 for i in ids:
2338 rfreqs[i] = list(self._getrestfreqs(i))
2339 return rfreqs
2340 else:
2341 return list(self._getrestfreqs(ids))
2342
2343 @asaplog_post_dec
2344 def set_restfreqs(self, freqs=None, unit='Hz'):
2345 """\
2346 Set or replace the restfrequency specified and
2347 if the 'freqs' argument holds a scalar,
2348 then that rest frequency will be applied to all the selected
2349 data. If the 'freqs' argument holds
2350 a vector, then it MUST be of equal or smaller length than
2351 the number of IFs (and the available restfrequencies will be
2352 replaced by this vector). In this case, *all* data have
2353 the restfrequency set per IF according
2354 to the corresponding value you give in the 'freqs' vector.
2355 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2356 IF 1 gets restfreq 2e9.
2357
2358 You can also specify the frequencies via a linecatalog.
2359
2360 Parameters:
2361
2362 freqs: list of rest frequency values or string idenitfiers
2363
2364 unit: unit for rest frequency (default 'Hz')
2365
2366
2367 Example::
2368
2369 # set the given restfrequency for the all currently selected IFs
2370 scan.set_restfreqs(freqs=1.4e9)
2371 # set restfrequencies for the n IFs (n > 1) in the order of the
2372 # list, i.e
2373 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2374 # len(list_of_restfreqs) == nIF
2375 # for nIF == 1 the following will set multiple restfrequency for
2376 # that IF
2377 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2378 # set multiple restfrequencies per IF. as a list of lists where
2379 # the outer list has nIF elements, the inner s arbitrary
2380 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2381
2382 *Note*:
2383
2384 To do more sophisticate Restfrequency setting, e.g. on a
2385 source and IF basis, use scantable.set_selection() before using
2386 this function::
2387
2388 # provided your scantable is called scan
2389 selection = selector()
2390 selection.set_name('ORION*')
2391 selection.set_ifs([1])
2392 scan.set_selection(selection)
2393 scan.set_restfreqs(freqs=86.6e9)
2394
2395 """
2396 varlist = vars()
2397 from asap import linecatalog
2398 # simple value
2399 if isinstance(freqs, int) or isinstance(freqs, float):
2400 self._setrestfreqs([freqs], [""], unit)
2401 # list of values
2402 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2403 # list values are scalars
2404 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2405 if len(freqs) == 1:
2406 self._setrestfreqs(freqs, [""], unit)
2407 else:
2408 # allow the 'old' mode of setting mulitple IFs
2409 savesel = self._getselection()
2410 sel = self.get_selection()
2411 iflist = self.getifnos()
2412 if len(freqs)>len(iflist):
2413 raise ValueError("number of elements in list of list "
2414 "exeeds the current IF selections")
2415 iflist = self.getifnos()
2416 for i, fval in enumerate(freqs):
2417 sel.set_ifs(iflist[i])
2418 self._setselection(sel)
2419 self._setrestfreqs([fval], [""], unit)
2420 self._setselection(savesel)
2421
2422 # list values are dict, {'value'=, 'name'=)
2423 elif isinstance(freqs[-1], dict):
2424 values = []
2425 names = []
2426 for d in freqs:
2427 values.append(d["value"])
2428 names.append(d["name"])
2429 self._setrestfreqs(values, names, unit)
2430 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2431 savesel = self._getselection()
2432 sel = self.get_selection()
2433 iflist = self.getifnos()
2434 if len(freqs)>len(iflist):
2435 raise ValueError("number of elements in list of list exeeds"
2436 " the current IF selections")
2437 for i, fval in enumerate(freqs):
2438 sel.set_ifs(iflist[i])
2439 self._setselection(sel)
2440 self._setrestfreqs(fval, [""], unit)
2441 self._setselection(savesel)
2442 # freqs are to be taken from a linecatalog
2443 elif isinstance(freqs, linecatalog):
2444 savesel = self._getselection()
2445 sel = self.get_selection()
2446 for i in xrange(freqs.nrow()):
2447 sel.set_ifs(iflist[i])
2448 self._setselection(sel)
2449 self._setrestfreqs([freqs.get_frequency(i)],
2450 [freqs.get_name(i)], "MHz")
2451 # ensure that we are not iterating past nIF
2452 if i == self.nif()-1: break
2453 self._setselection(savesel)
2454 else:
2455 return
2456 self._add_history("set_restfreqs", varlist)
2457
2458 @asaplog_post_dec
2459 def shift_refpix(self, delta):
2460 """\
2461 Shift the reference pixel of the Spectra Coordinate by an
2462 integer amount.
2463
2464 Parameters:
2465
2466 delta: the amount to shift by
2467
2468 *Note*:
2469
2470 Be careful using this with broadband data.
2471
2472 """
2473 varlist = vars()
2474 Scantable.shift_refpix(self, delta)
2475 s._add_history("shift_refpix", varlist)
2476
2477 @asaplog_post_dec
2478 def history(self, filename=None, nrows=-1, start=0):
2479 """\
2480 Print the history. Optionally to a file.
2481
2482 Parameters:
2483
2484 filename: The name of the file to save the history to.
2485
2486 """
2487 n = self._historylength()
2488 if nrows == -1:
2489 nrows = n
2490 if start+nrows > n:
2491 nrows = nrows-start
2492 if n > 1000 and nrows == n:
2493 nrows = 1000
2494 start = n-1000
2495 asaplog.push("Warning: History has {0} entries. Displaying last "
2496 "1000".format(n))
2497 hist = list(self._gethistory(nrows, start))
2498 out = "-"*80
2499 for h in hist:
2500 if not h.strip():
2501 continue
2502 if h.find("---") >-1:
2503 continue
2504 else:
2505 items = h.split("##")
2506 date = items[0]
2507 func = items[1]
2508 items = items[2:]
2509 out += "\n"+date+"\n"
2510 out += "Function: %s\n Parameters:" % (func)
2511 for i in items:
2512 if i == '':
2513 continue
2514 s = i.split("=")
2515 out += "\n %s = %s" % (s[0], s[1])
2516 out = "\n".join([out, "*"*80])
2517 if filename is not None:
2518 if filename is "":
2519 filename = 'scantable_history.txt'
2520 filename = os.path.expandvars(os.path.expanduser(filename))
2521 if not os.path.isdir(filename):
2522 data = open(filename, 'w')
2523 data.write(out)
2524 data.close()
2525 else:
2526 msg = "Illegal file name '%s'." % (filename)
2527 raise IOError(msg)
2528 return page(out)
2529
2530 #
2531 # Maths business
2532 #
2533 @asaplog_post_dec
2534 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2535 avmode="NONE"):
2536 """\
2537 Return the (time) weighted average of a scan. Scans will be averaged
2538 only if the source direction (RA/DEC) is within 1' otherwise
2539
2540 *Note*:
2541
2542 in channels only - align if necessary
2543
2544 Parameters:
2545
2546 mask: an optional mask (only used for 'var' and 'tsys'
2547 weighting)
2548
2549 scanav: True averages each scan separately
2550 False (default) averages all scans together,
2551
2552 weight: Weighting scheme.
2553 'none' (mean no weight)
2554 'var' (1/var(spec) weighted)
2555 'tsys' (1/Tsys**2 weighted)
2556 'tint' (integration time weighted)
2557 'tintsys' (Tint/Tsys**2)
2558 'median' ( median averaging)
2559 The default is 'tint'
2560
2561 align: align the spectra in velocity before averaging. It takes
2562 the time of the first spectrum as reference time.
2563 avmode: 'SOURCE' - also select by source name - or
2564 'NONE' (default). Not applicable for scanav=True or
2565 weight=median
2566
2567 Example::
2568
2569 # time average the scantable without using a mask
2570 newscan = scan.average_time()
2571
2572 """
2573 varlist = vars()
2574 weight = weight or 'TINT'
2575 mask = mask or ()
2576 scanav = (scanav and 'SCAN') or avmode.upper()
2577 scan = (self, )
2578
2579 if align:
2580 scan = (self.freq_align(insitu=False), )
2581 asaplog.push("Note: Alignment is don on a source-by-source basis")
2582 asaplog.push("Note: Averaging (by default) is not")
2583 # we need to set it to SOURCE averaging here
2584 s = None
2585 if weight.upper() == 'MEDIAN':
2586 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2587 scanav))
2588 else:
2589 s = scantable(self._math._average(scan, mask, weight.upper(),
2590 scanav))
2591 s._add_history("average_time", varlist)
2592 return s
2593
2594 @asaplog_post_dec
2595 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2596 """\
2597 Return a scan where all spectra are converted to either
2598 Jansky or Kelvin depending upon the flux units of the scan table.
2599 By default the function tries to look the values up internally.
2600 If it can't find them (or if you want to over-ride), you must
2601 specify EITHER jyperk OR eta (and D which it will try to look up
2602 also if you don't set it). jyperk takes precedence if you set both.
2603
2604 Parameters:
2605
2606 jyperk: the Jy / K conversion factor
2607
2608 eta: the aperture efficiency
2609
2610 d: the geometric diameter (metres)
2611
2612 insitu: if False a new scantable is returned.
2613 Otherwise, the scaling is done in-situ
2614 The default is taken from .asaprc (False)
2615
2616 """
2617 if insitu is None: insitu = rcParams['insitu']
2618 self._math._setinsitu(insitu)
2619 varlist = vars()
2620 jyperk = jyperk or -1.0
2621 d = d or -1.0
2622 eta = eta or -1.0
2623 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2624 s._add_history("convert_flux", varlist)
2625 if insitu: self._assign(s)
2626 else: return s
2627
2628 @asaplog_post_dec
2629 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2630 """\
2631 Return a scan after applying a gain-elevation correction.
2632 The correction can be made via either a polynomial or a
2633 table-based interpolation (and extrapolation if necessary).
2634 You specify polynomial coefficients, an ascii table or neither.
2635 If you specify neither, then a polynomial correction will be made
2636 with built in coefficients known for certain telescopes (an error
2637 will occur if the instrument is not known).
2638 The data and Tsys are *divided* by the scaling factors.
2639
2640 Parameters:
2641
2642 poly: Polynomial coefficients (default None) to compute a
2643 gain-elevation correction as a function of
2644 elevation (in degrees).
2645
2646 filename: The name of an ascii file holding correction factors.
2647 The first row of the ascii file must give the column
2648 names and these MUST include columns
2649 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2650 by this) somewhere.
2651 The second row must give the data type of the
2652 column. Use 'R' for Real and 'I' for Integer.
2653 An example file would be
2654 (actual factors are arbitrary) :
2655
2656 TIME ELEVATION FACTOR
2657 R R R
2658 0.1 0 0.8
2659 0.2 20 0.85
2660 0.3 40 0.9
2661 0.4 60 0.85
2662 0.5 80 0.8
2663 0.6 90 0.75
2664
2665 method: Interpolation method when correcting from a table.
2666 Values are 'nearest', 'linear' (default), 'cubic'
2667 and 'spline'
2668
2669 insitu: if False a new scantable is returned.
2670 Otherwise, the scaling is done in-situ
2671 The default is taken from .asaprc (False)
2672
2673 """
2674
2675 if insitu is None: insitu = rcParams['insitu']
2676 self._math._setinsitu(insitu)
2677 varlist = vars()
2678 poly = poly or ()
2679 from os.path import expandvars
2680 filename = expandvars(filename)
2681 s = scantable(self._math._gainel(self, poly, filename, method))
2682 s._add_history("gain_el", varlist)
2683 if insitu:
2684 self._assign(s)
2685 else:
2686 return s
2687
2688 @asaplog_post_dec
2689 def freq_align(self, reftime=None, method='cubic', insitu=None):
2690 """\
2691 Return a scan where all rows have been aligned in frequency/velocity.
2692 The alignment frequency frame (e.g. LSRK) is that set by function
2693 set_freqframe.
2694
2695 Parameters:
2696
2697 reftime: reference time to align at. By default, the time of
2698 the first row of data is used.
2699
2700 method: Interpolation method for regridding the spectra.
2701 Choose from 'nearest', 'linear', 'cubic' (default)
2702 and 'spline'
2703
2704 insitu: if False a new scantable is returned.
2705 Otherwise, the scaling is done in-situ
2706 The default is taken from .asaprc (False)
2707
2708 """
2709 if insitu is None: insitu = rcParams["insitu"]
2710 oldInsitu = self._math._insitu()
2711 self._math._setinsitu(insitu)
2712 varlist = vars()
2713 reftime = reftime or ""
2714 s = scantable(self._math._freq_align(self, reftime, method))
2715 s._add_history("freq_align", varlist)
2716 self._math._setinsitu(oldInsitu)
2717 if insitu:
2718 self._assign(s)
2719 else:
2720 return s
2721
2722 @asaplog_post_dec
2723 def opacity(self, tau=None, insitu=None):
2724 """\
2725 Apply an opacity correction. The data
2726 and Tsys are multiplied by the correction factor.
2727
2728 Parameters:
2729
2730 tau: (list of) opacity from which the correction factor is
2731 exp(tau*ZD)
2732 where ZD is the zenith-distance.
2733 If a list is provided, it has to be of length nIF,
2734 nIF*nPol or 1 and in order of IF/POL, e.g.
2735 [opif0pol0, opif0pol1, opif1pol0 ...]
2736 if tau is `None` the opacities are determined from a
2737 model.
2738
2739 insitu: if False a new scantable is returned.
2740 Otherwise, the scaling is done in-situ
2741 The default is taken from .asaprc (False)
2742
2743 """
2744 if insitu is None:
2745 insitu = rcParams['insitu']
2746 self._math._setinsitu(insitu)
2747 varlist = vars()
2748 if not hasattr(tau, "__len__"):
2749 tau = [tau]
2750 s = scantable(self._math._opacity(self, tau))
2751 s._add_history("opacity", varlist)
2752 if insitu:
2753 self._assign(s)
2754 else:
2755 return s
2756
2757 @asaplog_post_dec
2758 def bin(self, width=5, insitu=None):
2759 """\
2760 Return a scan where all spectra have been binned up.
2761
2762 Parameters:
2763
2764 width: The bin width (default=5) in pixels
2765
2766 insitu: if False a new scantable is returned.
2767 Otherwise, the scaling is done in-situ
2768 The default is taken from .asaprc (False)
2769
2770 """
2771 if insitu is None:
2772 insitu = rcParams['insitu']
2773 self._math._setinsitu(insitu)
2774 varlist = vars()
2775 s = scantable(self._math._bin(self, width))
2776 s._add_history("bin", varlist)
2777 if insitu:
2778 self._assign(s)
2779 else:
2780 return s
2781
2782 @asaplog_post_dec
2783 def reshape(self, first, last, insitu=None):
2784 """Resize the band by providing first and last channel.
2785 This will cut off all channels outside [first, last].
2786 """
2787 if insitu is None:
2788 insitu = rcParams['insitu']
2789 varlist = vars()
2790 if last < 0:
2791 last = self.nchan()-1 + last
2792 s = None
2793 if insitu:
2794 s = self
2795 else:
2796 s = self.copy()
2797 s._reshape(first,last)
2798 s._add_history("reshape", varlist)
2799 if not insitu:
2800 return s
2801
2802 @asaplog_post_dec
2803 def resample(self, width=5, method='cubic', insitu=None):
2804 """\
2805 Return a scan where all spectra have been binned up.
2806
2807 Parameters:
2808
2809 width: The bin width (default=5) in pixels
2810
2811 method: Interpolation method when correcting from a table.
2812 Values are 'nearest', 'linear', 'cubic' (default)
2813 and 'spline'
2814
2815 insitu: if False a new scantable is returned.
2816 Otherwise, the scaling is done in-situ
2817 The default is taken from .asaprc (False)
2818
2819 """
2820 if insitu is None:
2821 insitu = rcParams['insitu']
2822 self._math._setinsitu(insitu)
2823 varlist = vars()
2824 s = scantable(self._math._resample(self, method, width))
2825 s._add_history("resample", varlist)
2826 if insitu:
2827 self._assign(s)
2828 else:
2829 return s
2830
2831 @asaplog_post_dec
2832 def average_pol(self, mask=None, weight='none'):
2833 """\
2834 Average the Polarisations together.
2835
2836 Parameters:
2837
2838 mask: An optional mask defining the region, where the
2839 averaging will be applied. The output will have all
2840 specified points masked.
2841
2842 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2843 weighted), or 'tsys' (1/Tsys**2 weighted)
2844
2845 """
2846 varlist = vars()
2847 mask = mask or ()
2848 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2849 s._add_history("average_pol", varlist)
2850 return s
2851
2852 @asaplog_post_dec
2853 def average_beam(self, mask=None, weight='none'):
2854 """\
2855 Average the Beams together.
2856
2857 Parameters:
2858 mask: An optional mask defining the region, where the
2859 averaging will be applied. The output will have all
2860 specified points masked.
2861
2862 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2863 weighted), or 'tsys' (1/Tsys**2 weighted)
2864
2865 """
2866 varlist = vars()
2867 mask = mask or ()
2868 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2869 s._add_history("average_beam", varlist)
2870 return s
2871
2872 def parallactify(self, pflag):
2873 """\
2874 Set a flag to indicate whether this data should be treated as having
2875 been 'parallactified' (total phase == 0.0)
2876
2877 Parameters:
2878
2879 pflag: Bool indicating whether to turn this on (True) or
2880 off (False)
2881
2882 """
2883 varlist = vars()
2884 self._parallactify(pflag)
2885 self._add_history("parallactify", varlist)
2886
2887 @asaplog_post_dec
2888 def convert_pol(self, poltype=None):
2889 """\
2890 Convert the data to a different polarisation type.
2891 Note that you will need cross-polarisation terms for most conversions.
2892
2893 Parameters:
2894
2895 poltype: The new polarisation type. Valid types are:
2896 'linear', 'circular', 'stokes' and 'linpol'
2897
2898 """
2899 varlist = vars()
2900 s = scantable(self._math._convertpol(self, poltype))
2901 s._add_history("convert_pol", varlist)
2902 return s
2903
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a pristine copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # sticky per-row verification state:
        #   'H' before the first row, None afterwards,
        #   'N' once the user chose "reject all remaining rows"
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed (xsm/ysm) vs original (xorg/yorg) data per row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user to accept/reject this row's smoothing;
                # possible answers include 'N' (no), 'R' (reject rest),
                # 'A' (accept rest)
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2996
2997 @asaplog_post_dec
2998 def regrid_channel(self, width=5, plot=False, insitu=None):
2999 """\
3000 Regrid the spectra by the specified channel width
3001
3002 Parameters:
3003
3004 width: The channel width (float) of regridded spectra
3005 in the current spectral unit.
3006
3007 plot: [NOT IMPLEMENTED YET]
3008 plot the original and the regridded spectra.
3009 In this each indivual fit has to be approved, by
3010 typing 'y' or 'n'
3011
3012 insitu: if False a new scantable is returned.
3013 Otherwise, the scaling is done in-situ
3014 The default is taken from .asaprc (False)
3015
3016 """
3017 if insitu is None: insitu = rcParams['insitu']
3018 varlist = vars()
3019
3020 if plot:
3021 asaplog.post()
3022 asaplog.push("Verification plot is not implemtnetd yet.")
3023 asaplog.post("WARN")
3024
3025 s = self.copy()
3026 s._regrid_specchan(width)
3027
3028 s._add_history("regrid_channel", varlist)
3029
3030# if plot:
3031# from asap.asapplotter import new_asaplot
3032# theplot = new_asaplot(rcParams['plotter.gui'])
3033# from matplotlib import rc as rcp
3034# rcp('lines', linewidth=1)
3035# theplot.set_panels()
3036# ylab=s._get_ordinate_label()
3037# #theplot.palette(0,["#777777","red"])
3038# for r in xrange(s.nrow()):
3039# xsm=s._getabcissa(r)
3040# ysm=s._getspectrum(r)
3041# xorg=orgscan._getabcissa(r)
3042# yorg=orgscan._getspectrum(r)
3043# theplot.clear()
3044# theplot.hold()
3045# theplot.set_axes('ylabel',ylab)
3046# theplot.set_axes('xlabel',s._getabcissalabel(r))
3047# theplot.set_axes('title',s._getsourcename(r))
3048# theplot.set_line(label='Original',color="#777777")
3049# theplot.plot(xorg,yorg)
3050# theplot.set_line(label='Smoothed',color="red")
3051# theplot.plot(xsm,ysm)
3052# ### Ugly part for legend
3053# for i in [0,1]:
3054# theplot.subplots[0]['lines'].append(
3055# [theplot.subplots[0]['axes'].lines[i]]
3056# )
3057# theplot.release()
3058# ### Ugly part for legend
3059# theplot.subplots[0]['lines']=[]
3060# res = raw_input("Accept smoothing ([y]/n): ")
3061# if res.upper() == 'N':
3062# s._setspectrum(yorg, r)
3063# theplot.quit()
3064# del theplot
3065# del orgscan
3066
3067 if insitu: self._assign(s)
3068 else: return s
3069
3070 @asaplog_post_dec
3071 def _parse_wn(self, wn):
3072 if isinstance(wn, list) or isinstance(wn, tuple):
3073 return wn
3074 elif isinstance(wn, int):
3075 return [ wn ]
3076 elif isinstance(wn, str):
3077 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3078 val = wn.split('-')
3079 val = [int(val[0]), int(val[1])]
3080 val.sort()
3081 res = [i for i in xrange(val[0], val[1]+1)]
3082 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3083 val = int(wn[2:])+1
3084 res = [i for i in xrange(val)]
3085 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3086 val = int(wn[:-2])+1
3087 res = [i for i in xrange(val)]
3088 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3089 val = int(wn[1:])
3090 res = [i for i in xrange(val)]
3091 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3092 val = int(wn[:-1])
3093 res = [i for i in xrange(val)]
3094 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3095 # then interpreted in C++
3096 # side as [a,a+1,...,a_nyq]
3097 # (CAS-3759)
3098 val = int(wn[2:])
3099 res = [val, -999]
3100 #res = [i for i in xrange(val, self.nchan()/2+1)]
3101 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3102 # then interpreted in C++
3103 # side as [a,a+1,...,a_nyq]
3104 # (CAS-3759)
3105 val = int(wn[:-2])
3106 res = [val, -999]
3107 #res = [i for i in xrange(val, self.nchan()/2+1)]
3108 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3109 # then interpreted in C++
3110 # side as [a+1,a+2,...,a_nyq]
3111 # (CAS-3759)
3112 val = int(wn[1:])+1
3113 res = [val, -999]
3114 #res = [i for i in xrange(val, self.nchan()/2+1)]
3115 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3116 # then interpreted in C++
3117 # side as [a+1,a+2,...,a_nyq]
3118 # (CAS-3759)
3119 val = int(wn[:-1])+1
3120 res = [val, -999]
3121 #res = [i for i in xrange(val, self.nchan()/2+1)]
3122
3123 return res
3124 else:
3125 msg = 'wrong value given for addwn/rejwn'
3126 raise RuntimeError(msg)
3127
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.
        """

        try:
            varlist = vars()
            # fill in the documented defaults for unset keyword arguments
            if retfitres is None: retfitres = False
            if inbltable is None: raise ValueError("bltable missing.")
            if outbltable is None: outbltable = ''
            if overwrite is None: overwrite = False

            if insitu is None: insitu = rcParams['insitu']
            # operate on self directly (insitu) or on a private copy
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # 4th argument tells the C++ layer whether outbltable already
            # exists (append) or must be created from scratch
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # convert the raw string result into a fit-result structure
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            # return shape depends on insitu/retfitres, see docstring
            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            # translate low-level fitting failures into the package's
            # standard fitting exception
            raise_fitting_failure_exception(e)
3203
3204 @asaplog_post_dec
3205 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3206 """\
3207 Subtract baseline based on parameters written in the input list.
3208
3209 Parameters:
3210 insitu: if True, baseline fitting/subtraction is done
3211 in-situ. If False, a new scantable with
3212 baseline subtracted is returned. Actually,
3213 format of the returned value depends on both
3214 insitu and retfitres (see below).
3215 The default is taken from .asaprc (False)
3216 retfitres: if True, the results of baseline fitting (i.e.,
3217 coefficients and rms) are returned.
3218 default is False.
3219 The format of the returned value of this
3220 function varies as follows:
3221 (1) in case insitu=True and retfitres=True:
3222 fitting result.
3223 (2) in case insitu=True and retfitres=False:
3224 None.
3225 (3) in case insitu=False and retfitres=True:
3226 a dictionary containing a new scantable
3227 (with baseline subtracted) and the fitting
3228 results.
3229 (4) in case insitu=False and retfitres=False:
3230 a new scantable (with baseline subtracted).
3231 blinfo: baseline parameter set stored in a dictionary
3232 or a list of dictionary. Each dictionary
3233 corresponds to each spectrum and must contain
3234 the following keys and values:
3235 'row': row number,
3236 'blfunc': function name. available ones include
3237 'poly', 'chebyshev', 'cspline' and
3238 'sinusoid',
3239 'order': maximum order of polynomial. needed
3240 if blfunc='poly' or 'chebyshev',
3241 'npiece': number or piecewise polynomial.
3242 needed if blfunc='cspline',
3243 'nwave': a list of sinusoidal wave numbers.
3244 needed if blfunc='sinusoid', and
3245 'masklist': min-max windows for channel mask.
3246 the specified ranges will be used
3247 for fitting.
3248 bltable: name of output baseline table where baseline
3249 parameters and fitting results recorded.
3250 default is ''(no output).
3251 overwrite: if True when an existing baseline table is
3252 specified for bltable, overwrites it.
3253 Otherwise there is no harm.
3254 default is False.
3255
3256 Example:
3257 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3258 'masklist':[[10,350],[352,510]]},
3259 {'row':1, 'blfunc':'cspline', 'npiece':3,
3260 'masklist':[[3,16],[19,404],[407,511]]}
3261 ])
3262
3263 the first spectrum (row=0) will be fitted with polynomial
3264 of order=5 and the next one (row=1) will be fitted with cubic
3265 spline consisting of 3 pieces.
3266 """
3267
3268 try:
3269 varlist = vars()
3270 if retfitres is None: retfitres = False
3271 if blinfo is None: blinfo = []
3272 if bltable is None: bltable = ''
3273 if overwrite is None: overwrite = False
3274
3275 if insitu is None: insitu = rcParams['insitu']
3276 if insitu:
3277 workscan = self
3278 else:
3279 workscan = self.copy()
3280
3281 nrow = workscan.nrow()
3282
3283 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3284
3285 print "in_blinfo=< "+ str(in_blinfo)+" >"
3286
3287 sres = workscan._sub_baseline(in_blinfo,
3288 retfitres,
3289 bltable,
3290 os.path.exists(bltable),
3291 overwrite)
3292 if retfitres: res = parse_fitresult(sres)
3293
3294 workscan._add_history('sub_baseline', varlist)
3295
3296 if insitu:
3297 self._assign(workscan)
3298 if retfitres:
3299 return res
3300 else:
3301 return None
3302 else:
3303 if retfitres:
3304 return {'scantable': workscan, 'fitresults': res}
3305 else:
3306 return workscan
3307
3308 except RuntimeError, e:
3309 raise_fitting_failure_exception(e)
3310
3311 @asaplog_post_dec
3312 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3313 whichrow=None, uselinefinder=None, edge=None,
3314 threshold=None, chan_avg_limit=None):
3315 """\
3316 Calculates and returns model selection criteria for a specified
3317 baseline model and a given spectrum data.
3318 Available values include Akaike Information Criterion (AIC), the
3319 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3320 Bayesian Information Criterion (BIC) and the Generalised Cross
3321 Validation (GCV).
3322
3323 Parameters:
3324 value: name of model selection criteria to calculate.
3325 available ones include 'aic', 'aicc', 'bic' and
3326 'gcv'. default is 'aicc'.
3327 blfunc: baseline function name. available ones include
3328 'chebyshev', 'cspline' and 'sinusoid'.
3329 default is 'chebyshev'.
3330 order: parameter for basline function. actually stands for
3331 order of polynomial (order) for 'chebyshev',
3332 number of spline pieces (npiece) for 'cspline' and
3333 maximum wave number for 'sinusoid', respectively.
3334 default is 5 (which is also the default order value
3335 for [auto_]chebyshev_baseline()).
3336 mask: an optional mask. default is [].
3337 whichrow: row number. default is 0 (the first row)
3338 uselinefinder: use sd.linefinder() to flag out line regions
3339 default is True.
3340 edge: an optional number of channel to drop at
3341 the edge of spectrum. If only one value is
3342 specified, the same number will be dropped
3343 from both sides of the spectrum. Default
3344 is to keep all channels. Nested tuples
3345 represent individual edge selection for
3346 different IFs (a number of spectral channels
3347 can be different)
3348 default is (0, 0).
3349 threshold: the threshold used by line finder. It is
3350 better to keep it large as only strong lines
3351 affect the baseline solution.
3352 default is 3.
3353 chan_avg_limit: a maximum number of consequtive spectral
3354 channels to average during the search of
3355 weak and broad lines. The default is no
3356 averaging (and no search for weak lines).
3357 If such lines can affect the fitted baseline
3358 (e.g. a high order polynomial is fitted),
3359 increase this parameter (usually values up
3360 to 8 are reasonable). Most users of this
3361 method should find the default value sufficient.
3362 default is 1.
3363
3364 Example:
3365 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3366 """
3367
3368 try:
3369 varlist = vars()
3370
3371 if value is None: value = 'aicc'
3372 if blfunc is None: blfunc = 'chebyshev'
3373 if order is None: order = 5
3374 if mask is None: mask = []
3375 if whichrow is None: whichrow = 0
3376 if uselinefinder is None: uselinefinder = True
3377 if edge is None: edge = (0, 0)
3378 if threshold is None: threshold = 3
3379 if chan_avg_limit is None: chan_avg_limit = 1
3380
3381 return self._calc_aic(value, blfunc, order, mask,
3382 whichrow, uselinefinder, edge,
3383 threshold, chan_avg_limit)
3384
3385 except RuntimeError, e:
3386 raise_fitting_failure_exception(e)
3387
3388 @asaplog_post_dec
3389 def sinusoid_baseline(self, mask=None, applyfft=None,
3390 fftmethod=None, fftthresh=None,
3391 addwn=None, rejwn=None,
3392 insitu=None,
3393 clipthresh=None, clipniter=None,
3394 plot=None,
3395 getresidual=None,
3396 showprogress=None, minnrow=None,
3397 outlog=None,
3398 blfile=None, csvformat=None,
3399 bltable=None):
3400 """\
3401 Return a scan which has been baselined (all rows) with sinusoidal
3402 functions.
3403
3404 Parameters:
3405 mask: an optional mask
3406 applyfft: if True use some method, such as FFT, to find
3407 strongest sinusoidal components in the wavenumber
3408 domain to be used for baseline fitting.
3409 default is True.
3410 fftmethod: method to find the strong sinusoidal components.
3411 now only 'fft' is available and it is the default.
3412 fftthresh: the threshold to select wave numbers to be used for
3413 fitting from the distribution of amplitudes in the
3414 wavenumber domain.
3415 both float and string values accepted.
3416 given a float value, the unit is set to sigma.
3417 for string values, allowed formats include:
3418 'xsigma' or 'x' (= x-sigma level. e.g.,
3419 '3sigma'), or
3420 'topx' (= the x strongest ones, e.g. 'top5').
3421 default is 3.0 (unit: sigma).
3422 addwn: the additional wave numbers to be used for fitting.
3423 list or integer value is accepted to specify every
3424 wave numbers. also string value can be used in case
3425 you need to specify wave numbers in a certain range,
3426 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3427 '<a' (= 0,1,...,a-2,a-1),
3428 '>=a' (= a, a+1, ... up to the maximum wave
3429 number corresponding to the Nyquist
3430 frequency for the case of FFT).
3431 default is [0].
3432 rejwn: the wave numbers NOT to be used for fitting.
3433 can be set just as addwn but has higher priority:
3434 wave numbers which are specified both in addwn
3435 and rejwn will NOT be used. default is [].
3436 insitu: if False a new scantable is returned.
3437 Otherwise, the scaling is done in-situ
3438 The default is taken from .asaprc (False)
3439 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3440 clipniter: maximum number of iteration of 'clipthresh'-sigma
3441 clipping (default is 0)
3442 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3443 plot the fit and the residual. In this each
3444 indivual fit has to be approved, by typing 'y'
3445 or 'n'
3446 getresidual: if False, returns best-fit values instead of
3447 residual. (default is True)
3448 showprogress: show progress status for large data.
3449 default is True.
3450 minnrow: minimum number of input spectra to show.
3451 default is 1000.
3452 outlog: Output the coefficients of the best-fit
3453 function to logger (default is False)
3454 blfile: Name of a text file in which the best-fit
3455 parameter values to be written
3456 (default is '': no file/logger output)
3457 csvformat: if True blfile is csv-formatted, default is False.
3458 bltable: name of a baseline table where fitting results
3459 (coefficients, rms, etc.) are to be written.
3460 if given, fitting results will NOT be output to
3461 scantable (insitu=True) or None will be
3462 returned (insitu=False).
3463 (default is "": no table output)
3464
3465 Example:
3466 # return a scan baselined by a combination of sinusoidal curves
3467 # having wave numbers in spectral window up to 10,
3468 # also with 3-sigma clipping, iteration up to 4 times
3469 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3470
3471 Note:
3472 The best-fit parameter values output in logger and/or blfile are now
3473 based on specunit of 'channel'.
3474 """
3475
3476 try:
3477 varlist = vars()
3478
3479 if insitu is None: insitu = rcParams['insitu']
3480 if insitu:
3481 workscan = self
3482 else:
3483 workscan = self.copy()
3484
3485 if mask is None: mask = []
3486 if applyfft is None: applyfft = True
3487 if fftmethod is None: fftmethod = 'fft'
3488 if fftthresh is None: fftthresh = 3.0
3489 if addwn is None: addwn = [0]
3490 if rejwn is None: rejwn = []
3491 if clipthresh is None: clipthresh = 3.0
3492 if clipniter is None: clipniter = 0
3493 if plot is None: plot = False
3494 if getresidual is None: getresidual = True
3495 if showprogress is None: showprogress = True
3496 if minnrow is None: minnrow = 1000
3497 if outlog is None: outlog = False
3498 if blfile is None: blfile = ''
3499 if csvformat is None: csvformat = False
3500 if bltable is None: bltable = ''
3501
3502 sapplyfft = 'true' if applyfft else 'false'
3503 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3504
3505 scsvformat = 'T' if csvformat else 'F'
3506
3507 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3508 workscan._sinusoid_baseline(mask,
3509 fftinfo,
3510 #applyfft, fftmethod.lower(),
3511 #str(fftthresh).lower(),
3512 workscan._parse_wn(addwn),
3513 workscan._parse_wn(rejwn),
3514 clipthresh, clipniter,
3515 getresidual,
3516 pack_progress_params(showprogress,
3517 minnrow),
3518 outlog, scsvformat+blfile,
3519 bltable)
3520 workscan._add_history('sinusoid_baseline', varlist)
3521
3522 if bltable == '':
3523 if insitu:
3524 self._assign(workscan)
3525 else:
3526 return workscan
3527 else:
3528 if not insitu:
3529 return None
3530
3531 except RuntimeError, e:
3532 raise_fitting_failure_exception(e)
3533
3534
3535 @asaplog_post_dec
3536 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3537 fftmethod=None, fftthresh=None,
3538 addwn=None, rejwn=None,
3539 insitu=None,
3540 clipthresh=None, clipniter=None,
3541 edge=None, threshold=None, chan_avg_limit=None,
3542 plot=None,
3543 getresidual=None,
3544 showprogress=None, minnrow=None,
3545 outlog=None,
3546 blfile=None, csvformat=None,
3547 bltable=None):
3548 """\
3549 Return a scan which has been baselined (all rows) with sinusoidal
3550 functions.
3551 Spectral lines are detected first using linefinder and masked out
3552 to avoid them affecting the baseline solution.
3553
3554 Parameters:
3555 mask: an optional mask retreived from scantable
3556 applyfft: if True use some method, such as FFT, to find
3557 strongest sinusoidal components in the wavenumber
3558 domain to be used for baseline fitting.
3559 default is True.
3560 fftmethod: method to find the strong sinusoidal components.
3561 now only 'fft' is available and it is the default.
3562 fftthresh: the threshold to select wave numbers to be used for
3563 fitting from the distribution of amplitudes in the
3564 wavenumber domain.
3565 both float and string values accepted.
3566 given a float value, the unit is set to sigma.
3567 for string values, allowed formats include:
3568 'xsigma' or 'x' (= x-sigma level. e.g.,
3569 '3sigma'), or
3570 'topx' (= the x strongest ones, e.g. 'top5').
3571 default is 3.0 (unit: sigma).
3572 addwn: the additional wave numbers to be used for fitting.
3573 list or integer value is accepted to specify every
3574 wave numbers. also string value can be used in case
3575 you need to specify wave numbers in a certain range,
3576 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3577 '<a' (= 0,1,...,a-2,a-1),
3578 '>=a' (= a, a+1, ... up to the maximum wave
3579 number corresponding to the Nyquist
3580 frequency for the case of FFT).
3581 default is [0].
3582 rejwn: the wave numbers NOT to be used for fitting.
3583 can be set just as addwn but has higher priority:
3584 wave numbers which are specified both in addwn
3585 and rejwn will NOT be used. default is [].
3586 insitu: if False a new scantable is returned.
3587 Otherwise, the scaling is done in-situ
3588 The default is taken from .asaprc (False)
3589 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3590 clipniter: maximum number of iteration of 'clipthresh'-sigma
3591 clipping (default is 0)
3592 edge: an optional number of channel to drop at
3593 the edge of spectrum. If only one value is
3594 specified, the same number will be dropped
3595 from both sides of the spectrum. Default
3596 is to keep all channels. Nested tuples
3597 represent individual edge selection for
3598 different IFs (a number of spectral channels
3599 can be different)
3600 threshold: the threshold used by line finder. It is
3601 better to keep it large as only strong lines
3602 affect the baseline solution.
3603 chan_avg_limit: a maximum number of consequtive spectral
3604 channels to average during the search of
3605 weak and broad lines. The default is no
3606 averaging (and no search for weak lines).
3607 If such lines can affect the fitted baseline
3608 (e.g. a high order polynomial is fitted),
3609 increase this parameter (usually values up
3610 to 8 are reasonable). Most users of this
3611 method should find the default value sufficient.
3612 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3613 plot the fit and the residual. In this each
3614 indivual fit has to be approved, by typing 'y'
3615 or 'n'
3616 getresidual: if False, returns best-fit values instead of
3617 residual. (default is True)
3618 showprogress: show progress status for large data.
3619 default is True.
3620 minnrow: minimum number of input spectra to show.
3621 default is 1000.
3622 outlog: Output the coefficients of the best-fit
3623 function to logger (default is False)
3624 blfile: Name of a text file in which the best-fit
3625 parameter values to be written
3626 (default is "": no file/logger output)
3627 csvformat: if True blfile is csv-formatted, default is False.
3628 bltable: name of a baseline table where fitting results
3629 (coefficients, rms, etc.) are to be written.
3630 if given, fitting results will NOT be output to
3631 scantable (insitu=True) or None will be
3632 returned (insitu=False).
3633 (default is "": no table output)
3634
3635 Example:
3636 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3637
3638 Note:
3639 The best-fit parameter values output in logger and/or blfile are now
3640 based on specunit of 'channel'.
3641 """
3642
3643 try:
3644 varlist = vars()
3645
3646 if insitu is None: insitu = rcParams['insitu']
3647 if insitu:
3648 workscan = self
3649 else:
3650 workscan = self.copy()
3651
3652 if mask is None: mask = []
3653 if applyfft is None: applyfft = True
3654 if fftmethod is None: fftmethod = 'fft'
3655 if fftthresh is None: fftthresh = 3.0
3656 if addwn is None: addwn = [0]
3657 if rejwn is None: rejwn = []
3658 if clipthresh is None: clipthresh = 3.0
3659 if clipniter is None: clipniter = 0
3660 if edge is None: edge = (0,0)
3661 if threshold is None: threshold = 3
3662 if chan_avg_limit is None: chan_avg_limit = 1
3663 if plot is None: plot = False
3664 if getresidual is None: getresidual = True
3665 if showprogress is None: showprogress = True
3666 if minnrow is None: minnrow = 1000
3667 if outlog is None: outlog = False
3668 if blfile is None: blfile = ''
3669 if csvformat is None: csvformat = False
3670 if bltable is None: bltable = ''
3671
3672 sapplyfft = 'true' if applyfft else 'false'
3673 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3674
3675 scsvformat = 'T' if csvformat else 'F'
3676
3677 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3678 workscan._auto_sinusoid_baseline(mask,
3679 fftinfo,
3680 workscan._parse_wn(addwn),
3681 workscan._parse_wn(rejwn),
3682 clipthresh, clipniter,
3683 normalise_edge_param(edge),
3684 threshold, chan_avg_limit,
3685 getresidual,
3686 pack_progress_params(showprogress,
3687 minnrow),
3688 outlog, scsvformat+blfile, bltable)
3689 workscan._add_history("auto_sinusoid_baseline", varlist)
3690
3691 if bltable == '':
3692 if insitu:
3693 self._assign(workscan)
3694 else:
3695 return workscan
3696 else:
3697 if not insitu:
3698 return None
3699
3700 except RuntimeError, e:
3701 raise_fitting_failure_exception(e)
3702
3703 @asaplog_post_dec
3704 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3705 clipthresh=None, clipniter=None, plot=None,
3706 getresidual=None, showprogress=None, minnrow=None,
3707 outlog=None, blfile=None, csvformat=None,
3708 bltable=None):
3709 """\
3710 Return a scan which has been baselined (all rows) by cubic spline
3711 function (piecewise cubic polynomial).
3712
3713 Parameters:
3714 mask: An optional mask
3715 npiece: Number of pieces. (default is 2)
3716 insitu: If False a new scantable is returned.
3717 Otherwise, the scaling is done in-situ
3718 The default is taken from .asaprc (False)
3719 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3720 clipniter: maximum number of iteration of 'clipthresh'-sigma
3721 clipping (default is 0)
3722 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3723 plot the fit and the residual. In this each
3724 indivual fit has to be approved, by typing 'y'
3725 or 'n'
3726 getresidual: if False, returns best-fit values instead of
3727 residual. (default is True)
3728 showprogress: show progress status for large data.
3729 default is True.
3730 minnrow: minimum number of input spectra to show.
3731 default is 1000.
3732 outlog: Output the coefficients of the best-fit
3733 function to logger (default is False)
3734 blfile: Name of a text file in which the best-fit
3735 parameter values to be written
3736 (default is "": no file/logger output)
3737 csvformat: if True blfile is csv-formatted, default is False.
3738 bltable: name of a baseline table where fitting results
3739 (coefficients, rms, etc.) are to be written.
3740 if given, fitting results will NOT be output to
3741 scantable (insitu=True) or None will be
3742 returned (insitu=False).
3743 (default is "": no table output)
3744
3745 Example:
3746 # return a scan baselined by a cubic spline consisting of 2 pieces
3747 # (i.e., 1 internal knot),
3748 # also with 3-sigma clipping, iteration up to 4 times
3749 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3750
3751 Note:
3752 The best-fit parameter values output in logger and/or blfile are now
3753 based on specunit of 'channel'.
3754 """
3755
3756 try:
3757 varlist = vars()
3758
3759 if insitu is None: insitu = rcParams['insitu']
3760 if insitu:
3761 workscan = self
3762 else:
3763 workscan = self.copy()
3764
3765 if mask is None: mask = []
3766 if npiece is None: npiece = 2
3767 if clipthresh is None: clipthresh = 3.0
3768 if clipniter is None: clipniter = 0
3769 if plot is None: plot = False
3770 if getresidual is None: getresidual = True
3771 if showprogress is None: showprogress = True
3772 if minnrow is None: minnrow = 1000
3773 if outlog is None: outlog = False
3774 if blfile is None: blfile = ''
3775 if csvformat is None: csvformat = False
3776 if bltable is None: bltable = ''
3777
3778 scsvformat = 'T' if csvformat else 'F'
3779
3780 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3781 workscan._cspline_baseline(mask, npiece,
3782 clipthresh, clipniter,
3783 getresidual,
3784 pack_progress_params(showprogress,
3785 minnrow),
3786 outlog, scsvformat+blfile,
3787 bltable)
3788 workscan._add_history("cspline_baseline", varlist)
3789
3790 if bltable == '':
3791 if insitu:
3792 self._assign(workscan)
3793 else:
3794 return workscan
3795 else:
3796 if not insitu:
3797 return None
3798
3799 except RuntimeError, e:
3800 raise_fitting_failure_exception(e)
3801
3802 @asaplog_post_dec
3803 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3804 clipthresh=None, clipniter=None,
3805 edge=None, threshold=None, chan_avg_limit=None,
3806 getresidual=None, plot=None,
3807 showprogress=None, minnrow=None, outlog=None,
3808 blfile=None, csvformat=None, bltable=None):
3809 """\
3810 Return a scan which has been baselined (all rows) by cubic spline
3811 function (piecewise cubic polynomial).
3812 Spectral lines are detected first using linefinder and masked out
3813 to avoid them affecting the baseline solution.
3814
3815 Parameters:
3816 mask: an optional mask retreived from scantable
3817 npiece: Number of pieces. (default is 2)
3818 insitu: if False a new scantable is returned.
3819 Otherwise, the scaling is done in-situ
3820 The default is taken from .asaprc (False)
3821 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3822 clipniter: maximum number of iteration of 'clipthresh'-sigma
3823 clipping (default is 0)
3824 edge: an optional number of channel to drop at
3825 the edge of spectrum. If only one value is
3826 specified, the same number will be dropped
3827 from both sides of the spectrum. Default
3828 is to keep all channels. Nested tuples
3829 represent individual edge selection for
3830 different IFs (a number of spectral channels
3831 can be different)
3832 threshold: the threshold used by line finder. It is
3833 better to keep it large as only strong lines
3834 affect the baseline solution.
3835 chan_avg_limit: a maximum number of consequtive spectral
3836 channels to average during the search of
3837 weak and broad lines. The default is no
3838 averaging (and no search for weak lines).
3839 If such lines can affect the fitted baseline
3840 (e.g. a high order polynomial is fitted),
3841 increase this parameter (usually values up
3842 to 8 are reasonable). Most users of this
3843 method should find the default value sufficient.
3844 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3845 plot the fit and the residual. In this each
3846 indivual fit has to be approved, by typing 'y'
3847 or 'n'
3848 getresidual: if False, returns best-fit values instead of
3849 residual. (default is True)
3850 showprogress: show progress status for large data.
3851 default is True.
3852 minnrow: minimum number of input spectra to show.
3853 default is 1000.
3854 outlog: Output the coefficients of the best-fit
3855 function to logger (default is False)
3856 blfile: Name of a text file in which the best-fit
3857 parameter values to be written
3858 (default is "": no file/logger output)
3859 csvformat: if True blfile is csv-formatted, default is False.
3860 bltable: name of a baseline table where fitting results
3861 (coefficients, rms, etc.) are to be written.
3862 if given, fitting results will NOT be output to
3863 scantable (insitu=True) or None will be
3864 returned (insitu=False).
3865 (default is "": no table output)
3866
3867 Example:
3868 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3869
3870 Note:
3871 The best-fit parameter values output in logger and/or blfile are now
3872 based on specunit of 'channel'.
3873 """
3874
3875 try:
3876 varlist = vars()
3877
3878 if insitu is None: insitu = rcParams['insitu']
3879 if insitu:
3880 workscan = self
3881 else:
3882 workscan = self.copy()
3883
3884 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3885 if mask is None: mask = []
3886 if npiece is None: npiece = 2
3887 if clipthresh is None: clipthresh = 3.0
3888 if clipniter is None: clipniter = 0
3889 if edge is None: edge = (0, 0)
3890 if threshold is None: threshold = 3
3891 if chan_avg_limit is None: chan_avg_limit = 1
3892 if plot is None: plot = False
3893 if getresidual is None: getresidual = True
3894 if showprogress is None: showprogress = True
3895 if minnrow is None: minnrow = 1000
3896 if outlog is None: outlog = False
3897 if blfile is None: blfile = ''
3898 if csvformat is None: csvformat = False
3899 if bltable is None: bltable = ''
3900
3901 scsvformat = 'T' if csvformat else 'F'
3902
3903 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3904 workscan._auto_cspline_baseline(mask, npiece,
3905 clipthresh, clipniter,
3906 normalise_edge_param(edge),
3907 threshold,
3908 chan_avg_limit, getresidual,
3909 pack_progress_params(showprogress,
3910 minnrow),
3911 outlog,
3912 scsvformat+blfile,
3913 bltable)
3914 workscan._add_history("auto_cspline_baseline", varlist)
3915
3916 if bltable == '':
3917 if insitu:
3918 self._assign(workscan)
3919 else:
3920 return workscan
3921 else:
3922 if not insitu:
3923 return None
3924
3925 except RuntimeError, e:
3926 raise_fitting_failure_exception(e)
3927
3928 @asaplog_post_dec
3929 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
3930 clipthresh=None, clipniter=None, plot=None,
3931 getresidual=None, showprogress=None, minnrow=None,
3932 outlog=None, blfile=None, csvformat=None,
3933 bltable=None):
3934 """\
3935 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3936
3937 Parameters:
3938 mask: An optional mask
3939 order: the maximum order of Chebyshev polynomial (default is 5)
3940 insitu: If False a new scantable is returned.
3941 Otherwise, the scaling is done in-situ
3942 The default is taken from .asaprc (False)
3943 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3944 clipniter: maximum number of iteration of 'clipthresh'-sigma
3945 clipping (default is 0)
3946 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3947 plot the fit and the residual. In this each
3948 indivual fit has to be approved, by typing 'y'
3949 or 'n'
3950 getresidual: if False, returns best-fit values instead of
3951 residual. (default is True)
3952 showprogress: show progress status for large data.
3953 default is True.
3954 minnrow: minimum number of input spectra to show.
3955 default is 1000.
3956 outlog: Output the coefficients of the best-fit
3957 function to logger (default is False)
3958 blfile: Name of a text file in which the best-fit
3959 parameter values to be written
3960 (default is "": no file/logger output)
3961 csvformat: if True blfile is csv-formatted, default is False.
3962 bltable: name of a baseline table where fitting results
3963 (coefficients, rms, etc.) are to be written.
3964 if given, fitting results will NOT be output to
3965 scantable (insitu=True) or None will be
3966 returned (insitu=False).
3967 (default is "": no table output)
3968
3969 Example:
3970 # return a scan baselined by a cubic spline consisting of 2 pieces
3971 # (i.e., 1 internal knot),
3972 # also with 3-sigma clipping, iteration up to 4 times
3973 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3974
3975 Note:
3976 The best-fit parameter values output in logger and/or blfile are now
3977 based on specunit of 'channel'.
3978 """
3979
3980 try:
3981 varlist = vars()
3982
3983 if insitu is None: insitu = rcParams['insitu']
3984 if insitu:
3985 workscan = self
3986 else:
3987 workscan = self.copy()
3988
3989 if mask is None: mask = []
3990 if order is None: order = 5
3991 if clipthresh is None: clipthresh = 3.0
3992 if clipniter is None: clipniter = 0
3993 if plot is None: plot = False
3994 if getresidual is None: getresidual = True
3995 if showprogress is None: showprogress = True
3996 if minnrow is None: minnrow = 1000
3997 if outlog is None: outlog = False
3998 if blfile is None: blfile = ''
3999 if csvformat is None: csvformat = False
4000 if bltable is None: bltable = ''
4001
4002 scsvformat = 'T' if csvformat else 'F'
4003
4004 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4005 workscan._chebyshev_baseline(mask, order,
4006 clipthresh, clipniter,
4007 getresidual,
4008 pack_progress_params(showprogress,
4009 minnrow),
4010 outlog, scsvformat+blfile,
4011 bltable)
4012 workscan._add_history("chebyshev_baseline", varlist)
4013
4014 if bltable == '':
4015 if insitu:
4016 self._assign(workscan)
4017 else:
4018 return workscan
4019 else:
4020 if not insitu:
4021 return None
4022
4023 except RuntimeError, e:
4024 raise_fitting_failure_exception(e)
4025
4026 @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the call arguments, recorded in the history below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace None by the documented defaults
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # single-character flag prepended to blfile to tell the C++
            # layer whether the output should be CSV-formatted
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is
            # implemented as a fitter method; everything is delegated to C++.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table, not the scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4148
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None,
                      clipthresh=None, clipniter=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None,
                      bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace None by the documented defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # 'T'/'F' flag prepended to blfile for the C++ layer (CSV or not)
            scsvformat = 'T' if csvformat else 'F'

            if plot:
                # interactive, row-by-row fitting in Python
                # NOTE(review): output to blfile requires the file to already
                # exist (os.path.exists) -- confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # "H" makes _get_verify_action show its help text before
                # the first prompt; reset to None after the first row
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        # combine user mask with per-row flags (CAS-1434)
                        f.mask = mask_and(mask, workscan._getmask(r))
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            continue
                        elif accept_fit.upper() == "R":
                            # reject all remaining rows and stop
                            break
                        elif accept_fit.upper() == "A":
                            # accept all remaining rows without prompting
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive: delegate everything to the C++ layer
                workscan._poly_baseline(mask, order,
                                        clipthresh, clipniter,
                                        getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile,
                                        bltable)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4298
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # replace None by the documented defaults
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # 'T'/'F' flag prepended to blfile for the C++ layer (CSV or not)
            scsvformat = 'T' if csvformat else 'F'

            # flatten the (possibly nested, per-IF) edge spec into pairs
            edge = normalise_edge_param(edge)

            if plot:
                # interactive, row-by-row fitting in Python
                # NOTE(review): output to blfile requires the file to already
                # exist (os.path.exists) -- confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # "H" makes _get_verify_action show its help text before
                # the first prompt; reset to None after the first row
                action = "H"
                for r in rows:
                    # normalised edge list holds a (start, end) pair per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        # combine user mask with per-row flags (CAS-1434)
                        msk = mask_and(mask, workscan._getmask(r))
                    else: # mask=None
                        msk = workscan._getmask(r)
                    # detect spectral lines so they can be excluded from the fit
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            # reject all remaining rows and stop
                            break
                        elif accept_fit.upper() == "A":
                            # accept all remaining rows without prompting
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                # release the plotter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: delegate everything to the C++ layer
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                # results went to the baseline table, not the scantable
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4483
4484 def _init_blinfo(self):
4485 """\
4486 Initialise the following three auxiliary members:
4487 blpars : parameters of the best-fit baseline,
4488 masklists : mask data (edge positions of masked channels) and
4489 actualmask : mask data (in boolean list),
4490 to keep for use later (including output to logger/text files).
4491 Used by poly_baseline() and auto_poly_baseline() in case of
4492 'plot=True'.
4493 """
4494 self.blpars = []
4495 self.masklists = []
4496 self.actualmask = []
4497 return
4498
4499 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4500 """\
4501 Append baseline-fitting related info to blpars, masklist and
4502 actualmask.
4503 """
4504 self.blpars.append(data_blpars)
4505 self.masklists.append(data_masklists)
4506 self.actualmask.append(data_actualmask)
4507 return
4508
4509 @asaplog_post_dec
4510 def rotate_linpolphase(self, angle):
4511 """\
4512 Rotate the phase of the complex polarization O=Q+iU correlation.
4513 This is always done in situ in the raw data. So if you call this
4514 function more than once then each call rotates the phase further.
4515
4516 Parameters:
4517
4518 angle: The angle (degrees) to rotate (add) by.
4519
4520 Example::
4521
4522 scan.rotate_linpolphase(2.3)
4523
4524 """
4525 varlist = vars()
4526 self._math._rotate_linpolphase(self, angle)
4527 self._add_history("rotate_linpolphase", varlist)
4528 return
4529
4530 @asaplog_post_dec
4531 def rotate_xyphase(self, angle):
4532 """\
4533 Rotate the phase of the XY correlation. This is always done in situ
4534 in the data. So if you call this function more than once
4535 then each call rotates the phase further.
4536
4537 Parameters:
4538
4539 angle: The angle (degrees) to rotate (add) by.
4540
4541 Example::
4542
4543 scan.rotate_xyphase(2.3)
4544
4545 """
4546 varlist = vars()
4547 self._math._rotate_xyphase(self, angle)
4548 self._add_history("rotate_xyphase", varlist)
4549 return
4550
4551 @asaplog_post_dec
4552 def swap_linears(self):
4553 """\
4554 Swap the linear polarisations XX and YY, or better the first two
4555 polarisations as this also works for ciculars.
4556 """
4557 varlist = vars()
4558 self._math._swap_linears(self)
4559 self._add_history("swap_linears", varlist)
4560 return
4561
4562 @asaplog_post_dec
4563 def invert_phase(self):
4564 """\
4565 Invert the phase of the complex polarisation
4566 """
4567 varlist = vars()
4568 self._math._invert_phase(self)
4569 self._add_history("invert_phase", varlist)
4570 return
4571
4572 @asaplog_post_dec
4573 def add(self, offset, insitu=None):
4574 """\
4575 Return a scan where all spectra have the offset added
4576
4577 Parameters:
4578
4579 offset: the offset
4580
4581 insitu: if False a new scantable is returned.
4582 Otherwise, the scaling is done in-situ
4583 The default is taken from .asaprc (False)
4584
4585 """
4586 if insitu is None: insitu = rcParams['insitu']
4587 self._math._setinsitu(insitu)
4588 varlist = vars()
4589 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4590 s._add_history("add", varlist)
4591 if insitu:
4592 self._assign(s)
4593 else:
4594 return s
4595
4596 @asaplog_post_dec
4597 def scale(self, factor, tsys=True, insitu=None):
4598 """\
4599
4600 Return a scan where all spectra are scaled by the given 'factor'
4601
4602 Parameters:
4603
4604 factor: the scaling factor (float or 1D float list)
4605
4606 insitu: if False a new scantable is returned.
4607 Otherwise, the scaling is done in-situ
4608 The default is taken from .asaprc (False)
4609
4610 tsys: if True (default) then apply the operation to Tsys
4611 as well as the data
4612
4613 """
4614 if insitu is None: insitu = rcParams['insitu']
4615 self._math._setinsitu(insitu)
4616 varlist = vars()
4617 s = None
4618 import numpy
4619 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4620 if isinstance(factor[0], list) or isinstance(factor[0],
4621 numpy.ndarray):
4622 from asapmath import _array2dOp
4623 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4624 else:
4625 s = scantable( self._math._arrayop( self, factor,
4626 "MUL", tsys ) )
4627 else:
4628 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4629 s._add_history("scale", varlist)
4630 if insitu:
4631 self._assign(s)
4632 else:
4633 return s
4634
4635 @preserve_selection
4636 def set_sourcetype(self, match, matchtype="pattern",
4637 sourcetype="reference"):
4638 """\
4639 Set the type of the source to be an source or reference scan
4640 using the provided pattern.
4641
4642 Parameters:
4643
4644 match: a Unix style pattern, regular expression or selector
4645
4646 matchtype: 'pattern' (default) UNIX style pattern or
4647 'regex' regular expression
4648
4649 sourcetype: the type of the source to use (source/reference)
4650
4651 """
4652 varlist = vars()
4653 stype = -1
4654 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4655 stype = 1
4656 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4657 stype = 0
4658 else:
4659 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4660 if matchtype.lower().startswith("p"):
4661 matchtype = "pattern"
4662 elif matchtype.lower().startswith("r"):
4663 matchtype = "regex"
4664 else:
4665 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4666 sel = selector()
4667 if isinstance(match, selector):
4668 sel = match
4669 else:
4670 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4671 self.set_selection(sel)
4672 self._setsourcetype(stype)
4673 self._add_history("set_sourcetype", varlist)
4674
4675
4676 def set_sourcename(self, name):
4677 varlist = vars()
4678 self._setsourcename(name)
4679 self._add_history("set_sourcename", varlist)
4680
4681 @asaplog_post_dec
4682 @preserve_selection
4683 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4684 """\
4685 This function allows to build quotients automatically.
4686 It assumes the observation to have the same number of
4687 "ons" and "offs"
4688
4689 Parameters:
4690
4691 preserve: you can preserve (default) the continuum or
4692 remove it. The equations used are
4693
4694 preserve: Output = Toff * (on/off) - Toff
4695
4696 remove: Output = Toff * (on/off) - Ton
4697
4698 mode: the on/off detection mode
4699 'paired' (default)
4700 identifies 'off' scans by the
4701 trailing '_R' (Mopra/Parkes) or
4702 '_e'/'_w' (Tid) and matches
4703 on/off pairs from the observing pattern
4704 'time'
4705 finds the closest off in time
4706
4707 .. todo:: verify argument is not implemented
4708
4709 """
4710 varlist = vars()
4711 modes = ["time", "paired"]
4712 if not mode in modes:
4713 msg = "please provide valid mode. Valid modes are %s" % (modes)
4714 raise ValueError(msg)
4715 s = None
4716 if mode.lower() == "paired":
4717 from asap._asap import srctype
4718 sel = self.get_selection()
4719 #sel.set_query("SRCTYPE==psoff")
4720 sel.set_types(srctype.psoff)
4721 self.set_selection(sel)
4722 offs = self.copy()
4723 #sel.set_query("SRCTYPE==pson")
4724 sel.set_types(srctype.pson)
4725 self.set_selection(sel)
4726 ons = self.copy()
4727 s = scantable(self._math._quotient(ons, offs, preserve))
4728 elif mode.lower() == "time":
4729 s = scantable(self._math._auto_quotient(self, mode, preserve))
4730 s._add_history("auto_quotient", varlist)
4731 return s
4732
4733 @asaplog_post_dec
4734 def mx_quotient(self, mask = None, weight='median', preserve=True):
4735 """\
4736 Form a quotient using "off" beams when observing in "MX" mode.
4737
4738 Parameters:
4739
4740 mask: an optional mask to be used when weight == 'stddev'
4741
4742 weight: How to average the off beams. Default is 'median'.
4743
4744 preserve: you can preserve (default) the continuum or
4745 remove it. The equations used are:
4746
4747 preserve: Output = Toff * (on/off) - Toff
4748
4749 remove: Output = Toff * (on/off) - Ton
4750
4751 """
4752 mask = mask or ()
4753 varlist = vars()
4754 on = scantable(self._math._mx_extract(self, 'on'))
4755 preoff = scantable(self._math._mx_extract(self, 'off'))
4756 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4757 from asapmath import quotient
4758 q = quotient(on, off, preserve)
4759 q._add_history("mx_quotient", varlist)
4760 return q
4761
4762 @asaplog_post_dec
4763 def freq_switch(self, insitu=None):
4764 """\
4765 Apply frequency switching to the data.
4766
4767 Parameters:
4768
4769 insitu: if False a new scantable is returned.
4770 Otherwise, the swictching is done in-situ
4771 The default is taken from .asaprc (False)
4772
4773 """
4774 if insitu is None: insitu = rcParams['insitu']
4775 self._math._setinsitu(insitu)
4776 varlist = vars()
4777 s = scantable(self._math._freqswitch(self))
4778 s._add_history("freq_switch", varlist)
4779 if insitu:
4780 self._assign(s)
4781 else:
4782 return s
4783
4784 @asaplog_post_dec
4785 def recalc_azel(self):
4786 """Recalculate the azimuth and elevation for each position."""
4787 varlist = vars()
4788 self._recalcazel()
4789 self._add_history("recalc_azel", varlist)
4790 return
4791
4792 @asaplog_post_dec
4793 def __add__(self, other):
4794 """
4795 implicit on all axes and on Tsys
4796 """
4797 varlist = vars()
4798 s = self.__op( other, "ADD" )
4799 s._add_history("operator +", varlist)
4800 return s
4801
4802 @asaplog_post_dec
4803 def __sub__(self, other):
4804 """
4805 implicit on all axes and on Tsys
4806 """
4807 varlist = vars()
4808 s = self.__op( other, "SUB" )
4809 s._add_history("operator -", varlist)
4810 return s
4811
4812 @asaplog_post_dec
4813 def __mul__(self, other):
4814 """
4815 implicit on all axes and on Tsys
4816 """
4817 varlist = vars()
4818 s = self.__op( other, "MUL" ) ;
4819 s._add_history("operator *", varlist)
4820 return s
4821
4822
4823 @asaplog_post_dec
4824 def __div__(self, other):
4825 """
4826 implicit on all axes and on Tsys
4827 """
4828 varlist = vars()
4829 s = self.__op( other, "DIV" )
4830 s._add_history("operator /", varlist)
4831 return s
4832
4833 @asaplog_post_dec
4834 def __op( self, other, mode ):
4835 s = None
4836 if isinstance(other, scantable):
4837 s = scantable(self._math._binaryop(self, other, mode))
4838 elif isinstance(other, float):
4839 if other == 0.0:
4840 raise ZeroDivisionError("Dividing by zero is not recommended")
4841 s = scantable(self._math._unaryop(self, other, mode, False))
4842 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4843 if isinstance(other[0], list) \
4844 or isinstance(other[0], numpy.ndarray):
4845 from asapmath import _array2dOp
4846 s = _array2dOp( self, other, mode, False )
4847 else:
4848 s = scantable( self._math._arrayop( self, other,
4849 mode, False ) )
4850 else:
4851 raise TypeError("Other input is not a scantable or float value")
4852 return s
4853
4854 @asaplog_post_dec
4855 def get_fit(self, row=0):
4856 """\
4857 Print or return the stored fits for a row in the scantable
4858
4859 Parameters:
4860
4861 row: the row which the fit has been applied to.
4862
4863 """
4864 if row > self.nrow():
4865 return
4866 from asap.asapfit import asapfit
4867 fit = asapfit(self._getfit(row))
4868 asaplog.push( '%s' %(fit) )
4869 return fit.as_dict()
4870
4871 @preserve_selection
4872 def flag_nans(self):
4873 """\
4874 Utility function to flag NaN values in the scantable.
4875 """
4876 import numpy
4877 basesel = self.get_selection()
4878 for i in range(self.nrow()):
4879 sel = self.get_row_selector(i)
4880 self.set_selection(basesel+sel)
4881 nans = numpy.isnan(self._getspectrum(0))
4882 if numpy.any(nans):
4883 bnans = [ bool(v) for v in nans]
4884 self.flag(bnans)
4885
4886 self.set_selection(basesel)
4887
4888 def get_row_selector(self, rowno):
4889 return selector(rows=[rowno])
4890
4891 def _add_history(self, funcname, parameters):
4892 if not rcParams['scantable.history']:
4893 return
4894 # create date
4895 sep = "##"
4896 from datetime import datetime
4897 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
4898 hist = dstr+sep
4899 hist += funcname+sep#cdate+sep
4900 if parameters.has_key('self'):
4901 del parameters['self']
4902 for k, v in parameters.iteritems():
4903 if type(v) is dict:
4904 for k2, v2 in v.iteritems():
4905 hist += k2
4906 hist += "="
4907 if isinstance(v2, scantable):
4908 hist += 'scantable'
4909 elif k2 == 'mask':
4910 if isinstance(v2, list) or isinstance(v2, tuple):
4911 hist += str(self._zip_mask(v2))
4912 else:
4913 hist += str(v2)
4914 else:
4915 hist += str(v2)
4916 else:
4917 hist += k
4918 hist += "="
4919 if isinstance(v, scantable):
4920 hist += 'scantable'
4921 elif k == 'mask':
4922 if isinstance(v, list) or isinstance(v, tuple):
4923 hist += str(self._zip_mask(v))
4924 else:
4925 hist += str(v)
4926 else:
4927 hist += str(v)
4928 hist += sep
4929 hist = hist[:-2] # remove trailing '##'
4930 self._addhistory(hist)
4931
4932
4933 def _zip_mask(self, mask):
4934 mask = list(mask)
4935 i = 0
4936 segments = []
4937 while mask[i:].count(1):
4938 i += mask[i:].index(1)
4939 if mask[i:].count(0):
4940 j = i + mask[i:].index(0)
4941 else:
4942 j = len(mask)
4943 segments.append([i, j])
4944 i = j
4945 return segments
4946
4947 def _get_ordinate_label(self):
4948 fu = "("+self.get_fluxunit()+")"
4949 import re
4950 lbl = "Intensity"
4951 if re.match(".K.", fu):
4952 lbl = "Brightness Temperature "+ fu
4953 elif re.match(".Jy.", fu):
4954 lbl = "Flux density "+ fu
4955 return lbl
4956
4957 def _check_ifs(self):
4958# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4959 nchans = [self.nchan(i) for i in self.getifnos()]
4960 nchans = filter(lambda t: t > 0, nchans)
4961 return (sum(nchans)/len(nchans) == nchans[0])
4962
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import the given datasets into this scantable, merging them all
        into a single table (re-initialising self in place).

        Parameters:
            names:   list of file/directory names to import; '~' and
                     environment variables are expanded, and missing
                     paths raise IOError
            unit:    flux unit to set on the result, or None to leave as-is
            average: if True, time-average integrations scan by scan
            opts:    extra options passed through to the filler's open()
        """
        first = True
        fullnames = []
        # validate and expand all names before importing anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 1 -> disk storage, 0 -> memory storage
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the filler matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # accumulate: merge what we already hold with the new table
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged/averaged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5003
5004 def _get_verify_action( self, msg, action=None ):
5005 valid_act = ['Y', 'N', 'A', 'R']
5006 if not action or not isinstance(action, str):
5007 action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
5008 if action == '':
5009 return "Y"
5010 elif (action.upper()[0] in valid_act):
5011 return action.upper()[0]
5012 elif (action.upper()[0] in ['H','?']):
5013 print "Available actions of verification [Y|n|a|r]"
5014 print " Y : Yes for current data (default)"
5015 print " N : No for current data"
5016 print " A : Accept all in the following and exit from verification"
5017 print " R : Reject all in the following and exit from verification"
5018 print " H or ?: help (show this message)"
5019 return self._get_verify_action(msg)
5020 else:
5021 return 'Y'
5022
5023 def __getitem__(self, key):
5024 if key < 0:
5025 key += self.nrow()
5026 if key >= self.nrow():
5027 raise IndexError("Row index out of range.")
5028 return self._getspectrum(key)
5029
5030 def __setitem__(self, key, value):
5031 if key < 0:
5032 key += self.nrow()
5033 if key >= self.nrow():
5034 raise IndexError("Row index out of range.")
5035 if not hasattr(value, "__len__") or \
5036 len(value) > self.nchan(self.getif(key)):
5037 raise ValueError("Spectrum length doesn't match.")
5038 return self._setspectrum(value, key)
5039
5040 def __len__(self):
5041 return self.nrow()
5042
5043 def __iter__(self):
5044 for i in range(len(self)):
5045 yield self[i]
# Note: See TracBrowser for help on using the repository browser.