source: trunk/python/scantable.py@ 2884

Last change on this file since 2884 was 2884, checked in by WataruKawasaki, 11 years ago

New Development: No

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: parameter syntax

Test Programs:

Put in Release Notes:

Module(s): sd

Description: changed the accepted selection syntax for values given in units of frequency or velocity, so that the unit string is now given only with the second value.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 199.7 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that remembers the scantable's current selection before
    calling *func* and restores it afterwards, even if *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        first_line = f.readline()
    finally:
        f.close()
    # a scantable's table.info starts with a 'Type = Scantable' header.
    # NOTE(review): the optional group also matches a bare 'Type = ' line —
    # presumably deliberate for old tables; confirm before tightening.
    match_pattern = '^Type = (Scantable)? *$'
    return re.match(match_pattern, first_line) is not None
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        first_line = f.readline()
    finally:
        f.close()
    # an MS identifies itself by 'Measurement Set' in its table.info header
    return 'Measurement Set' in first_line
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid

    # --- validation ------------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple "
                                     "has to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- normalisation to a canonical list shape -------------------------
    if isinstance(edge, int):
        edge = [edge, edge]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                  # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]  # e.g. [1] => [1,1]

    # a 'common' edge is one [left, right] pair applied to all IFs;
    # any nested pair, or more than two values, means per-IF values
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten: scalars are duplicated for both sides, pairs kept as-is
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it in verbose mode, otherwise
    re-raise it as a RuntimeError carrying the original message."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack the show-progress flag and minimum row count into the
    comma-separated string form expected by the lower-level layer."""
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        return [packed] if packed != '' else []
    if isinstance(blinfo, (list, tuple)):
        # pack each entry, silently skipping out-of-range rows ('')
        res = []
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.

    Parameters:

        blinfo:  dict with mandatory keys 'row', 'blfunc' and 'masklist',
                 plus the function-specific parameter ('order', 'npiece'
                 or 'nwave' depending on 'blfunc'), optional clipping
                 parameters ('clipthresh' + 'clipniter', both or none)
                 and optional linefinder parameters ('thresh', 'edge',
                 'chan_avg_limit', all three or none).
        maxirow: number of rows in the target scantable; an entry whose
                 'row' is out of range yields ''.

    Returns:

        a ':'-joined string of the packed fields, or '' when the row
        number is >= maxirow.

    Raises:

        ValueError: if a mandatory key is missing, or the clipping or
                    linefinder parameter groups are only partially given.
    """
    def _flatten_to_csv(val):
        # join a scalar or a (possibly nested) sequence into 'a,b,c,...'
        if isinstance(val, (list, tuple)):
            items = []
            for v in val:
                if isinstance(v, (list, tuple)):
                    items.extend(str(x) for x in v)
                else:
                    items.append(str(v))
            return ",".join(items)
        return str(val)

    dinfo = {}
    for key in ['row', 'blfunc', 'masklist']:
        # 'has_key' is deprecated; 'in' works on both python 2 and 3
        if key not in blinfo:
            raise ValueError("'"+key+"' is missing in blinfo.")
        val = blinfo[key]
        if key == 'row':
            irow = val
        dinfo[key] = _flatten_to_csv(val)

    # rows beyond the table are silently dropped by returning ''
    if irow >= maxirow:
        return ''

    for key in ['order', 'npiece', 'nwave']:
        if key in blinfo:
            dinfo[key] = _flatten_to_csv(blinfo[key])

    blfunc = dinfo['blfunc']
    # which function-specific parameter each baseline function requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order',
                  'cspline': 'npiece', 'sinusoid': 'nwave'}
    fspec_key = fspec_keys[blfunc]
    if fspec_key not in blinfo:
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # clipping parameters: all-or-nothing
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if key in blinfo:
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])
    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter']  = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder parameters: all-or-nothing
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if key in blinfo:
            lf_params_n += 1
            dinfo[key] = _flatten_to_csv(blinfo[key])
    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh']         = ''
        dinfo['edge']           = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'],
                dinfo['clipthresh'], dinfo['clipniter'],
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'],
                dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if the string *s* parses as a float.

    Leading/trailing whitespace is ignored.  The previous implementation
    used a bare 'except' plus 'return' inside 'finally', which silently
    swallowed *every* exception (including KeyboardInterrupt); only the
    conversion errors are caught now.
    """
    try:
        float(s.strip())
        return True
    except (ValueError, TypeError):
        return False
313
def is_frequency(s):
    """Return True if the string *s* ends in 'hz' (case-insensitive)."""
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a frequency-range pair of strings to a pair of floats.

    s1 is a bare number; s2 carries the unit, e.g. ('22.0', '22.1GHz').
    Both values are scaled by the SI prefix found on s2's unit, so the
    returned pair is in Hz.

    Raises RuntimeError if s1 is not a number or s2 is not a frequency.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    # SI prefixes and their scale factors.  The '.' entry catches values
    # written with a trailing decimal point (e.g. '100.Hz'): the char
    # before 'Hz' is then '.', which must map to a factor of 1.0.
    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    s1 = s1.strip()
    s2 = s2.strip()

    # character just before the 'Hz' suffix: a digit means no SI prefix
    prefix = s2[-3:-2]
    if is_number(prefix):
        res1 = float(s1)
        res2 = float(s2[:-2])
    else:
        factor = factor_list[prefix_list.index(prefix)]
        res1 = float(s1) * factor
        res2 = float(s2[:-3]) * factor

    return (res1, res2)
338
def is_velocity(s):
    """Return True if the string *s* ends in 'm/s' (case-insensitive)."""
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a velocity-range pair of strings to a pair of floats
    in km/s.

    s1 is a bare number; s2 carries the unit, e.g. ('-10', '10km/s').
    Values given in m/s are scaled to km/s.

    Raises RuntimeError if s1 is not a number or s2 is not a velocity.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s; the '.' entry catches
    # values written with a trailing decimal point (e.g. '10.m/s')
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    s1 = s1.strip()
    s2 = s2.strip()

    # character just before the 'm/s' suffix: a digit means plain m/s
    prefix = s2[-4:-3]
    if is_number(prefix): # in case velocity unit m/s
        res1 = float(s1) * 1e-3
        res2 = float(s2[:-3]) * 1e-3
    else:
        factor = factor_list[prefix_list.index(prefix)]
        res1 = float(s1) * factor
        res2 = float(s2[:-4]) * factor

    return (res1, res2)
364
def get_frequency_by_velocity(restfreq, vel):
    """Convert a radial velocity to sky frequency using the relativistic
    Doppler formula.

    Parameters:

        restfreq: the rest frequency (any unit; the result has the same)
        vel:      radial velocity in km/s (positive = receding)

    """
    import math
    vel_c = 299792.458  # speed of light in km/s
    beta = vel / vel_c
    return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
374
375
376class scantable(Scantable):
377 """\
378 The ASAP container for scans (single-dish data).
379 """
380
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # 'copy constructor': reference an existing low-level Scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # an existing scantable on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific options
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file: hand it to the (NRO) filler
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
467
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        # refuse to clobber an existing file unless overwrite=True
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has its own writer; overwrite handling is passed through
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic scantable writer
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
526
527 def copy(self):
528 """Return a copy of this scantable.
529
530 *Note*:
531
532 This makes a full (deep) copy. scan2 = scan1 makes a reference.
533
534 Example::
535
536 copiedscan = scan.copy()
537
538 """
539 sd = scantable(Scantable._copy(self))
540 return sd
541
542 def drop_scan(self, scanid=None):
543 """\
544 Return a new scantable where the specified scan number(s) has(have)
545 been dropped.
546
547 Parameters:
548
549 scanid: a (list of) scan number(s)
550
551 """
552 from asap import _is_sequence_or_number as _is_valid
553 from asap import _to_list
554 from asap import unique
555 if not _is_valid(scanid):
556 raise RuntimeError( 'Please specify a scanno to drop from the'
557 ' scantable' )
558 scanid = _to_list(scanid)
559 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
560 for sid in scanid: allscans.remove(sid)
561 if len(allscans) == 0:
562 raise ValueError("Can't remove all scans")
563 sel = selector(scans=allscans)
564 return self._select_copy(sel)
565
566 def _select_copy(self, selection):
567 orig = self.get_selection()
568 self.set_selection(orig+selection)
569 cp = self.copy()
570 self.set_selection(orig)
571 return cp
572
573 def get_scan(self, scanid=None):
574 """\
575 Return a specific scan (by scanno) or collection of scans (by
576 source name) in a new scantable.
577
578 *Note*:
579
580 See scantable.drop_scan() for the inverse operation.
581
582 Parameters:
583
584 scanid: a (list of) scanno or a source name, unix-style
585 patterns are accepted for source name matching, e.g.
586 '*_R' gets all 'ref scans
587
588 Example::
589
590 # get all scans containing the source '323p459'
591 newscan = scan.get_scan('323p459')
592 # get all 'off' scans
593 refscans = scan.get_scan('*_R')
594 # get a susbset of scans by scanno (as listed in scan.summary())
595 newscan = scan.get_scan([0, 2, 7, 10])
596
597 """
598 if scanid is None:
599 raise RuntimeError( 'Please specify a scan no or name to '
600 'retrieve from the scantable' )
601 try:
602 bsel = self.get_selection()
603 sel = selector()
604 if type(scanid) is str:
605 sel.set_name(scanid)
606 return self._select_copy(sel)
607 elif type(scanid) is int:
608 sel.set_scans([scanid])
609 return self._select_copy(sel)
610 elif type(scanid) is list:
611 sel.set_scans(scanid)
612 return self._select_copy(sel)
613 else:
614 msg = "Illegal scanid type, use 'int' or 'list' if ints."
615 raise TypeError(msg)
616 except RuntimeError:
617 raise
618
619 def __str__(self):
620 tempFile = tempfile.NamedTemporaryFile()
621 Scantable._summary(self, tempFile.name)
622 tempFile.seek(0)
623 asaplog.clear()
624 return tempFile.file.read()
625
626 @asaplog_post_dec
627 def summary(self, filename=None):
628 """\
629 Print a summary of the contents of this scantable.
630
631 Parameters:
632
633 filename: the name of a file to write the putput to
634 Default - no file output
635
636 """
637 if filename is not None:
638 if filename is "":
639 filename = 'scantable_summary.txt'
640 from os.path import expandvars, isdir
641 filename = expandvars(filename)
642 if isdir(filename):
643 msg = "Illegal file name '%s'." % (filename)
644 raise IOError(msg)
645 else:
646 filename = ""
647 Scantable._summary(self, filename)
648
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the spectrum from

        """
        # thin wrapper around the underlying Scantable implementation
        return self._getspectrum(rowno)
658
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

             rowno:   the row number to retrieve the mask from

        """
        # thin wrapper around the underlying Scantable implementation
        return self._getmask(rowno)
668
669 def set_spectrum(self, spec, rowno):
670 """Set the spectrum for the current row in the scantable.
671
672 Parameters:
673
674 spec: the new spectrum
675
676 rowno: the row number to set the spectrum for
677
678 """
679 assert(len(spec) == self.nchan(self.getif(rowno)))
680 return self._setspectrum(spec, rowno)
681
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the low-level coordinate in the python-side helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
704
705 def get_selection(self):
706 """\
707 Get the selection object currently set on this scantable.
708
709 Example::
710
711 sel = scan.get_selection()
712 sel.set_ifs(0) # select IF 0
713 scan.set_selection(sel) # apply modified selection
714
715 """
716 return selector(self._getselection())
717
718 def set_selection(self, selection=None, **kw):
719 """\
720 Select a subset of the data. All following operations on this scantable
721 are only applied to thi selection.
722
723 Parameters:
724
725 selection: a selector object (default unset the selection), or
726 any combination of 'pols', 'ifs', 'beams', 'scans',
727 'cycles', 'name', 'query'
728
729 Examples::
730
731 sel = selector() # create a selection object
732 self.set_scans([0, 3]) # select SCANNO 0 and 3
733 scan.set_selection(sel) # set the selection
734 scan.summary() # will only print summary of scanno 0 an 3
735 scan.set_selection() # unset the selection
736 # or the equivalent
737 scan.set_selection(scans=[0,3])
738 scan.summary() # will only print summary of scanno 0 an 3
739 scan.set_selection() # unset the selection
740
741 """
742 if selection is None:
743 # reset
744 if len(kw) == 0:
745 selection = selector()
746 else:
747 # try keywords
748 for k in kw:
749 if k not in selector.fields:
750 raise KeyError("Invalid selection key '%s', "
751 "valid keys are %s" % (k,
752 selector.fields))
753 selection = selector(**kw)
754 self._setselection(selection)
755
756 def get_row(self, row=0, insitu=None):
757 """\
758 Select a row in the scantable.
759 Return a scantable with single row.
760
761 Parameters:
762
763 row: row no of integration, default is 0.
764 insitu: if False a new scantable is returned. Otherwise, the
765 scaling is done in-situ. The default is taken from .asaprc
766 (False)
767
768 """
769 if insitu is None:
770 insitu = rcParams['insitu']
771 if not insitu:
772 workscan = self.copy()
773 else:
774 workscan = self
775 # Select a row
776 sel = selector()
777 sel.set_rows([row])
778 workscan.set_selection(sel)
779 if not workscan.nrow() == 1:
780 msg = "Could not identify single row. %d rows selected." \
781 % (workscan.nrow())
782 raise RuntimeError(msg)
783 if insitu:
784 self._assign(workscan)
785 else:
786 return workscan
787
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa of the extremum, not the value
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max need the channel of the extremum for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # for '*_abc' the abscissa IS the statistic value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                # per-row statistic when all rows were processed
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # single-row processing produced a one-element list
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # write the report through a per-user temp file and push it to the log
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
895
896 def chan2data(self, rowno=0, chan=0):
897 """\
898 Returns channel/frequency/velocity and spectral value
899 at an arbitrary row and channel in the scantable.
900
901 Parameters:
902
903 rowno: a row number in the scantable. Default is the
904 first row, i.e. rowno=0
905
906 chan: a channel in the scantable. Default is the first
907 channel, i.e. pos=0
908
909 """
910 if isinstance(rowno, int) and isinstance(chan, int):
911 qx = {'unit': self.get_unit(),
912 'value': self._getabcissa(rowno)[chan]}
913 qy = {'unit': self.get_fluxunit(),
914 'value': self._getspectrum(rowno)[chan]}
915 return qx, qy
916
917 def stddev(self, mask=None):
918 """\
919 Determine the standard deviation of the current beam/if/pol
920 Takes a 'mask' as an optional parameter to specify which
921 channels should be excluded.
922
923 Parameters:
924
925 mask: an optional mask specifying where the standard
926 deviation should be determined.
927
928 Example::
929
930 scan.set_unit('channel')
931 msk = scan.create_mask([100, 200], [500, 600])
932 scan.stddev(mask=m)
933
934 """
935 return self.stats(stat='stddev', mask=mask);
936
937
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the low-level sequence into a plain python list
        return list(Scantable.get_column_names(self))
943
944 def get_tsys(self, row=-1):
945 """\
946 Return the System temperatures.
947
948 Parameters:
949
950 row: the rowno to get the information for. (default all rows)
951
952 Returns:
953
954 a list of Tsys values for the current selection
955
956 """
957 if row > -1:
958 return self._get_column(self._gettsys, row)
959 return self._row_callback(self._gettsys, "Tsys")
960
    def get_tsysspectrum(self, row=-1):
        """\
        Return the channel dependent system temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # thin wrapper around the generic column accessor
        return self._get_column( self._gettsysspectrum, row )
975
976 def set_tsys(self, values, row=-1):
977 """\
978 Set the Tsys value(s) of the given 'row' or the whole scantable
979 (selection).
980
981 Parameters:
982
983 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
984 row: the row number to apply Tsys values to.
985 (default all rows)
986
987 """
988
989 if not hasattr(values, "__len__"):
990 values = [values]
991 self._settsys(values, row)
992
993 def get_weather(self, row=-1):
994 """\
995 Return the weather informations.
996
997 Parameters:
998
999 row: the rowno to get the information for. (default all rows)
1000
1001 Returns:
1002
1003 a dict or list of of dicts of values for the current selection
1004
1005 """
1006
1007 values = self._get_column(self._get_weather, row)
1008 if row > -1:
1009 return {'temperature': values[0],
1010 'pressure': values[1], 'humidity' : values[2],
1011 'windspeed' : values[3], 'windaz' : values[4]
1012 }
1013 else:
1014 out = []
1015 for r in values:
1016
1017 out.append({'temperature': r[0],
1018 'pressure': r[1], 'humidity' : r[2],
1019 'windspeed' : r[3], 'windaz' : r[4]
1020 })
1021 return out
1022
1023 def _row_callback(self, callback, label):
1024 out = ""
1025 outvec = []
1026 sep = '-'*50
1027 for i in range(self.nrow()):
1028 tm = self._gettime(i)
1029 src = self._getsourcename(i)
1030 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
1031 out += 'Time[%s]:\n' % (tm)
1032 if self.nbeam(-1) > 1:
1033 out += ' Beam[%d] ' % (self.getbeam(i))
1034 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
1035 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
1036 outvec.append(callback(i))
1037 out += '= %3.3f\n' % (outvec[i])
1038 out += sep+'\n'
1039
1040 asaplog.push(sep)
1041 asaplog.push(" %s" % (label))
1042 asaplog.push(sep)
1043 asaplog.push(out)
1044 asaplog.post()
1045 return outvec
1046
1047 def _get_column(self, callback, row=-1, *args):
1048 """
1049 """
1050 if row == -1:
1051 return [callback(i, *args) for i in range(self.nrow())]
1052 else:
1053 if 0 <= row < self.nrow():
1054 return callback(row, *args)
1055
1056
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # progressively strip sub-second, second and minute fields
            # from the parse format to match the reduced precision
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1102
1103
1104 def get_inttime(self, row=-1):
1105 """\
1106 Get a list of integration times for the observations.
1107 Return a time in seconds for each integration in the scantable.
1108
1109 Parameters:
1110
1111 row: row no of integration. Default -1 return all rows.
1112
1113 """
1114 return self._get_column(self._getinttime, row)
1115
1116
1117 def get_sourcename(self, row=-1):
1118 """\
1119 Get a list source names for the observations.
1120 Return a string for each integration in the scantable.
1121 Parameters:
1122
1123 row: row no of integration. Default -1 return all rows.
1124
1125 """
1126 return self._get_column(self._getsourcename, row)
1127
1128 def get_elevation(self, row=-1):
1129 """\
1130 Get a list of elevations for the observations.
1131 Return a float for each integration in the scantable.
1132
1133 Parameters:
1134
1135 row: row no of integration. Default -1 return all rows.
1136
1137 """
1138 return self._get_column(self._getelevation, row)
1139
1140 def get_azimuth(self, row=-1):
1141 """\
1142 Get a list of azimuths for the observations.
1143 Return a float for each integration in the scantable.
1144
1145 Parameters:
1146 row: row no of integration. Default -1 return all rows.
1147
1148 """
1149 return self._get_column(self._getazimuth, row)
1150
1151 def get_parangle(self, row=-1):
1152 """\
1153 Get a list of parallactic angles for the observations.
1154 Return a float for each integration in the scantable.
1155
1156 Parameters:
1157
1158 row: row no of integration. Default -1 return all rows.
1159
1160 """
1161 return self._get_column(self._getparangle, row)
1162
1163 def get_direction(self, row=-1):
1164 """
1165 Get a list of Positions on the sky (direction) for the observations.
1166 Return a string for each integration in the scantable.
1167
1168 Parameters:
1169
1170 row: row no of integration. Default -1 return all rows
1171
1172 """
1173 return self._get_column(self._getdirection, row)
1174
1175 def get_directionval(self, row=-1):
1176 """\
1177 Get a list of Positions on the sky (direction) for the observations.
1178 Return a float for each integration in the scantable.
1179
1180 Parameters:
1181
1182 row: row no of integration. Default -1 return all rows
1183
1184 """
1185 return self._get_column(self._getdirectionvec, row)
1186
1187 @asaplog_post_dec
1188 def set_unit(self, unit='channel'):
1189 """\
1190 Set the unit for all following operations on this scantable
1191
1192 Parameters:
1193
1194 unit: optional unit, default is 'channel'. Use one of '*Hz',
1195 'km/s', 'channel' or equivalent ''
1196
1197 """
1198 varlist = vars()
1199 if unit in ['', 'pixel', 'channel']:
1200 unit = ''
1201 inf = list(self._getcoordinfo())
1202 inf[0] = unit
1203 self._setcoordinfo(inf)
1204 self._add_history("set_unit", varlist)
1205
1206 @asaplog_post_dec
1207 def set_instrument(self, instr):
1208 """\
1209 Set the instrument for subsequent processing.
1210
1211 Parameters:
1212
1213 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1214 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1215
1216 """
1217 self._setInstrument(instr)
1218 self._add_history("set_instument", vars())
1219
1220 @asaplog_post_dec
1221 def set_feedtype(self, feedtype):
1222 """\
1223 Overwrite the feed type, which might not be set correctly.
1224
1225 Parameters:
1226
1227 feedtype: 'linear' or 'circular'
1228
1229 """
1230 self._setfeedtype(feedtype)
1231 self._add_history("set_feedtype", vars())
1232
1233 @asaplog_post_dec
1234 def set_doppler(self, doppler='RADIO'):
1235 """\
1236 Set the doppler for all following operations on this scantable.
1237
1238 Parameters:
1239
1240 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1241
1242 """
1243 varlist = vars()
1244 inf = list(self._getcoordinfo())
1245 inf[2] = doppler
1246 self._setcoordinfo(inf)
1247 self._add_history("set_doppler", vars())
1248
1249 @asaplog_post_dec
1250 def set_freqframe(self, frame=None):
1251 """\
1252 Set the frame type of the Spectral Axis.
1253
1254 Parameters:
1255
1256 frame: an optional frame type, default 'LSRK'. Valid frames are:
1257 'TOPO', 'LSRD', 'LSRK', 'BARY',
1258 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1259
1260 Example::
1261
1262 scan.set_freqframe('BARY')
1263
1264 """
1265 frame = frame or rcParams['scantable.freqframe']
1266 varlist = vars()
1267 # "REST" is not implemented in casacore
1268 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1269 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1270 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1271 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1272
1273 if frame in valid:
1274 inf = list(self._getcoordinfo())
1275 inf[1] = frame
1276 self._setcoordinfo(inf)
1277 self._add_history("set_freqframe", varlist)
1278 else:
1279 msg = "Please specify a valid freq type. Valid types are:\n", valid
1280 raise TypeError(msg)
1281
1282 @asaplog_post_dec
1283 def set_dirframe(self, frame=""):
1284 """\
1285 Set the frame type of the Direction on the sky.
1286
1287 Parameters:
1288
1289 frame: an optional frame type, default ''. Valid frames are:
1290 'J2000', 'B1950', 'GALACTIC'
1291
1292 Example:
1293
1294 scan.set_dirframe('GALACTIC')
1295
1296 """
1297 varlist = vars()
1298 Scantable.set_dirframe(self, frame)
1299 self._add_history("set_dirframe", varlist)
1300
1301 def get_unit(self):
1302 """\
1303 Get the default unit set in this scantable
1304
1305 Returns:
1306
1307 A unit string
1308
1309 """
1310 inf = self._getcoordinfo()
1311 unit = inf[0]
1312 if unit == '': unit = 'channel'
1313 return unit
1314
1315 @asaplog_post_dec
1316 def get_abcissa(self, rowno=0):
1317 """\
1318 Get the abcissa in the current coordinate setup for the currently
1319 selected Beam/IF/Pol
1320
1321 Parameters:
1322
1323 rowno: an optional row number in the scantable. Default is the
1324 first row, i.e. rowno=0
1325
1326 Returns:
1327
1328 The abcissa values and the format string (as a dictionary)
1329
1330 """
1331 abc = self._getabcissa(rowno)
1332 lbl = self._getabcissalabel(rowno)
1333 return abc, lbl
1334
1335 @asaplog_post_dec
1336 def flag(self, mask=None, unflag=False, row=-1):
1337 """\
1338 Flag the selected data using an optional channel mask.
1339
1340 Parameters:
1341
1342 mask: an optional channel mask, created with create_mask. Default
1343 (no mask) is all channels.
1344
1345 unflag: if True, unflag the data
1346
1347 row: an optional row number in the scantable.
1348 Default -1 flags all rows
1349
1350 """
1351 varlist = vars()
1352 mask = mask or []
1353 self._flag(row, mask, unflag)
1354 self._add_history("flag", varlist)
1355
1356 @asaplog_post_dec
1357 def flag_row(self, rows=None, unflag=False):
1358 """\
1359 Flag the selected data in row-based manner.
1360
1361 Parameters:
1362
1363 rows: list of row numbers to be flagged. Default is no row
1364 (must be explicitly specified to execute row-based
1365 flagging).
1366
1367 unflag: if True, unflag the data.
1368
1369 """
1370 varlist = vars()
1371 if rows is None:
1372 rows = []
1373 self._flag_row(rows, unflag)
1374 self._add_history("flag_row", varlist)
1375
1376 @asaplog_post_dec
1377 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1378 """\
1379 Flag the selected data outside a specified range (in channel-base)
1380
1381 Parameters:
1382
1383 uthres: upper threshold.
1384
1385 dthres: lower threshold
1386
1387 clipoutside: True for flagging data outside the range
1388 [dthres:uthres].
1389 False for flagging data inside the range.
1390
1391 unflag: if True, unflag the data.
1392
1393 """
1394 varlist = vars()
1395 self._clip(uthres, dthres, clipoutside, unflag)
1396 self._add_history("clip", varlist)
1397
1398 @asaplog_post_dec
1399 def lag_flag(self, start, end, unit="MHz", insitu=None):
1400 """\
1401 Flag the data in 'lag' space by providing a frequency to remove.
1402 Flagged data in the scantable get interpolated over the region.
1403 No taper is applied.
1404
1405 Parameters:
1406
1407 start: the start frequency (really a period within the
1408 bandwidth) or period to remove
1409
1410 end: the end frequency or period to remove
1411
1412 unit: the frequency unit (default 'MHz') or '' for
1413 explicit lag channels
1414
1415 *Notes*:
1416
1417 It is recommended to flag edges of the band or strong
1418 signals beforehand.
1419
1420 """
1421 if insitu is None: insitu = rcParams['insitu']
1422 self._math._setinsitu(insitu)
1423 varlist = vars()
1424 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1425 if not (unit == "" or base.has_key(unit)):
1426 raise ValueError("%s is not a valid unit." % unit)
1427 if unit == "":
1428 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1429 else:
1430 s = scantable(self._math._lag_flag(self, start*base[unit],
1431 end*base[unit], "frequency"))
1432 s._add_history("lag_flag", varlist)
1433 if insitu:
1434 self._assign(s)
1435 else:
1436 return s
1437
1438 @asaplog_post_dec
1439 def fft(self, rowno=None, mask=None, getrealimag=False):
1440 """\
1441 Apply FFT to the spectra.
1442 Flagged data in the scantable get interpolated over the region.
1443
1444 Parameters:
1445
1446 rowno: The row number(s) to be processed. int, list
1447 and tuple are accepted. By default (None), FFT
1448 is applied to the whole data.
1449
1450 mask: Auxiliary channel mask(s). Given as a boolean
1451 list, it is applied to all specified rows.
1452 A list of boolean lists can also be used to
1453 apply different masks. In the latter case, the
1454 length of 'mask' must be the same as that of
1455 'rowno'. The default is None.
1456
1457 getrealimag: If True, returns the real and imaginary part
1458 values of the complex results.
1459 If False (the default), returns the amplitude
1460 (absolute value) normalised with Ndata/2 and
1461 phase (argument, in unit of radian).
1462
1463 Returns:
1464
1465 A list of dictionaries containing the results for each spectrum.
1466 Each dictionary contains two values, the real and the imaginary
1467 parts when getrealimag = True, or the amplitude(absolute value)
1468 and the phase(argument) when getrealimag = False. The key for
1469 these values are 'real' and 'imag', or 'ampl' and 'phase',
1470 respectively.
1471 """
1472 if rowno is None:
1473 rowno = []
1474 if isinstance(rowno, int):
1475 rowno = [rowno]
1476 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1477 raise TypeError("The row number(s) must be int, list or tuple.")
1478 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1479
1480 usecommonmask = True
1481
1482 if mask is None:
1483 mask = []
1484 if isinstance(mask, list) or isinstance(mask, tuple):
1485 if len(mask) == 0:
1486 mask = [[]]
1487 else:
1488 if isinstance(mask[0], bool):
1489 if len(mask) != self.nchan(self.getif(rowno[0])):
1490 raise ValueError("The spectra and the mask have "
1491 "different length.")
1492 mask = [mask]
1493 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1494 usecommonmask = False
1495 if len(mask) != len(rowno):
1496 raise ValueError("When specifying masks for each "
1497 "spectrum, the numbers of them "
1498 "must be identical.")
1499 for i in xrange(mask):
1500 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1501 raise ValueError("The spectra and the mask have "
1502 "different length.")
1503 else:
1504 raise TypeError("The mask must be a boolean list or "
1505 "a list of boolean list.")
1506 else:
1507 raise TypeError("The mask must be a boolean list or a list of "
1508 "boolean list.")
1509
1510 res = []
1511
1512 imask = 0
1513 for whichrow in rowno:
1514 fspec = self._fft(whichrow, mask[imask], getrealimag)
1515 nspec = len(fspec)
1516
1517 i = 0
1518 v1 = []
1519 v2 = []
1520 reselem = {"real":[],"imag":[]} if getrealimag \
1521 else {"ampl":[],"phase":[]}
1522
1523 while (i < nspec):
1524 v1.append(fspec[i])
1525 v2.append(fspec[i+1])
1526 i += 2
1527
1528 if getrealimag:
1529 reselem["real"] += v1
1530 reselem["imag"] += v2
1531 else:
1532 reselem["ampl"] += v1
1533 reselem["phase"] += v2
1534
1535 res.append(reselem)
1536
1537 if not usecommonmask:
1538 imask += 1
1539
1540 return res
1541
1542 @asaplog_post_dec
1543 def create_mask(self, *args, **kwargs):
1544 """\
1545 Compute and return a mask based on [min, max] windows.
1546 The specified windows are to be INCLUDED, when the mask is
1547 applied.
1548
1549 Parameters:
1550
1551 [min, max], [min2, max2], ...
1552 Pairs of start/end points (inclusive)specifying the regions
1553 to be masked
1554
1555 invert: optional argument. If specified as True,
1556 return an inverted mask, i.e. the regions
1557 specified are EXCLUDED
1558
1559 row: create the mask using the specified row for
1560 unit conversions, default is row=0
1561 only necessary if frequency varies over rows.
1562
1563 Examples::
1564
1565 scan.set_unit('channel')
1566 # a)
1567 msk = scan.create_mask([400, 500], [800, 900])
1568 # masks everything outside 400 and 500
1569 # and 800 and 900 in the unit 'channel'
1570
1571 # b)
1572 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1573 # masks the regions between 400 and 500
1574 # and 800 and 900 in the unit 'channel'
1575
1576 # c)
1577 #mask only channel 400
1578 msk = scan.create_mask([400])
1579
1580 """
1581 row = kwargs.get("row", 0)
1582 data = self._getabcissa(row)
1583 u = self._getcoordinfo()[0]
1584 if u == "":
1585 u = "channel"
1586 msg = "The current mask window unit is %s" % u
1587 i = self._check_ifs()
1588 if not i:
1589 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1590 asaplog.push(msg)
1591 n = len(data)
1592 msk = _n_bools(n, False)
1593 # test if args is a 'list' or a 'normal *args - UGLY!!!
1594
1595 ws = (isinstance(args[-1][-1], int)
1596 or isinstance(args[-1][-1], float)) and args or args[0]
1597 for window in ws:
1598 if len(window) == 1:
1599 window = [window[0], window[0]]
1600 if len(window) == 0 or len(window) > 2:
1601 raise ValueError("A window needs to be defined as "
1602 "[start(, end)]")
1603 if window[0] > window[1]:
1604 tmp = window[0]
1605 window[0] = window[1]
1606 window[1] = tmp
1607 for i in range(n):
1608 if data[i] >= window[0] and data[i] <= window[1]:
1609 msk[i] = True
1610 if kwargs.has_key('invert'):
1611 if kwargs.get('invert'):
1612 msk = mask_not(msk)
1613 return msk
1614
1615 def get_masklist(self, mask=None, row=0, silent=False):
1616 """\
1617 Compute and return a list of mask windows, [min, max].
1618
1619 Parameters:
1620
1621 mask: channel mask, created with create_mask.
1622
1623 row: calcutate the masklist using the specified row
1624 for unit conversions, default is row=0
1625 only necessary if frequency varies over rows.
1626
1627 Returns:
1628
1629 [min, max], [min2, max2], ...
1630 Pairs of start/end points (inclusive)specifying
1631 the masked regions
1632
1633 """
1634 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1635 raise TypeError("The mask should be list or tuple.")
1636 if len(mask) <= 0:
1637 raise TypeError("The mask elements should be > 0")
1638 data = self._getabcissa(row)
1639 if len(data) != len(mask):
1640 msg = "Number of channels in scantable != number of mask elements"
1641 raise TypeError(msg)
1642 u = self._getcoordinfo()[0]
1643 if u == "":
1644 u = "channel"
1645 msg = "The current mask window unit is %s" % u
1646 i = self._check_ifs()
1647 if not i:
1648 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1649 if not silent:
1650 asaplog.push(msg)
1651 masklist = []
1652 ist, ien = None, None
1653 ist, ien=self.get_mask_indices(mask)
1654 if ist is not None and ien is not None:
1655 for i in xrange(len(ist)):
1656 range=[data[ist[i]],data[ien[i]]]
1657 range.sort()
1658 masklist.append([range[0],range[1]])
1659 return masklist
1660
1661 def get_mask_indices(self, mask=None):
1662 """\
1663 Compute and Return lists of mask start indices and mask end indices.
1664
1665 Parameters:
1666
1667 mask: channel mask, created with create_mask.
1668
1669 Returns:
1670
1671 List of mask start indices and that of mask end indices,
1672 i.e., [istart1,istart2,....], [iend1,iend2,....].
1673
1674 """
1675 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1676 raise TypeError("The mask should be list or tuple.")
1677 if len(mask) <= 0:
1678 raise TypeError("The mask elements should be > 0")
1679 istart = []
1680 iend = []
1681 if mask[0]:
1682 istart.append(0)
1683 for i in range(len(mask)-1):
1684 if not mask[i] and mask[i+1]:
1685 istart.append(i+1)
1686 elif mask[i] and not mask[i+1]:
1687 iend.append(i)
1688 if mask[len(mask)-1]:
1689 iend.append(len(mask)-1)
1690 if len(istart) != len(iend):
1691 raise RuntimeError("Numbers of mask start != mask end.")
1692 for i in range(len(istart)):
1693 if istart[i] > iend[i]:
1694 raise RuntimeError("Mask start index > mask end index")
1695 break
1696 return istart,iend
1697
1698 @asaplog_post_dec
1699 def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
1700 """
1701 Parse MS type spw/channel selection syntax.
1702
1703 Parameters:
1704 selectstring : A string expression of spw and channel selection.
1705 Comma-separated expressions mean different spw -
1706 channel combinations. Spws and channel selections
1707 are partitioned by a colon ':'. In a single
1708 selection expression, you can put multiple values
1709 separated by semicolons ';'. Both for spw and
1710 channel selection, allowed cases include single
1711 value, blank('') or asterisk('*') to specify all
1712 available values, two values connected with a
1713 tilde ('~') to specify an inclusive range. Unit
1714 strings for frequency or velocity can be added to
1715 the tilde-connected values. For channel selection
1716 expression, placing a '<' or a '>' is possible to
1717 specify a semi-infinite interval as well.
1718
1719 examples:
1720 '' or '*' = all spws (all channels)
1721 '<2,4~6,9' = Spws 0,1,4,5,6,9 (all channels)
1722 '3:3~45;60' = channels 3 to 45 and 60 in spw 3
1723 '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
1724 all channels in spw8
1725 '1.3~1.5GHz' = all spws that fall in or have at
1726 least some overwrap with frequency
1727 range between 1.3GHz and 1.5GHz.
1728 '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
1729 between the specified
1730 frequency range in spws
1731 that fall in or have
1732 overwrap with the
1733 specified frequency
1734 range.
1735 '1:-200~250km/s' = channels that fall between the
1736 specified velocity range in
1737 spw 1.
1738 Returns:
1739 A dictionary of selected (valid) spw and masklist pairs,
1740 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1741 """
1742 if not isinstance(selectstring, str):
1743 asaplog.post()
1744 asaplog.push("Expression of spw/channel selection must be a string.")
1745 asaplog.post("ERROR")
1746
1747 orig_unit = self.get_unit()
1748 self.set_unit('channel')
1749
1750 orig_restfreq = self.get_restfreqs()
1751 orig_restfreq_list = []
1752 for i in orig_restfreq.keys():
1753 if len(orig_restfreq[i]) == 1:
1754 orig_restfreq_list.append(orig_restfreq[i][0])
1755 else:
1756 orig_restfreq_list.append(orig_restfreq[i])
1757
1758 orig_coord = self._getcoordinfo()
1759 orig_frame = orig_coord[1]
1760 orig_doppler = orig_coord[2]
1761
1762 if restfreq is None: restfreq = orig_restfreq_list
1763 if frame is None: frame = orig_frame
1764 if doppler is None: doppler = orig_doppler
1765
1766 self.set_restfreqs(restfreq)
1767 self.set_freqframe(frame)
1768 self.set_doppler(doppler)
1769
1770 valid_ifs = self.getifnos()
1771
1772 comma_sep = selectstring.split(",")
1773 res = {}
1774
1775 for cms_elem in comma_sep:
1776 if (cms_elem.strip() == ""): continue
1777
1778 colon_sep = cms_elem.split(":")
1779
1780 if (len(colon_sep) > 2):
1781 raise RuntimeError("Invalid selection expression: more than two colons!")
1782
1783 # parse spw expression and store result in spw_list.
1784 # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
1785 # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
1786 # 'a~b*m/s' (where * can be '' or 'k') and also
1787 # several of the above expressions connected with ';'.
1788
1789 spw_list = []
1790
1791 semicolon_sep = colon_sep[0].split(";")
1792
1793 for scs_elem in semicolon_sep:
1794 scs_elem = scs_elem.strip()
1795
1796 lt_sep = scs_elem.split("<")
1797 gt_sep = scs_elem.split(">")
1798 ti_sep = scs_elem.split("~")
1799
1800 lt_sep_length = len(lt_sep)
1801 gt_sep_length = len(gt_sep)
1802 ti_sep_length = len(ti_sep)
1803
1804 len_product = lt_sep_length * gt_sep_length * ti_sep_length
1805
1806 if (len_product > 2):
1807 # '<', '>' and '~' must not coexist in a single spw expression
1808
1809 raise RuntimeError("Invalid spw selection.")
1810
1811 elif (len_product == 1):
1812 # '', '*', or single spw number.
1813
1814 if (scs_elem == "") or (scs_elem == "*"):
1815 spw_list = valid_ifs[:] # deep copy
1816
1817 else: # single number
1818 try:
1819 #checking if the given number is valid for spw ID
1820 idx = valid_ifs.index(int(scs_elem))
1821 spw_list.append(valid_ifs[idx])
1822
1823 except:
1824 asaplog.post()
1825 asaplog.push("Wrong spw number (" + scs_elem + ") given. ignored.")
1826 asaplog.post("WARNING")
1827
1828 else: # (len_product == 2)
1829 # namely, one of '<', '>' or '~' appers just once.
1830
1831 if (lt_sep_length == 2): # '<a'
1832 if is_number(lt_sep[1]):
1833 for i in valid_ifs:
1834 if (i < float(lt_sep[1])):
1835 spw_list.append(i)
1836
1837 else:
1838 RuntimeError("Invalid spw selection.")
1839
1840 elif (gt_sep_length == 2): # '>a'
1841 if is_number(gt_sep[1]):
1842 for i in valid_ifs:
1843 if (i > float(gt_sep[1])):
1844 spw_list.append(i)
1845
1846 else:
1847 RuntimeError("Invalid spw selection.")
1848
1849 else: # (ti_sep_length == 2) where both boundaries inclusive
1850 expr0 = ti_sep[0].strip()
1851 expr1 = ti_sep[1].strip()
1852
1853 if is_number(expr0) and is_number(expr1):
1854 # 'a~b'
1855 expr_pmin = min(float(expr0), float(expr1))
1856 expr_pmax = max(float(expr0), float(expr1))
1857 for i in valid_ifs:
1858 if (expr_pmin <= i) and (i <= expr_pmax):
1859 spw_list.append(i)
1860
1861 elif is_number(expr0) and is_frequency(expr1):
1862 # 'a~b*Hz'
1863 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
1864
1865 for coord in self._get_coordinate_list():
1866 expr_p0 = coord['coord'].to_pixel(expr_f0)
1867 expr_p1 = coord['coord'].to_pixel(expr_f1)
1868 expr_pmin = min(expr_p0, expr_p1)
1869 expr_pmax = max(expr_p0, expr_p1)
1870
1871 spw = coord['if']
1872 pmin = 0.0
1873 pmax = float(self.nchan(spw) - 1)
1874
1875 if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
1876 spw_list.append(spw)
1877
1878 elif is_number(expr0) and is_velocity(expr1):
1879 # 'a~b*m/s'
1880 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
1881 expr_vmin = min(expr_v0, expr_v1)
1882 expr_vmax = max(expr_v0, expr_v1)
1883
1884 for coord in self._get_coordinate_list():
1885 spw = coord['if']
1886
1887 pmin = 0.0
1888 pmax = float(self.nchan(spw) - 1)
1889
1890 vel0 = coord['coord'].to_velocity(pmin)
1891 vel1 = coord['coord'].to_velocity(pmax)
1892
1893 vmin = min(vel0, vel1)
1894 vmax = max(vel0, vel1)
1895
1896 if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
1897 spw_list.append(spw)
1898
1899 else:
1900 # cases such as 'aGHz~bkm/s' are not allowed now
1901 raise RuntimeError("Invalid spw selection.")
1902
1903 # parse channel expression and store the result in crange_list.
1904 # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
1905 # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
1906 # and also several of the above expressions connected with ';'.
1907
1908 for spw in spw_list:
1909 pmin = 0.0
1910 pmax = float(self.nchan(spw) - 1)
1911
1912 if (len(colon_sep) == 1):
1913 # no expression for channel selection,
1914 # which means all channels are to be selected.
1915 crange_list = [[pmin, pmax]]
1916
1917 else: # (len(colon_sep) == 2)
1918 crange_list = []
1919
1920 found = False
1921 for i in self._get_coordinate_list():
1922 if (i['if'] == spw):
1923 coord = i['coord']
1924 found = True
1925 break
1926
1927 if not found:
1928 raise RuntimeError("Invalid spw value.")
1929
1930 semicolon_sep = colon_sep[1].split(";")
1931 for scs_elem in semicolon_sep:
1932 scs_elem = scs_elem.strip()
1933
1934 ti_sep = scs_elem.split("~")
1935 ti_sep_length = len(ti_sep)
1936
1937 if (ti_sep_length > 2):
1938 raise RuntimeError("Invalid channel selection.")
1939
1940 elif (ti_sep_length == 1):
1941 if (scs_elem == "") or (scs_elem == "*"):
1942 # '' and '*' for all channels
1943 crange_list = [[pmin, pmax]]
1944 break
1945 elif (is_number(scs_elem)):
1946 # single channel given
1947 crange_list.append([float(scs_elem), float(scs_elem)])
1948 else:
1949 raise RuntimeError("Invalid channel selection.")
1950
1951 else: #(ti_sep_length == 2)
1952 expr0 = ti_sep[0].strip()
1953 expr1 = ti_sep[1].strip()
1954
1955 if is_number(expr0) and is_number(expr1):
1956 # 'a~b'
1957 expr_pmin = min(float(expr0), float(expr1))
1958 expr_pmax = max(float(expr0), float(expr1))
1959
1960 elif is_number(expr0) and is_frequency(expr1):
1961 # 'a~b*Hz'
1962 (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
1963 expr_p0 = coord.to_pixel(expr_f0)
1964 expr_p1 = coord.to_pixel(expr_f1)
1965 expr_pmin = min(expr_p0, expr_p1)
1966 expr_pmax = max(expr_p0, expr_p1)
1967
1968 elif is_number(expr0) and is_velocity(expr1):
1969 # 'a~b*m/s'
1970 restf = self.get_restfreqs().values()[0][0]
1971 (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
1972 expr_f0 = get_frequency_by_velocity(restf, expr_v0)
1973 expr_f1 = get_frequency_by_velocity(restf, expr_v1)
1974 expr_p0 = coord.to_pixel(expr_f0)
1975 expr_p1 = coord.to_pixel(expr_f1)
1976 expr_pmin = min(expr_p0, expr_p1)
1977 expr_pmax = max(expr_p0, expr_p1)
1978
1979 else:
1980 # cases such as 'aGHz~bkm/s' are not allowed now
1981 raise RuntimeError("Invalid channel selection.")
1982
1983 cmin = max(pmin, expr_pmin)
1984 cmax = min(pmax, expr_pmax)
1985 # if the given range of channel selection has overwrap with
1986 # that of current spw, output the overwrap area.
1987 if (cmin <= cmax):
1988 cmin = float(int(cmin + 0.5))
1989 cmax = float(int(cmax + 0.5))
1990 crange_list.append([cmin, cmax])
1991
1992 if (len(crange_list) == 0):
1993 crange_list.append([])
1994
1995 if res.has_key(spw):
1996 res[spw].extend(crange_list)
1997 else:
1998 res[spw] = crange_list
1999
2000 # restore original values
2001 self.set_restfreqs(orig_restfreq_list)
2002 self.set_freqframe(orig_frame)
2003 self.set_doppler(orig_doppler)
2004 self.set_unit(orig_unit)
2005
2006 return res
2007
2008 @asaplog_post_dec
2009 def get_first_rowno_by_if(self, ifno):
2010 found = False
2011 for irow in xrange(self.nrow()):
2012 if (self.getif(irow) == ifno):
2013 res = irow
2014 found = True
2015 break
2016
2017 if not found: raise RuntimeError("Invalid IF value.")
2018
2019 return res
2020
2021 @asaplog_post_dec
2022 def _get_coordinate_list(self):
2023 res = []
2024 spws = self.getifnos()
2025 for spw in spws:
2026 elem = {}
2027 elem['if'] = spw
2028 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2029 res.append(elem)
2030
2031 return res
2032
2033##################################
2034
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # NOTE(review): 'selspws' is re-initialised on every iteration,
            # so only the last range in 'spwmasklist' survives below -
            # confirm whether multiple IF ranges are meant to accumulate.
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # linear scan for the first row of this IF
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in the current unit,
                            # used to make '<'/'>' bounds exclusive
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None means "whole spectrum" for this IF
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                    del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "   IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2172
2173 @asaplog_post_dec
2174 def parse_idx_selection(self, mode, selexpr):
2175 """
2176 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2177 BEAMNO, and row number
2178
2179 Parameters:
2180 mode : which column to select.
2181 ['scan',|'if'|'pol'|'beam'|'row']
2182 selexpr : A comma separated selection expression.
2183 examples:
2184 '' = all (returns [])
2185 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2186 (returns [0,1,4,5,6,9])
2187 Returns:
2188 A List of selected indices
2189 """
2190 if selexpr == "":
2191 return []
2192 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2193 'b': 'beam', 'r': 'row'}
2194 smode = mode.lower()[0]
2195 if not (smode in valid_modes.keys()):
2196 msg = "Invalid mode '%s'. Valid modes are %s" %\
2197 (mode, str(valid_modes.values()))
2198 asaplog.post()
2199 asaplog.push(msg)
2200 asaplog.post("ERROR")
2201 mode = valid_modes[smode]
2202 minidx = None
2203 maxidx = None
2204 if smode == 'r':
2205 minidx = 0
2206 maxidx = self.nrow()
2207 else:
2208 idx = getattr(self,"get"+mode+"nos")()
2209 minidx = min(idx)
2210 maxidx = max(idx)
2211 del idx
2212 # split selexpr by "<spaces>,<spaces>"
2213 comma_sep = re.compile('\s*,\s*')
2214 sellist = comma_sep.split(selexpr)
2215 idxlist = []
2216 for currselstr in sellist:
2217 # single range (may include ~, < or >)
2218 currlist = self._parse_selection(currselstr, typestr='integer',
2219 minval=minidx,maxval=maxidx)
2220 for thelist in currlist:
2221 idxlist += range(thelist[0],thelist[1]+1)
2222 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2223 asaplog.push(msg)
2224 return idxlist
2225
2226 def _parse_selection(self, selstr, typestr='float', offset=0.,
2227 minval=None, maxval=None):
2228 """
2229 Parameters:
2230 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2231 typestr : The type of the values in returned list
2232 ('integer' or 'float')
2233 offset : The offset value to subtract from or add to
2234 the boundary value if the selection string
2235 includes '<' or '>' [Valid only for typestr='float']
2236 minval, maxval : The minimum/maximum values to set if the
2237 selection string includes '<' or '>'.
2238 The list element is filled with None by default.
2239 Returns:
2240 A list of min/max pair of selections.
2241 Example:
2242 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2243 --> returns [[0,2],[5,7],[9,9]]
2244 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2245 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2246 """
2247 # split selstr by '<spaces>;<spaces>'
2248 semi_sep = re.compile('\s*;\s*')
2249 selgroups = semi_sep.split(selstr)
2250 sellists = []
2251 if typestr.lower().startswith('int'):
2252 formatfunc = int
2253 offset = 1
2254 else:
2255 formatfunc = float
2256
2257 for currsel in selgroups:
2258 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2259 minsel = minval
2260 maxsel = maxval
2261 if currsel.find('~') > 0:
2262 # val0 <= x <= val1
2263 minsel = formatfunc(currsel.split('~')[0].strip())
2264 maxsel = formatfunc(currsel.split('~')[1].strip())
2265 elif currsel.strip().find('<=') > -1:
2266 bound = currsel.split('<=')
2267 try: # try "x <= val"
2268 minsel = minval
2269 maxsel = formatfunc(bound[1].strip())
2270 except ValueError: # now "val <= x"
2271 minsel = formatfunc(bound[0].strip())
2272 maxsel = maxval
2273 elif currsel.strip().find('>=') > -1:
2274 bound = currsel.split('>=')
2275 try: # try "x >= val"
2276 minsel = formatfunc(bound[1].strip())
2277 maxsel = maxval
2278 except ValueError: # now "val >= x"
2279 minsel = minval
2280 maxsel = formatfunc(bound[0].strip())
2281 elif currsel.strip().find('<') > -1:
2282 bound = currsel.split('<')
2283 try: # try "x < val"
2284 minsel = minval
2285 maxsel = formatfunc(bound[1].strip()) \
2286 - formatfunc(offset)
2287 except ValueError: # now "val < x"
2288 minsel = formatfunc(bound[0].strip()) \
2289 + formatfunc(offset)
2290 maxsel = maxval
2291 elif currsel.strip().find('>') > -1:
2292 bound = currsel.split('>')
2293 try: # try "x > val"
2294 minsel = formatfunc(bound[1].strip()) \
2295 + formatfunc(offset)
2296 maxsel = maxval
2297 except ValueError: # now "val > x"
2298 minsel = minval
2299 maxsel = formatfunc(bound[0].strip()) \
2300 - formatfunc(offset)
2301 else:
2302 minsel = formatfunc(currsel)
2303 maxsel = formatfunc(currsel)
2304 sellists.append([minsel,maxsel])
2305 return sellists
2306
2307# def get_restfreqs(self):
2308# """
2309# Get the restfrequency(s) stored in this scantable.
2310# The return value(s) are always of unit 'Hz'
2311# Parameters:
2312# none
2313# Returns:
2314# a list of doubles
2315# """
2316# return list(self._getrestfreqs())
2317
2318 def get_restfreqs(self, ids=None):
2319 """\
2320 Get the restfrequency(s) stored in this scantable.
2321 The return value(s) are always of unit 'Hz'
2322
2323 Parameters:
2324
2325 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2326 be retrieved
2327
2328 Returns:
2329
2330 dictionary containing ids and a list of doubles for each id
2331
2332 """
2333 if ids is None:
2334 rfreqs = {}
2335 idlist = self.getmolnos()
2336 for i in idlist:
2337 rfreqs[i] = list(self._getrestfreqs(i))
2338 return rfreqs
2339 else:
2340 if type(ids) == list or type(ids) == tuple:
2341 rfreqs = {}
2342 for i in ids:
2343 rfreqs[i] = list(self._getrestfreqs(i))
2344 return rfreqs
2345 else:
2346 return list(self._getrestfreqs(ids))
2347
2348 @asaplog_post_dec
2349 def set_restfreqs(self, freqs=None, unit='Hz'):
2350 """\
2351 Set or replace the restfrequency specified and
2352 if the 'freqs' argument holds a scalar,
2353 then that rest frequency will be applied to all the selected
2354 data. If the 'freqs' argument holds
2355 a vector, then it MUST be of equal or smaller length than
2356 the number of IFs (and the available restfrequencies will be
2357 replaced by this vector). In this case, *all* data have
2358 the restfrequency set per IF according
2359 to the corresponding value you give in the 'freqs' vector.
2360 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2361 IF 1 gets restfreq 2e9.
2362
2363 You can also specify the frequencies via a linecatalog.
2364
2365 Parameters:
2366
2367 freqs: list of rest frequency values or string idenitfiers
2368
2369 unit: unit for rest frequency (default 'Hz')
2370
2371
2372 Example::
2373
2374 # set the given restfrequency for the all currently selected IFs
2375 scan.set_restfreqs(freqs=1.4e9)
2376 # set restfrequencies for the n IFs (n > 1) in the order of the
2377 # list, i.e
2378 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2379 # len(list_of_restfreqs) == nIF
2380 # for nIF == 1 the following will set multiple restfrequency for
2381 # that IF
2382 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2383 # set multiple restfrequencies per IF. as a list of lists where
2384 # the outer list has nIF elements, the inner s arbitrary
2385 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2386
2387 *Note*:
2388
2389 To do more sophisticate Restfrequency setting, e.g. on a
2390 source and IF basis, use scantable.set_selection() before using
2391 this function::
2392
2393 # provided your scantable is called scan
2394 selection = selector()
2395 selection.set_name('ORION*')
2396 selection.set_ifs([1])
2397 scan.set_selection(selection)
2398 scan.set_restfreqs(freqs=86.6e9)
2399
2400 """
2401 varlist = vars()
2402 from asap import linecatalog
2403 # simple value
2404 if isinstance(freqs, int) or isinstance(freqs, float):
2405 self._setrestfreqs([freqs], [""], unit)
2406 # list of values
2407 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2408 # list values are scalars
2409 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2410 if len(freqs) == 1:
2411 self._setrestfreqs(freqs, [""], unit)
2412 else:
2413 # allow the 'old' mode of setting mulitple IFs
2414 savesel = self._getselection()
2415 sel = self.get_selection()
2416 iflist = self.getifnos()
2417 if len(freqs)>len(iflist):
2418 raise ValueError("number of elements in list of list "
2419 "exeeds the current IF selections")
2420 iflist = self.getifnos()
2421 for i, fval in enumerate(freqs):
2422 sel.set_ifs(iflist[i])
2423 self._setselection(sel)
2424 self._setrestfreqs([fval], [""], unit)
2425 self._setselection(savesel)
2426
2427 # list values are dict, {'value'=, 'name'=)
2428 elif isinstance(freqs[-1], dict):
2429 values = []
2430 names = []
2431 for d in freqs:
2432 values.append(d["value"])
2433 names.append(d["name"])
2434 self._setrestfreqs(values, names, unit)
2435 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2436 savesel = self._getselection()
2437 sel = self.get_selection()
2438 iflist = self.getifnos()
2439 if len(freqs)>len(iflist):
2440 raise ValueError("number of elements in list of list exeeds"
2441 " the current IF selections")
2442 for i, fval in enumerate(freqs):
2443 sel.set_ifs(iflist[i])
2444 self._setselection(sel)
2445 self._setrestfreqs(fval, [""], unit)
2446 self._setselection(savesel)
2447 # freqs are to be taken from a linecatalog
2448 elif isinstance(freqs, linecatalog):
2449 savesel = self._getselection()
2450 sel = self.get_selection()
2451 for i in xrange(freqs.nrow()):
2452 sel.set_ifs(iflist[i])
2453 self._setselection(sel)
2454 self._setrestfreqs([freqs.get_frequency(i)],
2455 [freqs.get_name(i)], "MHz")
2456 # ensure that we are not iterating past nIF
2457 if i == self.nif()-1: break
2458 self._setselection(savesel)
2459 else:
2460 return
2461 self._add_history("set_restfreqs", varlist)
2462
2463 @asaplog_post_dec
2464 def shift_refpix(self, delta):
2465 """\
2466 Shift the reference pixel of the Spectra Coordinate by an
2467 integer amount.
2468
2469 Parameters:
2470
2471 delta: the amount to shift by
2472
2473 *Note*:
2474
2475 Be careful using this with broadband data.
2476
2477 """
2478 varlist = vars()
2479 Scantable.shift_refpix(self, delta)
2480 s._add_history("shift_refpix", varlist)
2481
2482 @asaplog_post_dec
2483 def history(self, filename=None, nrows=-1, start=0):
2484 """\
2485 Print the history. Optionally to a file.
2486
2487 Parameters:
2488
2489 filename: The name of the file to save the history to.
2490
2491 """
2492 n = self._historylength()
2493 if nrows == -1:
2494 nrows = n
2495 if start+nrows > n:
2496 nrows = nrows-start
2497 if n > 1000 and nrows == n:
2498 nrows = 1000
2499 start = n-1000
2500 asaplog.push("Warning: History has {0} entries. Displaying last "
2501 "1000".format(n))
2502 hist = list(self._gethistory(nrows, start))
2503 out = "-"*80
2504 for h in hist:
2505 if not h.strip():
2506 continue
2507 if h.find("---") >-1:
2508 continue
2509 else:
2510 items = h.split("##")
2511 date = items[0]
2512 func = items[1]
2513 items = items[2:]
2514 out += "\n"+date+"\n"
2515 out += "Function: %s\n Parameters:" % (func)
2516 for i in items:
2517 if i == '':
2518 continue
2519 s = i.split("=")
2520 out += "\n %s = %s" % (s[0], s[1])
2521 out = "\n".join([out, "*"*80])
2522 if filename is not None:
2523 if filename is "":
2524 filename = 'scantable_history.txt'
2525 filename = os.path.expandvars(os.path.expanduser(filename))
2526 if not os.path.isdir(filename):
2527 data = open(filename, 'w')
2528 data.write(out)
2529 data.close()
2530 else:
2531 msg = "Illegal file name '%s'." % (filename)
2532 raise IOError(msg)
2533 return page(out)
2534
2535 #
2536 # Maths business
2537 #
2538 @asaplog_post_dec
2539 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2540 avmode="NONE"):
2541 """\
2542 Return the (time) weighted average of a scan. Scans will be averaged
2543 only if the source direction (RA/DEC) is within 1' otherwise
2544
2545 *Note*:
2546
2547 in channels only - align if necessary
2548
2549 Parameters:
2550
2551 mask: an optional mask (only used for 'var' and 'tsys'
2552 weighting)
2553
2554 scanav: True averages each scan separately
2555 False (default) averages all scans together,
2556
2557 weight: Weighting scheme.
2558 'none' (mean no weight)
2559 'var' (1/var(spec) weighted)
2560 'tsys' (1/Tsys**2 weighted)
2561 'tint' (integration time weighted)
2562 'tintsys' (Tint/Tsys**2)
2563 'median' ( median averaging)
2564 The default is 'tint'
2565
2566 align: align the spectra in velocity before averaging. It takes
2567 the time of the first spectrum as reference time.
2568 avmode: 'SOURCE' - also select by source name - or
2569 'NONE' (default). Not applicable for scanav=True or
2570 weight=median
2571
2572 Example::
2573
2574 # time average the scantable without using a mask
2575 newscan = scan.average_time()
2576
2577 """
2578 varlist = vars()
2579 weight = weight or 'TINT'
2580 mask = mask or ()
2581 scanav = (scanav and 'SCAN') or avmode.upper()
2582 scan = (self, )
2583
2584 if align:
2585 scan = (self.freq_align(insitu=False), )
2586 asaplog.push("Note: Alignment is don on a source-by-source basis")
2587 asaplog.push("Note: Averaging (by default) is not")
2588 # we need to set it to SOURCE averaging here
2589 s = None
2590 if weight.upper() == 'MEDIAN':
2591 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2592 scanav))
2593 else:
2594 s = scantable(self._math._average(scan, mask, weight.upper(),
2595 scanav))
2596 s._add_history("average_time", varlist)
2597 return s
2598
2599 @asaplog_post_dec
2600 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2601 """\
2602 Return a scan where all spectra are converted to either
2603 Jansky or Kelvin depending upon the flux units of the scan table.
2604 By default the function tries to look the values up internally.
2605 If it can't find them (or if you want to over-ride), you must
2606 specify EITHER jyperk OR eta (and D which it will try to look up
2607 also if you don't set it). jyperk takes precedence if you set both.
2608
2609 Parameters:
2610
2611 jyperk: the Jy / K conversion factor
2612
2613 eta: the aperture efficiency
2614
2615 d: the geometric diameter (metres)
2616
2617 insitu: if False a new scantable is returned.
2618 Otherwise, the scaling is done in-situ
2619 The default is taken from .asaprc (False)
2620
2621 """
2622 if insitu is None: insitu = rcParams['insitu']
2623 self._math._setinsitu(insitu)
2624 varlist = vars()
2625 jyperk = jyperk or -1.0
2626 d = d or -1.0
2627 eta = eta or -1.0
2628 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2629 s._add_history("convert_flux", varlist)
2630 if insitu: self._assign(s)
2631 else: return s
2632
2633 @asaplog_post_dec
2634 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2635 """\
2636 Return a scan after applying a gain-elevation correction.
2637 The correction can be made via either a polynomial or a
2638 table-based interpolation (and extrapolation if necessary).
2639 You specify polynomial coefficients, an ascii table or neither.
2640 If you specify neither, then a polynomial correction will be made
2641 with built in coefficients known for certain telescopes (an error
2642 will occur if the instrument is not known).
2643 The data and Tsys are *divided* by the scaling factors.
2644
2645 Parameters:
2646
2647 poly: Polynomial coefficients (default None) to compute a
2648 gain-elevation correction as a function of
2649 elevation (in degrees).
2650
2651 filename: The name of an ascii file holding correction factors.
2652 The first row of the ascii file must give the column
2653 names and these MUST include columns
2654 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2655 by this) somewhere.
2656 The second row must give the data type of the
2657 column. Use 'R' for Real and 'I' for Integer.
2658 An example file would be
2659 (actual factors are arbitrary) :
2660
2661 TIME ELEVATION FACTOR
2662 R R R
2663 0.1 0 0.8
2664 0.2 20 0.85
2665 0.3 40 0.9
2666 0.4 60 0.85
2667 0.5 80 0.8
2668 0.6 90 0.75
2669
2670 method: Interpolation method when correcting from a table.
2671 Values are 'nearest', 'linear' (default), 'cubic'
2672 and 'spline'
2673
2674 insitu: if False a new scantable is returned.
2675 Otherwise, the scaling is done in-situ
2676 The default is taken from .asaprc (False)
2677
2678 """
2679
2680 if insitu is None: insitu = rcParams['insitu']
2681 self._math._setinsitu(insitu)
2682 varlist = vars()
2683 poly = poly or ()
2684 from os.path import expandvars
2685 filename = expandvars(filename)
2686 s = scantable(self._math._gainel(self, poly, filename, method))
2687 s._add_history("gain_el", varlist)
2688 if insitu:
2689 self._assign(s)
2690 else:
2691 return s
2692
2693 @asaplog_post_dec
2694 def freq_align(self, reftime=None, method='cubic', insitu=None):
2695 """\
2696 Return a scan where all rows have been aligned in frequency/velocity.
2697 The alignment frequency frame (e.g. LSRK) is that set by function
2698 set_freqframe.
2699
2700 Parameters:
2701
2702 reftime: reference time to align at. By default, the time of
2703 the first row of data is used.
2704
2705 method: Interpolation method for regridding the spectra.
2706 Choose from 'nearest', 'linear', 'cubic' (default)
2707 and 'spline'
2708
2709 insitu: if False a new scantable is returned.
2710 Otherwise, the scaling is done in-situ
2711 The default is taken from .asaprc (False)
2712
2713 """
2714 if insitu is None: insitu = rcParams["insitu"]
2715 oldInsitu = self._math._insitu()
2716 self._math._setinsitu(insitu)
2717 varlist = vars()
2718 reftime = reftime or ""
2719 s = scantable(self._math._freq_align(self, reftime, method))
2720 s._add_history("freq_align", varlist)
2721 self._math._setinsitu(oldInsitu)
2722 if insitu:
2723 self._assign(s)
2724 else:
2725 return s
2726
2727 @asaplog_post_dec
2728 def opacity(self, tau=None, insitu=None):
2729 """\
2730 Apply an opacity correction. The data
2731 and Tsys are multiplied by the correction factor.
2732
2733 Parameters:
2734
2735 tau: (list of) opacity from which the correction factor is
2736 exp(tau*ZD)
2737 where ZD is the zenith-distance.
2738 If a list is provided, it has to be of length nIF,
2739 nIF*nPol or 1 and in order of IF/POL, e.g.
2740 [opif0pol0, opif0pol1, opif1pol0 ...]
2741 if tau is `None` the opacities are determined from a
2742 model.
2743
2744 insitu: if False a new scantable is returned.
2745 Otherwise, the scaling is done in-situ
2746 The default is taken from .asaprc (False)
2747
2748 """
2749 if insitu is None:
2750 insitu = rcParams['insitu']
2751 self._math._setinsitu(insitu)
2752 varlist = vars()
2753 if not hasattr(tau, "__len__"):
2754 tau = [tau]
2755 s = scantable(self._math._opacity(self, tau))
2756 s._add_history("opacity", varlist)
2757 if insitu:
2758 self._assign(s)
2759 else:
2760 return s
2761
2762 @asaplog_post_dec
2763 def bin(self, width=5, insitu=None):
2764 """\
2765 Return a scan where all spectra have been binned up.
2766
2767 Parameters:
2768
2769 width: The bin width (default=5) in pixels
2770
2771 insitu: if False a new scantable is returned.
2772 Otherwise, the scaling is done in-situ
2773 The default is taken from .asaprc (False)
2774
2775 """
2776 if insitu is None:
2777 insitu = rcParams['insitu']
2778 self._math._setinsitu(insitu)
2779 varlist = vars()
2780 s = scantable(self._math._bin(self, width))
2781 s._add_history("bin", varlist)
2782 if insitu:
2783 self._assign(s)
2784 else:
2785 return s
2786
2787 @asaplog_post_dec
2788 def reshape(self, first, last, insitu=None):
2789 """Resize the band by providing first and last channel.
2790 This will cut off all channels outside [first, last].
2791 """
2792 if insitu is None:
2793 insitu = rcParams['insitu']
2794 varlist = vars()
2795 if last < 0:
2796 last = self.nchan()-1 + last
2797 s = None
2798 if insitu:
2799 s = self
2800 else:
2801 s = self.copy()
2802 s._reshape(first,last)
2803 s._add_history("reshape", varlist)
2804 if not insitu:
2805 return s
2806
2807 @asaplog_post_dec
2808 def resample(self, width=5, method='cubic', insitu=None):
2809 """\
2810 Return a scan where all spectra have been binned up.
2811
2812 Parameters:
2813
2814 width: The bin width (default=5) in pixels
2815
2816 method: Interpolation method when correcting from a table.
2817 Values are 'nearest', 'linear', 'cubic' (default)
2818 and 'spline'
2819
2820 insitu: if False a new scantable is returned.
2821 Otherwise, the scaling is done in-situ
2822 The default is taken from .asaprc (False)
2823
2824 """
2825 if insitu is None:
2826 insitu = rcParams['insitu']
2827 self._math._setinsitu(insitu)
2828 varlist = vars()
2829 s = scantable(self._math._resample(self, method, width))
2830 s._add_history("resample", varlist)
2831 if insitu:
2832 self._assign(s)
2833 else:
2834 return s
2835
2836 @asaplog_post_dec
2837 def average_pol(self, mask=None, weight='none'):
2838 """\
2839 Average the Polarisations together.
2840
2841 Parameters:
2842
2843 mask: An optional mask defining the region, where the
2844 averaging will be applied. The output will have all
2845 specified points masked.
2846
2847 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2848 weighted), or 'tsys' (1/Tsys**2 weighted)
2849
2850 """
2851 varlist = vars()
2852 mask = mask or ()
2853 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2854 s._add_history("average_pol", varlist)
2855 return s
2856
2857 @asaplog_post_dec
2858 def average_beam(self, mask=None, weight='none'):
2859 """\
2860 Average the Beams together.
2861
2862 Parameters:
2863 mask: An optional mask defining the region, where the
2864 averaging will be applied. The output will have all
2865 specified points masked.
2866
2867 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2868 weighted), or 'tsys' (1/Tsys**2 weighted)
2869
2870 """
2871 varlist = vars()
2872 mask = mask or ()
2873 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2874 s._add_history("average_beam", varlist)
2875 return s
2876
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag:  Bool indicating whether to turn this on (True) or
                    off (False)

        """
        # capture locals for the history record before delegating the
        # actual flag change to the underlying C++ layer
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
2891
    @asaplog_post_dec
    def convert_pol(self, poltype=None):
        """\
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.

        Parameters:

            poltype:    The new polarisation type. Valid types are:
                        'linear', 'circular', 'stokes' and 'linpol'

        Returns:
            a new scantable holding the converted data; the conversion
            itself is performed by the C++ math layer.
        """
        varlist = vars()
        s = scantable(self._math._convertpol(self, poltype))
        # record the conversion in the new table's history
        s._add_history("convert_pol", varlist)
        return s
2908
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy for the interactive comparison plot
        if plot: orgscan = self.copy()

        # the actual smoothing is done in the C++ math layer
        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' is handed to _get_verify_action as the default answer;
        # it is set to "N" once the user rejects all remaining rows
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # verify the smoothing row by row
            for r in xrange(s.nrow()):
                # abcissa/spectrum of the smoothed and the original data
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3001
    @asaplog_post_dec
    def regrid_channel(self, width=5, plot=False, insitu=None):
        """\
        Regrid the spectra by the specified channel width

        Parameters:

            width:      The channel width (float) of regridded spectra
                        in the current spectral unit.

            plot:       [NOT IMPLEMENTED YET]
                        plot the original and the regridded spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()

        # plotting is not available yet; warn and continue without it
        if plot:
           asaplog.post()
           asaplog.push("Verification plot is not implemtnetd yet.")
           asaplog.post("WARN")

        # always work on a copy; _regrid_specchan modifies it in place
        s = self.copy()
        s._regrid_specchan(width)

        s._add_history("regrid_channel", varlist)

        # NOTE: the block below is the intended (not yet enabled)
        # interactive verification, kept for when 'plot' is implemented
#        if plot:
#            from asap.asapplotter import new_asaplot
#            theplot = new_asaplot(rcParams['plotter.gui'])
#            from matplotlib import rc as rcp
#            rcp('lines', linewidth=1)
#            theplot.set_panels()
#            ylab=s._get_ordinate_label()
#            #theplot.palette(0,["#777777","red"])
#            for r in xrange(s.nrow()):
#                xsm=s._getabcissa(r)
#                ysm=s._getspectrum(r)
#                xorg=orgscan._getabcissa(r)
#                yorg=orgscan._getspectrum(r)
#                theplot.clear()
#                theplot.hold()
#                theplot.set_axes('ylabel',ylab)
#                theplot.set_axes('xlabel',s._getabcissalabel(r))
#                theplot.set_axes('title',s._getsourcename(r))
#                theplot.set_line(label='Original',color="#777777")
#                theplot.plot(xorg,yorg)
#                theplot.set_line(label='Smoothed',color="red")
#                theplot.plot(xsm,ysm)
#                ### Ugly part for legend
#                for i in [0,1]:
#                    theplot.subplots[0]['lines'].append(
#                        [theplot.subplots[0]['axes'].lines[i]]
#                        )
#                theplot.release()
#                ### Ugly part for legend
#                theplot.subplots[0]['lines']=[]
#                res = raw_input("Accept smoothing ([y]/n): ")
#                if res.upper() == 'N':
#                    s._setspectrum(yorg, r)
#            theplot.quit()
#            del theplot
#            del orgscan

        if insitu: self._assign(s)
        else: return s
3074
3075 @asaplog_post_dec
3076 def _parse_wn(self, wn):
3077 if isinstance(wn, list) or isinstance(wn, tuple):
3078 return wn
3079 elif isinstance(wn, int):
3080 return [ wn ]
3081 elif isinstance(wn, str):
3082 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3083 val = wn.split('-')
3084 val = [int(val[0]), int(val[1])]
3085 val.sort()
3086 res = [i for i in xrange(val[0], val[1]+1)]
3087 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3088 val = int(wn[2:])+1
3089 res = [i for i in xrange(val)]
3090 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3091 val = int(wn[:-2])+1
3092 res = [i for i in xrange(val)]
3093 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3094 val = int(wn[1:])
3095 res = [i for i in xrange(val)]
3096 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3097 val = int(wn[:-1])
3098 res = [i for i in xrange(val)]
3099 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3100 # then interpreted in C++
3101 # side as [a,a+1,...,a_nyq]
3102 # (CAS-3759)
3103 val = int(wn[2:])
3104 res = [val, -999]
3105 #res = [i for i in xrange(val, self.nchan()/2+1)]
3106 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3107 # then interpreted in C++
3108 # side as [a,a+1,...,a_nyq]
3109 # (CAS-3759)
3110 val = int(wn[:-2])
3111 res = [val, -999]
3112 #res = [i for i in xrange(val, self.nchan()/2+1)]
3113 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3114 # then interpreted in C++
3115 # side as [a+1,a+2,...,a_nyq]
3116 # (CAS-3759)
3117 val = int(wn[1:])+1
3118 res = [val, -999]
3119 #res = [i for i in xrange(val, self.nchan()/2+1)]
3120 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3121 # then interpreted in C++
3122 # side as [a+1,a+2,...,a_nyq]
3123 # (CAS-3759)
3124 val = int(wn[:-1])+1
3125 res = [val, -999]
3126 #res = [i for i in xrange(val, self.nchan()/2+1)]
3127
3128 return res
3129 else:
3130 msg = 'wrong value given for addwn/rejwn'
3131 raise RuntimeError(msg)
3132
3133 @asaplog_post_dec
3134 def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
3135 """\
3136 Subtract baseline based on parameters written in Baseline Table.
3137
3138 Parameters:
3139 insitu: if True, baseline fitting/subtraction is done
3140 in-situ. If False, a new scantable with
3141 baseline subtracted is returned. Actually,
3142 format of the returned value depends on both
3143 insitu and retfitres (see below).
3144 The default is taken from .asaprc (False)
3145 retfitres: if True, the results of baseline fitting (i.e.,
3146 coefficients and rms) are returned.
3147 default is False.
3148 The format of the returned value of this
3149 function varies as follows:
3150 (1) in case insitu=True and retfitres=True:
3151 fitting result.
3152 (2) in case insitu=True and retfitres=False:
3153 None.
3154 (3) in case insitu=False and retfitres=True:
3155 a dictionary containing a new scantable
3156 (with baseline subtracted) and the fitting
3157 results.
3158 (4) in case insitu=False and retfitres=False:
3159 a new scantable (with baseline subtracted).
3160 inbltable: name of input baseline table. The row number of
3161 scantable and that of inbltable must be
3162 identical.
3163 outbltable: name of output baseline table where baseline
3164 parameters and fitting results recorded.
3165 default is ''(no output).
3166 overwrite: if True when an existing baseline table is
3167 specified for outbltable, overwrites it.
3168 Otherwise there is no harm.
3169 default is False.
3170 """
3171
3172 try:
3173 varlist = vars()
3174 if retfitres is None: retfitres = False
3175 if inbltable is None: raise ValueError("bltable missing.")
3176 if outbltable is None: outbltable = ''
3177 if overwrite is None: overwrite = False
3178
3179 if insitu is None: insitu = rcParams['insitu']
3180 if insitu:
3181 workscan = self
3182 else:
3183 workscan = self.copy()
3184
3185 sres = workscan._apply_bltable(inbltable,
3186 retfitres,
3187 outbltable,
3188 os.path.exists(outbltable),
3189 overwrite)
3190 if retfitres: res = parse_fitresult(sres)
3191
3192 workscan._add_history('apply_bltable', varlist)
3193
3194 if insitu:
3195 self._assign(workscan)
3196 if retfitres:
3197 return res
3198 else:
3199 return None
3200 else:
3201 if retfitres:
3202 return {'scantable': workscan, 'fitresults': res}
3203 else:
3204 return workscan
3205
3206 except RuntimeError, e:
3207 raise_fitting_failure_exception(e)
3208
3209 @asaplog_post_dec
3210 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3211 """\
3212 Subtract baseline based on parameters written in the input list.
3213
3214 Parameters:
3215 insitu: if True, baseline fitting/subtraction is done
3216 in-situ. If False, a new scantable with
3217 baseline subtracted is returned. Actually,
3218 format of the returned value depends on both
3219 insitu and retfitres (see below).
3220 The default is taken from .asaprc (False)
3221 retfitres: if True, the results of baseline fitting (i.e.,
3222 coefficients and rms) are returned.
3223 default is False.
3224 The format of the returned value of this
3225 function varies as follows:
3226 (1) in case insitu=True and retfitres=True:
3227 fitting result.
3228 (2) in case insitu=True and retfitres=False:
3229 None.
3230 (3) in case insitu=False and retfitres=True:
3231 a dictionary containing a new scantable
3232 (with baseline subtracted) and the fitting
3233 results.
3234 (4) in case insitu=False and retfitres=False:
3235 a new scantable (with baseline subtracted).
3236 blinfo: baseline parameter set stored in a dictionary
3237 or a list of dictionary. Each dictionary
3238 corresponds to each spectrum and must contain
3239 the following keys and values:
3240 'row': row number,
3241 'blfunc': function name. available ones include
3242 'poly', 'chebyshev', 'cspline' and
3243 'sinusoid',
3244 'order': maximum order of polynomial. needed
3245 if blfunc='poly' or 'chebyshev',
3246 'npiece': number or piecewise polynomial.
3247 needed if blfunc='cspline',
3248 'nwave': a list of sinusoidal wave numbers.
3249 needed if blfunc='sinusoid', and
3250 'masklist': min-max windows for channel mask.
3251 the specified ranges will be used
3252 for fitting.
3253 bltable: name of output baseline table where baseline
3254 parameters and fitting results recorded.
3255 default is ''(no output).
3256 overwrite: if True when an existing baseline table is
3257 specified for bltable, overwrites it.
3258 Otherwise there is no harm.
3259 default is False.
3260
3261 Example:
3262 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3263 'masklist':[[10,350],[352,510]]},
3264 {'row':1, 'blfunc':'cspline', 'npiece':3,
3265 'masklist':[[3,16],[19,404],[407,511]]}
3266 ])
3267
3268 the first spectrum (row=0) will be fitted with polynomial
3269 of order=5 and the next one (row=1) will be fitted with cubic
3270 spline consisting of 3 pieces.
3271 """
3272
3273 try:
3274 varlist = vars()
3275 if retfitres is None: retfitres = False
3276 if blinfo is None: blinfo = []
3277 if bltable is None: bltable = ''
3278 if overwrite is None: overwrite = False
3279
3280 if insitu is None: insitu = rcParams['insitu']
3281 if insitu:
3282 workscan = self
3283 else:
3284 workscan = self.copy()
3285
3286 nrow = workscan.nrow()
3287
3288 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3289
3290 print "in_blinfo=< "+ str(in_blinfo)+" >"
3291
3292 sres = workscan._sub_baseline(in_blinfo,
3293 retfitres,
3294 bltable,
3295 os.path.exists(bltable),
3296 overwrite)
3297 if retfitres: res = parse_fitresult(sres)
3298
3299 workscan._add_history('sub_baseline', varlist)
3300
3301 if insitu:
3302 self._assign(workscan)
3303 if retfitres:
3304 return res
3305 else:
3306 return None
3307 else:
3308 if retfitres:
3309 return {'scantable': workscan, 'fitresults': res}
3310 else:
3311 return workscan
3312
3313 except RuntimeError, e:
3314 raise_fitting_failure_exception(e)
3315
3316 @asaplog_post_dec
3317 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3318 whichrow=None, uselinefinder=None, edge=None,
3319 threshold=None, chan_avg_limit=None):
3320 """\
3321 Calculates and returns model selection criteria for a specified
3322 baseline model and a given spectrum data.
3323 Available values include Akaike Information Criterion (AIC), the
3324 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3325 Bayesian Information Criterion (BIC) and the Generalised Cross
3326 Validation (GCV).
3327
3328 Parameters:
3329 value: name of model selection criteria to calculate.
3330 available ones include 'aic', 'aicc', 'bic' and
3331 'gcv'. default is 'aicc'.
3332 blfunc: baseline function name. available ones include
3333 'chebyshev', 'cspline' and 'sinusoid'.
3334 default is 'chebyshev'.
3335 order: parameter for basline function. actually stands for
3336 order of polynomial (order) for 'chebyshev',
3337 number of spline pieces (npiece) for 'cspline' and
3338 maximum wave number for 'sinusoid', respectively.
3339 default is 5 (which is also the default order value
3340 for [auto_]chebyshev_baseline()).
3341 mask: an optional mask. default is [].
3342 whichrow: row number. default is 0 (the first row)
3343 uselinefinder: use sd.linefinder() to flag out line regions
3344 default is True.
3345 edge: an optional number of channel to drop at
3346 the edge of spectrum. If only one value is
3347 specified, the same number will be dropped
3348 from both sides of the spectrum. Default
3349 is to keep all channels. Nested tuples
3350 represent individual edge selection for
3351 different IFs (a number of spectral channels
3352 can be different)
3353 default is (0, 0).
3354 threshold: the threshold used by line finder. It is
3355 better to keep it large as only strong lines
3356 affect the baseline solution.
3357 default is 3.
3358 chan_avg_limit: a maximum number of consequtive spectral
3359 channels to average during the search of
3360 weak and broad lines. The default is no
3361 averaging (and no search for weak lines).
3362 If such lines can affect the fitted baseline
3363 (e.g. a high order polynomial is fitted),
3364 increase this parameter (usually values up
3365 to 8 are reasonable). Most users of this
3366 method should find the default value sufficient.
3367 default is 1.
3368
3369 Example:
3370 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3371 """
3372
3373 try:
3374 varlist = vars()
3375
3376 if value is None: value = 'aicc'
3377 if blfunc is None: blfunc = 'chebyshev'
3378 if order is None: order = 5
3379 if mask is None: mask = []
3380 if whichrow is None: whichrow = 0
3381 if uselinefinder is None: uselinefinder = True
3382 if edge is None: edge = (0, 0)
3383 if threshold is None: threshold = 3
3384 if chan_avg_limit is None: chan_avg_limit = 1
3385
3386 return self._calc_aic(value, blfunc, order, mask,
3387 whichrow, uselinefinder, edge,
3388 threshold, chan_avg_limit)
3389
3390 except RuntimeError, e:
3391 raise_fitting_failure_exception(e)
3392
3393 @asaplog_post_dec
3394 def sinusoid_baseline(self, mask=None, applyfft=None,
3395 fftmethod=None, fftthresh=None,
3396 addwn=None, rejwn=None,
3397 insitu=None,
3398 clipthresh=None, clipniter=None,
3399 plot=None,
3400 getresidual=None,
3401 showprogress=None, minnrow=None,
3402 outlog=None,
3403 blfile=None, csvformat=None,
3404 bltable=None):
3405 """\
3406 Return a scan which has been baselined (all rows) with sinusoidal
3407 functions.
3408
3409 Parameters:
3410 mask: an optional mask
3411 applyfft: if True use some method, such as FFT, to find
3412 strongest sinusoidal components in the wavenumber
3413 domain to be used for baseline fitting.
3414 default is True.
3415 fftmethod: method to find the strong sinusoidal components.
3416 now only 'fft' is available and it is the default.
3417 fftthresh: the threshold to select wave numbers to be used for
3418 fitting from the distribution of amplitudes in the
3419 wavenumber domain.
3420 both float and string values accepted.
3421 given a float value, the unit is set to sigma.
3422 for string values, allowed formats include:
3423 'xsigma' or 'x' (= x-sigma level. e.g.,
3424 '3sigma'), or
3425 'topx' (= the x strongest ones, e.g. 'top5').
3426 default is 3.0 (unit: sigma).
3427 addwn: the additional wave numbers to be used for fitting.
3428 list or integer value is accepted to specify every
3429 wave numbers. also string value can be used in case
3430 you need to specify wave numbers in a certain range,
3431 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3432 '<a' (= 0,1,...,a-2,a-1),
3433 '>=a' (= a, a+1, ... up to the maximum wave
3434 number corresponding to the Nyquist
3435 frequency for the case of FFT).
3436 default is [0].
3437 rejwn: the wave numbers NOT to be used for fitting.
3438 can be set just as addwn but has higher priority:
3439 wave numbers which are specified both in addwn
3440 and rejwn will NOT be used. default is [].
3441 insitu: if False a new scantable is returned.
3442 Otherwise, the scaling is done in-situ
3443 The default is taken from .asaprc (False)
3444 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3445 clipniter: maximum number of iteration of 'clipthresh'-sigma
3446 clipping (default is 0)
3447 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3448 plot the fit and the residual. In this each
3449 indivual fit has to be approved, by typing 'y'
3450 or 'n'
3451 getresidual: if False, returns best-fit values instead of
3452 residual. (default is True)
3453 showprogress: show progress status for large data.
3454 default is True.
3455 minnrow: minimum number of input spectra to show.
3456 default is 1000.
3457 outlog: Output the coefficients of the best-fit
3458 function to logger (default is False)
3459 blfile: Name of a text file in which the best-fit
3460 parameter values to be written
3461 (default is '': no file/logger output)
3462 csvformat: if True blfile is csv-formatted, default is False.
3463 bltable: name of a baseline table where fitting results
3464 (coefficients, rms, etc.) are to be written.
3465 if given, fitting results will NOT be output to
3466 scantable (insitu=True) or None will be
3467 returned (insitu=False).
3468 (default is "": no table output)
3469
3470 Example:
3471 # return a scan baselined by a combination of sinusoidal curves
3472 # having wave numbers in spectral window up to 10,
3473 # also with 3-sigma clipping, iteration up to 4 times
3474 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3475
3476 Note:
3477 The best-fit parameter values output in logger and/or blfile are now
3478 based on specunit of 'channel'.
3479 """
3480
3481 try:
3482 varlist = vars()
3483
3484 if insitu is None: insitu = rcParams['insitu']
3485 if insitu:
3486 workscan = self
3487 else:
3488 workscan = self.copy()
3489
3490 if mask is None: mask = []
3491 if applyfft is None: applyfft = True
3492 if fftmethod is None: fftmethod = 'fft'
3493 if fftthresh is None: fftthresh = 3.0
3494 if addwn is None: addwn = [0]
3495 if rejwn is None: rejwn = []
3496 if clipthresh is None: clipthresh = 3.0
3497 if clipniter is None: clipniter = 0
3498 if plot is None: plot = False
3499 if getresidual is None: getresidual = True
3500 if showprogress is None: showprogress = True
3501 if minnrow is None: minnrow = 1000
3502 if outlog is None: outlog = False
3503 if blfile is None: blfile = ''
3504 if csvformat is None: csvformat = False
3505 if bltable is None: bltable = ''
3506
3507 sapplyfft = 'true' if applyfft else 'false'
3508 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3509
3510 scsvformat = 'T' if csvformat else 'F'
3511
3512 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3513 workscan._sinusoid_baseline(mask,
3514 fftinfo,
3515 #applyfft, fftmethod.lower(),
3516 #str(fftthresh).lower(),
3517 workscan._parse_wn(addwn),
3518 workscan._parse_wn(rejwn),
3519 clipthresh, clipniter,
3520 getresidual,
3521 pack_progress_params(showprogress,
3522 minnrow),
3523 outlog, scsvformat+blfile,
3524 bltable)
3525 workscan._add_history('sinusoid_baseline', varlist)
3526
3527 if bltable == '':
3528 if insitu:
3529 self._assign(workscan)
3530 else:
3531 return workscan
3532 else:
3533 if not insitu:
3534 return None
3535
3536 except RuntimeError, e:
3537 raise_fitting_failure_exception(e)
3538
3539
3540 @asaplog_post_dec
3541 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3542 fftmethod=None, fftthresh=None,
3543 addwn=None, rejwn=None,
3544 insitu=None,
3545 clipthresh=None, clipniter=None,
3546 edge=None, threshold=None, chan_avg_limit=None,
3547 plot=None,
3548 getresidual=None,
3549 showprogress=None, minnrow=None,
3550 outlog=None,
3551 blfile=None, csvformat=None,
3552 bltable=None):
3553 """\
3554 Return a scan which has been baselined (all rows) with sinusoidal
3555 functions.
3556 Spectral lines are detected first using linefinder and masked out
3557 to avoid them affecting the baseline solution.
3558
3559 Parameters:
3560 mask: an optional mask retreived from scantable
3561 applyfft: if True use some method, such as FFT, to find
3562 strongest sinusoidal components in the wavenumber
3563 domain to be used for baseline fitting.
3564 default is True.
3565 fftmethod: method to find the strong sinusoidal components.
3566 now only 'fft' is available and it is the default.
3567 fftthresh: the threshold to select wave numbers to be used for
3568 fitting from the distribution of amplitudes in the
3569 wavenumber domain.
3570 both float and string values accepted.
3571 given a float value, the unit is set to sigma.
3572 for string values, allowed formats include:
3573 'xsigma' or 'x' (= x-sigma level. e.g.,
3574 '3sigma'), or
3575 'topx' (= the x strongest ones, e.g. 'top5').
3576 default is 3.0 (unit: sigma).
3577 addwn: the additional wave numbers to be used for fitting.
3578 list or integer value is accepted to specify every
3579 wave numbers. also string value can be used in case
3580 you need to specify wave numbers in a certain range,
3581 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3582 '<a' (= 0,1,...,a-2,a-1),
3583 '>=a' (= a, a+1, ... up to the maximum wave
3584 number corresponding to the Nyquist
3585 frequency for the case of FFT).
3586 default is [0].
3587 rejwn: the wave numbers NOT to be used for fitting.
3588 can be set just as addwn but has higher priority:
3589 wave numbers which are specified both in addwn
3590 and rejwn will NOT be used. default is [].
3591 insitu: if False a new scantable is returned.
3592 Otherwise, the scaling is done in-situ
3593 The default is taken from .asaprc (False)
3594 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3595 clipniter: maximum number of iteration of 'clipthresh'-sigma
3596 clipping (default is 0)
3597 edge: an optional number of channel to drop at
3598 the edge of spectrum. If only one value is
3599 specified, the same number will be dropped
3600 from both sides of the spectrum. Default
3601 is to keep all channels. Nested tuples
3602 represent individual edge selection for
3603 different IFs (a number of spectral channels
3604 can be different)
3605 threshold: the threshold used by line finder. It is
3606 better to keep it large as only strong lines
3607 affect the baseline solution.
3608 chan_avg_limit: a maximum number of consequtive spectral
3609 channels to average during the search of
3610 weak and broad lines. The default is no
3611 averaging (and no search for weak lines).
3612 If such lines can affect the fitted baseline
3613 (e.g. a high order polynomial is fitted),
3614 increase this parameter (usually values up
3615 to 8 are reasonable). Most users of this
3616 method should find the default value sufficient.
3617 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3618 plot the fit and the residual. In this each
3619 indivual fit has to be approved, by typing 'y'
3620 or 'n'
3621 getresidual: if False, returns best-fit values instead of
3622 residual. (default is True)
3623 showprogress: show progress status for large data.
3624 default is True.
3625 minnrow: minimum number of input spectra to show.
3626 default is 1000.
3627 outlog: Output the coefficients of the best-fit
3628 function to logger (default is False)
3629 blfile: Name of a text file in which the best-fit
3630 parameter values to be written
3631 (default is "": no file/logger output)
3632 csvformat: if True blfile is csv-formatted, default is False.
3633 bltable: name of a baseline table where fitting results
3634 (coefficients, rms, etc.) are to be written.
3635 if given, fitting results will NOT be output to
3636 scantable (insitu=True) or None will be
3637 returned (insitu=False).
3638 (default is "": no table output)
3639
3640 Example:
3641 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3642
3643 Note:
3644 The best-fit parameter values output in logger and/or blfile are now
3645 based on specunit of 'channel'.
3646 """
3647
3648 try:
3649 varlist = vars()
3650
3651 if insitu is None: insitu = rcParams['insitu']
3652 if insitu:
3653 workscan = self
3654 else:
3655 workscan = self.copy()
3656
3657 if mask is None: mask = []
3658 if applyfft is None: applyfft = True
3659 if fftmethod is None: fftmethod = 'fft'
3660 if fftthresh is None: fftthresh = 3.0
3661 if addwn is None: addwn = [0]
3662 if rejwn is None: rejwn = []
3663 if clipthresh is None: clipthresh = 3.0
3664 if clipniter is None: clipniter = 0
3665 if edge is None: edge = (0,0)
3666 if threshold is None: threshold = 3
3667 if chan_avg_limit is None: chan_avg_limit = 1
3668 if plot is None: plot = False
3669 if getresidual is None: getresidual = True
3670 if showprogress is None: showprogress = True
3671 if minnrow is None: minnrow = 1000
3672 if outlog is None: outlog = False
3673 if blfile is None: blfile = ''
3674 if csvformat is None: csvformat = False
3675 if bltable is None: bltable = ''
3676
3677 sapplyfft = 'true' if applyfft else 'false'
3678 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3679
3680 scsvformat = 'T' if csvformat else 'F'
3681
3682 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3683 workscan._auto_sinusoid_baseline(mask,
3684 fftinfo,
3685 workscan._parse_wn(addwn),
3686 workscan._parse_wn(rejwn),
3687 clipthresh, clipniter,
3688 normalise_edge_param(edge),
3689 threshold, chan_avg_limit,
3690 getresidual,
3691 pack_progress_params(showprogress,
3692 minnrow),
3693 outlog, scsvformat+blfile, bltable)
3694 workscan._add_history("auto_sinusoid_baseline", varlist)
3695
3696 if bltable == '':
3697 if insitu:
3698 self._assign(workscan)
3699 else:
3700 return workscan
3701 else:
3702 if not insitu:
3703 return None
3704
3705 except RuntimeError, e:
3706 raise_fitting_failure_exception(e)
3707
3708 @asaplog_post_dec
3709 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3710 clipthresh=None, clipniter=None, plot=None,
3711 getresidual=None, showprogress=None, minnrow=None,
3712 outlog=None, blfile=None, csvformat=None,
3713 bltable=None):
3714 """\
3715 Return a scan which has been baselined (all rows) by cubic spline
3716 function (piecewise cubic polynomial).
3717
3718 Parameters:
3719 mask: An optional mask
3720 npiece: Number of pieces. (default is 2)
3721 insitu: If False a new scantable is returned.
3722 Otherwise, the scaling is done in-situ
3723 The default is taken from .asaprc (False)
3724 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3725 clipniter: maximum number of iteration of 'clipthresh'-sigma
3726 clipping (default is 0)
3727 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3728 plot the fit and the residual. In this each
3729 indivual fit has to be approved, by typing 'y'
3730 or 'n'
3731 getresidual: if False, returns best-fit values instead of
3732 residual. (default is True)
3733 showprogress: show progress status for large data.
3734 default is True.
3735 minnrow: minimum number of input spectra to show.
3736 default is 1000.
3737 outlog: Output the coefficients of the best-fit
3738 function to logger (default is False)
3739 blfile: Name of a text file in which the best-fit
3740 parameter values to be written
3741 (default is "": no file/logger output)
3742 csvformat: if True blfile is csv-formatted, default is False.
3743 bltable: name of a baseline table where fitting results
3744 (coefficients, rms, etc.) are to be written.
3745 if given, fitting results will NOT be output to
3746 scantable (insitu=True) or None will be
3747 returned (insitu=False).
3748 (default is "": no table output)
3749
3750 Example:
3751 # return a scan baselined by a cubic spline consisting of 2 pieces
3752 # (i.e., 1 internal knot),
3753 # also with 3-sigma clipping, iteration up to 4 times
3754 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3755
3756 Note:
3757 The best-fit parameter values output in logger and/or blfile are now
3758 based on specunit of 'channel'.
3759 """
3760
3761 try:
3762 varlist = vars()
3763
3764 if insitu is None: insitu = rcParams['insitu']
3765 if insitu:
3766 workscan = self
3767 else:
3768 workscan = self.copy()
3769
3770 if mask is None: mask = []
3771 if npiece is None: npiece = 2
3772 if clipthresh is None: clipthresh = 3.0
3773 if clipniter is None: clipniter = 0
3774 if plot is None: plot = False
3775 if getresidual is None: getresidual = True
3776 if showprogress is None: showprogress = True
3777 if minnrow is None: minnrow = 1000
3778 if outlog is None: outlog = False
3779 if blfile is None: blfile = ''
3780 if csvformat is None: csvformat = False
3781 if bltable is None: bltable = ''
3782
3783 scsvformat = 'T' if csvformat else 'F'
3784
3785 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3786 workscan._cspline_baseline(mask, npiece,
3787 clipthresh, clipniter,
3788 getresidual,
3789 pack_progress_params(showprogress,
3790 minnrow),
3791 outlog, scsvformat+blfile,
3792 bltable)
3793 workscan._add_history("cspline_baseline", varlist)
3794
3795 if bltable == '':
3796 if insitu:
3797 self._assign(workscan)
3798 else:
3799 return workscan
3800 else:
3801 if not insitu:
3802 return None
3803
3804 except RuntimeError, e:
3805 raise_fitting_failure_exception(e)
3806
3807 @asaplog_post_dec
3808 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3809 clipthresh=None, clipniter=None,
3810 edge=None, threshold=None, chan_avg_limit=None,
3811 getresidual=None, plot=None,
3812 showprogress=None, minnrow=None, outlog=None,
3813 blfile=None, csvformat=None, bltable=None):
3814 """\
3815 Return a scan which has been baselined (all rows) by cubic spline
3816 function (piecewise cubic polynomial).
3817 Spectral lines are detected first using linefinder and masked out
3818 to avoid them affecting the baseline solution.
3819
3820 Parameters:
3821 mask: an optional mask retreived from scantable
3822 npiece: Number of pieces. (default is 2)
3823 insitu: if False a new scantable is returned.
3824 Otherwise, the scaling is done in-situ
3825 The default is taken from .asaprc (False)
3826 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3827 clipniter: maximum number of iteration of 'clipthresh'-sigma
3828 clipping (default is 0)
3829 edge: an optional number of channel to drop at
3830 the edge of spectrum. If only one value is
3831 specified, the same number will be dropped
3832 from both sides of the spectrum. Default
3833 is to keep all channels. Nested tuples
3834 represent individual edge selection for
3835 different IFs (a number of spectral channels
3836 can be different)
3837 threshold: the threshold used by line finder. It is
3838 better to keep it large as only strong lines
3839 affect the baseline solution.
3840 chan_avg_limit: a maximum number of consequtive spectral
3841 channels to average during the search of
3842 weak and broad lines. The default is no
3843 averaging (and no search for weak lines).
3844 If such lines can affect the fitted baseline
3845 (e.g. a high order polynomial is fitted),
3846 increase this parameter (usually values up
3847 to 8 are reasonable). Most users of this
3848 method should find the default value sufficient.
3849 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3850 plot the fit and the residual. In this each
3851 indivual fit has to be approved, by typing 'y'
3852 or 'n'
3853 getresidual: if False, returns best-fit values instead of
3854 residual. (default is True)
3855 showprogress: show progress status for large data.
3856 default is True.
3857 minnrow: minimum number of input spectra to show.
3858 default is 1000.
3859 outlog: Output the coefficients of the best-fit
3860 function to logger (default is False)
3861 blfile: Name of a text file in which the best-fit
3862 parameter values to be written
3863 (default is "": no file/logger output)
3864 csvformat: if True blfile is csv-formatted, default is False.
3865 bltable: name of a baseline table where fitting results
3866 (coefficients, rms, etc.) are to be written.
3867 if given, fitting results will NOT be output to
3868 scantable (insitu=True) or None will be
3869 returned (insitu=False).
3870 (default is "": no table output)
3871
3872 Example:
3873 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3874
3875 Note:
3876 The best-fit parameter values output in logger and/or blfile are now
3877 based on specunit of 'channel'.
3878 """
3879
3880 try:
3881 varlist = vars()
3882
3883 if insitu is None: insitu = rcParams['insitu']
3884 if insitu:
3885 workscan = self
3886 else:
3887 workscan = self.copy()
3888
3889 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3890 if mask is None: mask = []
3891 if npiece is None: npiece = 2
3892 if clipthresh is None: clipthresh = 3.0
3893 if clipniter is None: clipniter = 0
3894 if edge is None: edge = (0, 0)
3895 if threshold is None: threshold = 3
3896 if chan_avg_limit is None: chan_avg_limit = 1
3897 if plot is None: plot = False
3898 if getresidual is None: getresidual = True
3899 if showprogress is None: showprogress = True
3900 if minnrow is None: minnrow = 1000
3901 if outlog is None: outlog = False
3902 if blfile is None: blfile = ''
3903 if csvformat is None: csvformat = False
3904 if bltable is None: bltable = ''
3905
3906 scsvformat = 'T' if csvformat else 'F'
3907
3908 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3909 workscan._auto_cspline_baseline(mask, npiece,
3910 clipthresh, clipniter,
3911 normalise_edge_param(edge),
3912 threshold,
3913 chan_avg_limit, getresidual,
3914 pack_progress_params(showprogress,
3915 minnrow),
3916 outlog,
3917 scsvformat+blfile,
3918 bltable)
3919 workscan._add_history("auto_cspline_baseline", varlist)
3920
3921 if bltable == '':
3922 if insitu:
3923 self._assign(workscan)
3924 else:
3925 return workscan
3926 else:
3927 if not insitu:
3928 return None
3929
3930 except RuntimeError, e:
3931 raise_fitting_failure_exception(e)
3932
    @asaplog_post_dec
    def chebyshev_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None,
                           bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.
            bltable:      name of a baseline table where fitting results
                          (coefficients, rms, etc.) are to be written.
                          if given, fitting results will NOT be output to
                          scantable (insitu=True) or None will be
                          returned (insitu=False).
                          (default is "": no table output)

        Example:
            # return a scan baselined by a Chebyshev polynomial of order 5,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the caller's arguments, recorded in history below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order,
                                         clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile,
                                         bltable)
            workscan._add_history("chebyshev_baseline", varlist)

            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4030
4031 @asaplog_post_dec
4032 def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
4033 clipthresh=None, clipniter=None,
4034 edge=None, threshold=None, chan_avg_limit=None,
4035 getresidual=None, plot=None,
4036 showprogress=None, minnrow=None, outlog=None,
4037 blfile=None, csvformat=None, bltable=None):
4038 """\
4039 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
4040 Spectral lines are detected first using linefinder and masked out
4041 to avoid them affecting the baseline solution.
4042
4043 Parameters:
4044 mask: an optional mask retreived from scantable
4045 order: the maximum order of Chebyshev polynomial (default is 5)
4046 insitu: if False a new scantable is returned.
4047 Otherwise, the scaling is done in-situ
4048 The default is taken from .asaprc (False)
4049 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4050 clipniter: maximum number of iteration of 'clipthresh'-sigma
4051 clipping (default is 0)
4052 edge: an optional number of channel to drop at
4053 the edge of spectrum. If only one value is
4054 specified, the same number will be dropped
4055 from both sides of the spectrum. Default
4056 is to keep all channels. Nested tuples
4057 represent individual edge selection for
4058 different IFs (a number of spectral channels
4059 can be different)
4060 threshold: the threshold used by line finder. It is
4061 better to keep it large as only strong lines
4062 affect the baseline solution.
4063 chan_avg_limit: a maximum number of consequtive spectral
4064 channels to average during the search of
4065 weak and broad lines. The default is no
4066 averaging (and no search for weak lines).
4067 If such lines can affect the fitted baseline
4068 (e.g. a high order polynomial is fitted),
4069 increase this parameter (usually values up
4070 to 8 are reasonable). Most users of this
4071 method should find the default value sufficient.
4072 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
4073 plot the fit and the residual. In this each
4074 indivual fit has to be approved, by typing 'y'
4075 or 'n'
4076 getresidual: if False, returns best-fit values instead of
4077 residual. (default is True)
4078 showprogress: show progress status for large data.
4079 default is True.
4080 minnrow: minimum number of input spectra to show.
4081 default is 1000.
4082 outlog: Output the coefficients of the best-fit
4083 function to logger (default is False)
4084 blfile: Name of a text file in which the best-fit
4085 parameter values to be written
4086 (default is "": no file/logger output)
4087 csvformat: if True blfile is csv-formatted, default is False.
4088 bltable: name of a baseline table where fitting results
4089 (coefficients, rms, etc.) are to be written.
4090 if given, fitting results will NOT be output to
4091 scantable (insitu=True) or None will be
4092 returned (insitu=False).
4093 (default is "": no table output)
4094
4095 Example:
4096 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
4097
4098 Note:
4099 The best-fit parameter values output in logger and/or blfile are now
4100 based on specunit of 'channel'.
4101 """
4102
4103 try:
4104 varlist = vars()
4105
4106 if insitu is None: insitu = rcParams['insitu']
4107 if insitu:
4108 workscan = self
4109 else:
4110 workscan = self.copy()
4111
4112 if mask is None: mask = []
4113 if order is None: order = 5
4114 if clipthresh is None: clipthresh = 3.0
4115 if clipniter is None: clipniter = 0
4116 if edge is None: edge = (0, 0)
4117 if threshold is None: threshold = 3
4118 if chan_avg_limit is None: chan_avg_limit = 1
4119 if plot is None: plot = False
4120 if getresidual is None: getresidual = True
4121 if showprogress is None: showprogress = True
4122 if minnrow is None: minnrow = 1000
4123 if outlog is None: outlog = False
4124 if blfile is None: blfile = ''
4125 if csvformat is None: csvformat = False
4126 if bltable is None: bltable = ''
4127
4128 scsvformat = 'T' if csvformat else 'F'
4129
4130 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4131 workscan._auto_chebyshev_baseline(mask, order,
4132 clipthresh, clipniter,
4133 normalise_edge_param(edge),
4134 threshold,
4135 chan_avg_limit, getresidual,
4136 pack_progress_params(showprogress,
4137 minnrow),
4138 outlog, scsvformat+blfile,
4139 bltable)
4140 workscan._add_history("auto_chebyshev_baseline", varlist)
4141
4142 if bltable == '':
4143 if insitu:
4144 self._assign(workscan)
4145 else:
4146 return workscan
4147 else:
4148 if not insitu:
4149 return None
4150
4151 except RuntimeError, e:
4152 raise_fitting_failure_exception(e)
4153
4154 @asaplog_post_dec
4155 def poly_baseline(self, mask=None, order=None, insitu=None,
4156 clipthresh=None, clipniter=None, plot=None,
4157 getresidual=None, showprogress=None, minnrow=None,
4158 outlog=None, blfile=None, csvformat=None,
4159 bltable=None):
4160 """\
4161 Return a scan which has been baselined (all rows) by a polynomial.
4162 Parameters:
4163 mask: an optional mask
4164 order: the order of the polynomial (default is 0)
4165 insitu: if False a new scantable is returned.
4166 Otherwise, the scaling is done in-situ
4167 The default is taken from .asaprc (False)
4168 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4169 clipniter: maximum number of iteration of 'clipthresh'-sigma
4170 clipping (default is 0)
4171 plot: plot the fit and the residual. In this each
4172 indivual fit has to be approved, by typing 'y'
4173 or 'n'
4174 getresidual: if False, returns best-fit values instead of
4175 residual. (default is True)
4176 showprogress: show progress status for large data.
4177 default is True.
4178 minnrow: minimum number of input spectra to show.
4179 default is 1000.
4180 outlog: Output the coefficients of the best-fit
4181 function to logger (default is False)
4182 blfile: Name of a text file in which the best-fit
4183 parameter values to be written
4184 (default is "": no file/logger output)
4185 csvformat: if True blfile is csv-formatted, default is False.
4186 bltable: name of a baseline table where fitting results
4187 (coefficients, rms, etc.) are to be written.
4188 if given, fitting results will NOT be output to
4189 scantable (insitu=True) or None will be
4190 returned (insitu=False).
4191 (default is "": no table output)
4192
4193 Example:
4194 # return a scan baselined by a third order polynomial,
4195 # not using a mask
4196 bscan = scan.poly_baseline(order=3)
4197 """
4198
4199 try:
4200 varlist = vars()
4201
4202 if insitu is None:
4203 insitu = rcParams["insitu"]
4204 if insitu:
4205 workscan = self
4206 else:
4207 workscan = self.copy()
4208
4209 if mask is None: mask = []
4210 if order is None: order = 0
4211 if clipthresh is None: clipthresh = 3.0
4212 if clipniter is None: clipniter = 0
4213 if plot is None: plot = False
4214 if getresidual is None: getresidual = True
4215 if showprogress is None: showprogress = True
4216 if minnrow is None: minnrow = 1000
4217 if outlog is None: outlog = False
4218 if blfile is None: blfile = ''
4219 if csvformat is None: csvformat = False
4220 if bltable is None: bltable = ''
4221
4222 scsvformat = 'T' if csvformat else 'F'
4223
4224 if plot:
4225 outblfile = (blfile != "") and \
4226 os.path.exists(os.path.expanduser(
4227 os.path.expandvars(blfile))
4228 )
4229 if outblfile:
4230 blf = open(blfile, "a")
4231
4232 f = fitter()
4233 f.set_function(lpoly=order)
4234
4235 rows = xrange(workscan.nrow())
4236 #if len(rows) > 0: workscan._init_blinfo()
4237
4238 action = "H"
4239 for r in rows:
4240 f.x = workscan._getabcissa(r)
4241 f.y = workscan._getspectrum(r)
4242 if mask:
4243 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4244 else: # mask=None
4245 f.mask = workscan._getmask(r)
4246
4247 f.data = None
4248 f.fit()
4249
4250 if action != "Y": # skip plotting when accepting all
4251 f.plot(residual=True)
4252 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4253 #if accept_fit.upper() == "N":
4254 # #workscan._append_blinfo(None, None, None)
4255 # continue
4256 accept_fit = self._get_verify_action("Accept fit?",action)
4257 if r == 0: action = None
4258 if accept_fit.upper() == "N":
4259 continue
4260 elif accept_fit.upper() == "R":
4261 break
4262 elif accept_fit.upper() == "A":
4263 action = "Y"
4264
4265 blpars = f.get_parameters()
4266 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4267 #workscan._append_blinfo(blpars, masklist, f.mask)
4268 workscan._setspectrum((f.fitter.getresidual()
4269 if getresidual else f.fitter.getfit()), r)
4270
4271 if outblfile:
4272 rms = workscan.get_rms(f.mask, r)
4273 dataout = \
4274 workscan.format_blparams_row(blpars["params"],
4275 blpars["fixed"],
4276 rms, str(masklist),
4277 r, True, csvformat)
4278 blf.write(dataout)
4279
4280 f._p.unmap()
4281 f._p = None
4282
4283 if outblfile:
4284 blf.close()
4285 else:
4286 workscan._poly_baseline(mask, order,
4287 clipthresh, clipniter, #
4288 getresidual,
4289 pack_progress_params(showprogress,
4290 minnrow),
4291 outlog, scsvformat+blfile,
4292 bltable) #
4293
4294 workscan._add_history("poly_baseline", varlist)
4295
4296 if insitu:
4297 self._assign(workscan)
4298 else:
4299 return workscan
4300
4301 except RuntimeError, e:
4302 raise_fitting_failure_exception(e)
4303
4304 @asaplog_post_dec
4305 def auto_poly_baseline(self, mask=None, order=None, insitu=None,
4306 clipthresh=None, clipniter=None,
4307 edge=None, threshold=None, chan_avg_limit=None,
4308 getresidual=None, plot=None,
4309 showprogress=None, minnrow=None, outlog=None,
4310 blfile=None, csvformat=None, bltable=None):
4311 """\
4312 Return a scan which has been baselined (all rows) by a polynomial.
4313 Spectral lines are detected first using linefinder and masked out
4314 to avoid them affecting the baseline solution.
4315
4316 Parameters:
4317 mask: an optional mask retreived from scantable
4318 order: the order of the polynomial (default is 0)
4319 insitu: if False a new scantable is returned.
4320 Otherwise, the scaling is done in-situ
4321 The default is taken from .asaprc (False)
4322 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4323 clipniter: maximum number of iteration of 'clipthresh'-sigma
4324 clipping (default is 0)
4325 edge: an optional number of channel to drop at
4326 the edge of spectrum. If only one value is
4327 specified, the same number will be dropped
4328 from both sides of the spectrum. Default
4329 is to keep all channels. Nested tuples
4330 represent individual edge selection for
4331 different IFs (a number of spectral channels
4332 can be different)
4333 threshold: the threshold used by line finder. It is
4334 better to keep it large as only strong lines
4335 affect the baseline solution.
4336 chan_avg_limit: a maximum number of consequtive spectral
4337 channels to average during the search of
4338 weak and broad lines. The default is no
4339 averaging (and no search for weak lines).
4340 If such lines can affect the fitted baseline
4341 (e.g. a high order polynomial is fitted),
4342 increase this parameter (usually values up
4343 to 8 are reasonable). Most users of this
4344 method should find the default value sufficient.
4345 plot: plot the fit and the residual. In this each
4346 indivual fit has to be approved, by typing 'y'
4347 or 'n'
4348 getresidual: if False, returns best-fit values instead of
4349 residual. (default is True)
4350 showprogress: show progress status for large data.
4351 default is True.
4352 minnrow: minimum number of input spectra to show.
4353 default is 1000.
4354 outlog: Output the coefficients of the best-fit
4355 function to logger (default is False)
4356 blfile: Name of a text file in which the best-fit
4357 parameter values to be written
4358 (default is "": no file/logger output)
4359 csvformat: if True blfile is csv-formatted, default is False.
4360 bltable: name of a baseline table where fitting results
4361 (coefficients, rms, etc.) are to be written.
4362 if given, fitting results will NOT be output to
4363 scantable (insitu=True) or None will be
4364 returned (insitu=False).
4365 (default is "": no table output)
4366
4367 Example:
4368 bscan = scan.auto_poly_baseline(order=7, insitu=False)
4369 """
4370
4371 try:
4372 varlist = vars()
4373
4374 if insitu is None:
4375 insitu = rcParams['insitu']
4376 if insitu:
4377 workscan = self
4378 else:
4379 workscan = self.copy()
4380
4381 if mask is None: mask = []
4382 if order is None: order = 0
4383 if clipthresh is None: clipthresh = 3.0
4384 if clipniter is None: clipniter = 0
4385 if edge is None: edge = (0, 0)
4386 if threshold is None: threshold = 3
4387 if chan_avg_limit is None: chan_avg_limit = 1
4388 if plot is None: plot = False
4389 if getresidual is None: getresidual = True
4390 if showprogress is None: showprogress = True
4391 if minnrow is None: minnrow = 1000
4392 if outlog is None: outlog = False
4393 if blfile is None: blfile = ''
4394 if csvformat is None: csvformat = False
4395 if bltable is None: bltable = ''
4396
4397 scsvformat = 'T' if csvformat else 'F'
4398
4399 edge = normalise_edge_param(edge)
4400
4401 if plot:
4402 outblfile = (blfile != "") and \
4403 os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
4404 if outblfile: blf = open(blfile, "a")
4405
4406 from asap.asaplinefind import linefinder
4407 fl = linefinder()
4408 fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
4409 fl.set_scan(workscan)
4410
4411 f = fitter()
4412 f.set_function(lpoly=order)
4413
4414 rows = xrange(workscan.nrow())
4415 #if len(rows) > 0: workscan._init_blinfo()
4416
4417 action = "H"
4418 for r in rows:
4419 idx = 2*workscan.getif(r)
4420 if mask:
4421 msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4422 else: # mask=None
4423 msk = workscan._getmask(r)
4424 fl.find_lines(r, msk, edge[idx:idx+2])
4425
4426 f.x = workscan._getabcissa(r)
4427 f.y = workscan._getspectrum(r)
4428 f.mask = fl.get_mask()
4429 f.data = None
4430 f.fit()
4431
4432 if action != "Y": # skip plotting when accepting all
4433 f.plot(residual=True)
4434 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4435 accept_fit = self._get_verify_action("Accept fit?",action)
4436 if r == 0: action = None
4437 if accept_fit.upper() == "N":
4438 #workscan._append_blinfo(None, None, None)
4439 continue
4440 elif accept_fit.upper() == "R":
4441 break
4442 elif accept_fit.upper() == "A":
4443 action = "Y"
4444
4445 blpars = f.get_parameters()
4446 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4447 #workscan._append_blinfo(blpars, masklist, f.mask)
4448 workscan._setspectrum(
4449 (f.fitter.getresidual() if getresidual
4450 else f.fitter.getfit()), r
4451 )
4452
4453 if outblfile:
4454 rms = workscan.get_rms(f.mask, r)
4455 dataout = \
4456 workscan.format_blparams_row(blpars["params"],
4457 blpars["fixed"],
4458 rms, str(masklist),
4459 r, True, csvformat)
4460 blf.write(dataout)
4461
4462 f._p.unmap()
4463 f._p = None
4464
4465 if outblfile: blf.close()
4466 else:
4467 workscan._auto_poly_baseline(mask, order,
4468 clipthresh, clipniter,
4469 edge, threshold,
4470 chan_avg_limit, getresidual,
4471 pack_progress_params(showprogress,
4472 minnrow),
4473 outlog, scsvformat+blfile,
4474 bltable)
4475 workscan._add_history("auto_poly_baseline", varlist)
4476
4477 if bltable == '':
4478 if insitu:
4479 self._assign(workscan)
4480 else:
4481 return workscan
4482 else:
4483 if not insitu:
4484 return None
4485
4486 except RuntimeError, e:
4487 raise_fitting_failure_exception(e)
4488
4489 def _init_blinfo(self):
4490 """\
4491 Initialise the following three auxiliary members:
4492 blpars : parameters of the best-fit baseline,
4493 masklists : mask data (edge positions of masked channels) and
4494 actualmask : mask data (in boolean list),
4495 to keep for use later (including output to logger/text files).
4496 Used by poly_baseline() and auto_poly_baseline() in case of
4497 'plot=True'.
4498 """
4499 self.blpars = []
4500 self.masklists = []
4501 self.actualmask = []
4502 return
4503
4504 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4505 """\
4506 Append baseline-fitting related info to blpars, masklist and
4507 actualmask.
4508 """
4509 self.blpars.append(data_blpars)
4510 self.masklists.append(data_masklists)
4511 self.actualmask.append(data_actualmask)
4512 return
4513
4514 @asaplog_post_dec
4515 def rotate_linpolphase(self, angle):
4516 """\
4517 Rotate the phase of the complex polarization O=Q+iU correlation.
4518 This is always done in situ in the raw data. So if you call this
4519 function more than once then each call rotates the phase further.
4520
4521 Parameters:
4522
4523 angle: The angle (degrees) to rotate (add) by.
4524
4525 Example::
4526
4527 scan.rotate_linpolphase(2.3)
4528
4529 """
4530 varlist = vars()
4531 self._math._rotate_linpolphase(self, angle)
4532 self._add_history("rotate_linpolphase", varlist)
4533 return
4534
4535 @asaplog_post_dec
4536 def rotate_xyphase(self, angle):
4537 """\
4538 Rotate the phase of the XY correlation. This is always done in situ
4539 in the data. So if you call this function more than once
4540 then each call rotates the phase further.
4541
4542 Parameters:
4543
4544 angle: The angle (degrees) to rotate (add) by.
4545
4546 Example::
4547
4548 scan.rotate_xyphase(2.3)
4549
4550 """
4551 varlist = vars()
4552 self._math._rotate_xyphase(self, angle)
4553 self._add_history("rotate_xyphase", varlist)
4554 return
4555
4556 @asaplog_post_dec
4557 def swap_linears(self):
4558 """\
4559 Swap the linear polarisations XX and YY, or better the first two
4560 polarisations as this also works for ciculars.
4561 """
4562 varlist = vars()
4563 self._math._swap_linears(self)
4564 self._add_history("swap_linears", varlist)
4565 return
4566
4567 @asaplog_post_dec
4568 def invert_phase(self):
4569 """\
4570 Invert the phase of the complex polarisation
4571 """
4572 varlist = vars()
4573 self._math._invert_phase(self)
4574 self._add_history("invert_phase", varlist)
4575 return
4576
4577 @asaplog_post_dec
4578 def add(self, offset, insitu=None):
4579 """\
4580 Return a scan where all spectra have the offset added
4581
4582 Parameters:
4583
4584 offset: the offset
4585
4586 insitu: if False a new scantable is returned.
4587 Otherwise, the scaling is done in-situ
4588 The default is taken from .asaprc (False)
4589
4590 """
4591 if insitu is None: insitu = rcParams['insitu']
4592 self._math._setinsitu(insitu)
4593 varlist = vars()
4594 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4595 s._add_history("add", varlist)
4596 if insitu:
4597 self._assign(s)
4598 else:
4599 return s
4600
4601 @asaplog_post_dec
4602 def scale(self, factor, tsys=True, insitu=None):
4603 """\
4604
4605 Return a scan where all spectra are scaled by the given 'factor'
4606
4607 Parameters:
4608
4609 factor: the scaling factor (float or 1D float list)
4610
4611 insitu: if False a new scantable is returned.
4612 Otherwise, the scaling is done in-situ
4613 The default is taken from .asaprc (False)
4614
4615 tsys: if True (default) then apply the operation to Tsys
4616 as well as the data
4617
4618 """
4619 if insitu is None: insitu = rcParams['insitu']
4620 self._math._setinsitu(insitu)
4621 varlist = vars()
4622 s = None
4623 import numpy
4624 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4625 if isinstance(factor[0], list) or isinstance(factor[0],
4626 numpy.ndarray):
4627 from asapmath import _array2dOp
4628 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4629 else:
4630 s = scantable( self._math._arrayop( self, factor,
4631 "MUL", tsys ) )
4632 else:
4633 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4634 s._add_history("scale", varlist)
4635 if insitu:
4636 self._assign(s)
4637 else:
4638 return s
4639
4640 @preserve_selection
4641 def set_sourcetype(self, match, matchtype="pattern",
4642 sourcetype="reference"):
4643 """\
4644 Set the type of the source to be an source or reference scan
4645 using the provided pattern.
4646
4647 Parameters:
4648
4649 match: a Unix style pattern, regular expression or selector
4650
4651 matchtype: 'pattern' (default) UNIX style pattern or
4652 'regex' regular expression
4653
4654 sourcetype: the type of the source to use (source/reference)
4655
4656 """
4657 varlist = vars()
4658 stype = -1
4659 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4660 stype = 1
4661 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4662 stype = 0
4663 else:
4664 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4665 if matchtype.lower().startswith("p"):
4666 matchtype = "pattern"
4667 elif matchtype.lower().startswith("r"):
4668 matchtype = "regex"
4669 else:
4670 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4671 sel = selector()
4672 if isinstance(match, selector):
4673 sel = match
4674 else:
4675 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4676 self.set_selection(sel)
4677 self._setsourcetype(stype)
4678 self._add_history("set_sourcetype", varlist)
4679
4680
4681 def set_sourcename(self, name):
4682 varlist = vars()
4683 self._setsourcename(name)
4684 self._add_history("set_sourcename", varlist)
4685
4686 @asaplog_post_dec
4687 @preserve_selection
4688 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4689 """\
4690 This function allows to build quotients automatically.
4691 It assumes the observation to have the same number of
4692 "ons" and "offs"
4693
4694 Parameters:
4695
4696 preserve: you can preserve (default) the continuum or
4697 remove it. The equations used are
4698
4699 preserve: Output = Toff * (on/off) - Toff
4700
4701 remove: Output = Toff * (on/off) - Ton
4702
4703 mode: the on/off detection mode
4704 'paired' (default)
4705 identifies 'off' scans by the
4706 trailing '_R' (Mopra/Parkes) or
4707 '_e'/'_w' (Tid) and matches
4708 on/off pairs from the observing pattern
4709 'time'
4710 finds the closest off in time
4711
4712 .. todo:: verify argument is not implemented
4713
4714 """
4715 varlist = vars()
4716 modes = ["time", "paired"]
4717 if not mode in modes:
4718 msg = "please provide valid mode. Valid modes are %s" % (modes)
4719 raise ValueError(msg)
4720 s = None
4721 if mode.lower() == "paired":
4722 from asap._asap import srctype
4723 sel = self.get_selection()
4724 #sel.set_query("SRCTYPE==psoff")
4725 sel.set_types(srctype.psoff)
4726 self.set_selection(sel)
4727 offs = self.copy()
4728 #sel.set_query("SRCTYPE==pson")
4729 sel.set_types(srctype.pson)
4730 self.set_selection(sel)
4731 ons = self.copy()
4732 s = scantable(self._math._quotient(ons, offs, preserve))
4733 elif mode.lower() == "time":
4734 s = scantable(self._math._auto_quotient(self, mode, preserve))
4735 s._add_history("auto_quotient", varlist)
4736 return s
4737
4738 @asaplog_post_dec
4739 def mx_quotient(self, mask = None, weight='median', preserve=True):
4740 """\
4741 Form a quotient using "off" beams when observing in "MX" mode.
4742
4743 Parameters:
4744
4745 mask: an optional mask to be used when weight == 'stddev'
4746
4747 weight: How to average the off beams. Default is 'median'.
4748
4749 preserve: you can preserve (default) the continuum or
4750 remove it. The equations used are:
4751
4752 preserve: Output = Toff * (on/off) - Toff
4753
4754 remove: Output = Toff * (on/off) - Ton
4755
4756 """
4757 mask = mask or ()
4758 varlist = vars()
4759 on = scantable(self._math._mx_extract(self, 'on'))
4760 preoff = scantable(self._math._mx_extract(self, 'off'))
4761 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4762 from asapmath import quotient
4763 q = quotient(on, off, preserve)
4764 q._add_history("mx_quotient", varlist)
4765 return q
4766
4767 @asaplog_post_dec
4768 def freq_switch(self, insitu=None):
4769 """\
4770 Apply frequency switching to the data.
4771
4772 Parameters:
4773
4774 insitu: if False a new scantable is returned.
4775 Otherwise, the swictching is done in-situ
4776 The default is taken from .asaprc (False)
4777
4778 """
4779 if insitu is None: insitu = rcParams['insitu']
4780 self._math._setinsitu(insitu)
4781 varlist = vars()
4782 s = scantable(self._math._freqswitch(self))
4783 s._add_history("freq_switch", varlist)
4784 if insitu:
4785 self._assign(s)
4786 else:
4787 return s
4788
4789 @asaplog_post_dec
4790 def recalc_azel(self):
4791 """Recalculate the azimuth and elevation for each position."""
4792 varlist = vars()
4793 self._recalcazel()
4794 self._add_history("recalc_azel", varlist)
4795 return
4796
4797 @asaplog_post_dec
4798 def __add__(self, other):
4799 """
4800 implicit on all axes and on Tsys
4801 """
4802 varlist = vars()
4803 s = self.__op( other, "ADD" )
4804 s._add_history("operator +", varlist)
4805 return s
4806
4807 @asaplog_post_dec
4808 def __sub__(self, other):
4809 """
4810 implicit on all axes and on Tsys
4811 """
4812 varlist = vars()
4813 s = self.__op( other, "SUB" )
4814 s._add_history("operator -", varlist)
4815 return s
4816
4817 @asaplog_post_dec
4818 def __mul__(self, other):
4819 """
4820 implicit on all axes and on Tsys
4821 """
4822 varlist = vars()
4823 s = self.__op( other, "MUL" ) ;
4824 s._add_history("operator *", varlist)
4825 return s
4826
4827
4828 @asaplog_post_dec
4829 def __div__(self, other):
4830 """
4831 implicit on all axes and on Tsys
4832 """
4833 varlist = vars()
4834 s = self.__op( other, "DIV" )
4835 s._add_history("operator /", varlist)
4836 return s
4837
4838 @asaplog_post_dec
4839 def __op( self, other, mode ):
4840 s = None
4841 if isinstance(other, scantable):
4842 s = scantable(self._math._binaryop(self, other, mode))
4843 elif isinstance(other, float):
4844 if other == 0.0:
4845 raise ZeroDivisionError("Dividing by zero is not recommended")
4846 s = scantable(self._math._unaryop(self, other, mode, False))
4847 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4848 if isinstance(other[0], list) \
4849 or isinstance(other[0], numpy.ndarray):
4850 from asapmath import _array2dOp
4851 s = _array2dOp( self, other, mode, False )
4852 else:
4853 s = scantable( self._math._arrayop( self, other,
4854 mode, False ) )
4855 else:
4856 raise TypeError("Other input is not a scantable or float value")
4857 return s
4858
4859 @asaplog_post_dec
4860 def get_fit(self, row=0):
4861 """\
4862 Print or return the stored fits for a row in the scantable
4863
4864 Parameters:
4865
4866 row: the row which the fit has been applied to.
4867
4868 """
4869 if row > self.nrow():
4870 return
4871 from asap.asapfit import asapfit
4872 fit = asapfit(self._getfit(row))
4873 asaplog.push( '%s' %(fit) )
4874 return fit.as_dict()
4875
4876 @preserve_selection
4877 def flag_nans(self):
4878 """\
4879 Utility function to flag NaN values in the scantable.
4880 """
4881 import numpy
4882 basesel = self.get_selection()
4883 for i in range(self.nrow()):
4884 sel = self.get_row_selector(i)
4885 self.set_selection(basesel+sel)
4886 nans = numpy.isnan(self._getspectrum(0))
4887 if numpy.any(nans):
4888 bnans = [ bool(v) for v in nans]
4889 self.flag(bnans)
4890
4891 self.set_selection(basesel)
4892
4893 def get_row_selector(self, rowno):
4894 return selector(rows=[rowno])
4895
4896 def _add_history(self, funcname, parameters):
4897 if not rcParams['scantable.history']:
4898 return
4899 # create date
4900 sep = "##"
4901 from datetime import datetime
4902 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
4903 hist = dstr+sep
4904 hist += funcname+sep#cdate+sep
4905 if parameters.has_key('self'):
4906 del parameters['self']
4907 for k, v in parameters.iteritems():
4908 if type(v) is dict:
4909 for k2, v2 in v.iteritems():
4910 hist += k2
4911 hist += "="
4912 if isinstance(v2, scantable):
4913 hist += 'scantable'
4914 elif k2 == 'mask':
4915 if isinstance(v2, list) or isinstance(v2, tuple):
4916 hist += str(self._zip_mask(v2))
4917 else:
4918 hist += str(v2)
4919 else:
4920 hist += str(v2)
4921 else:
4922 hist += k
4923 hist += "="
4924 if isinstance(v, scantable):
4925 hist += 'scantable'
4926 elif k == 'mask':
4927 if isinstance(v, list) or isinstance(v, tuple):
4928 hist += str(self._zip_mask(v))
4929 else:
4930 hist += str(v)
4931 else:
4932 hist += str(v)
4933 hist += sep
4934 hist = hist[:-2] # remove trailing '##'
4935 self._addhistory(hist)
4936
4937
4938 def _zip_mask(self, mask):
4939 mask = list(mask)
4940 i = 0
4941 segments = []
4942 while mask[i:].count(1):
4943 i += mask[i:].index(1)
4944 if mask[i:].count(0):
4945 j = i + mask[i:].index(0)
4946 else:
4947 j = len(mask)
4948 segments.append([i, j])
4949 i = j
4950 return segments
4951
4952 def _get_ordinate_label(self):
4953 fu = "("+self.get_fluxunit()+")"
4954 import re
4955 lbl = "Intensity"
4956 if re.match(".K.", fu):
4957 lbl = "Brightness Temperature "+ fu
4958 elif re.match(".Jy.", fu):
4959 lbl = "Flux density "+ fu
4960 return lbl
4961
4962 def _check_ifs(self):
4963# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4964 nchans = [self.nchan(i) for i in self.getifnos()]
4965 nchans = filter(lambda t: t > 0, nchans)
4966 return (sum(nchans)/len(nchans) == nchans[0])
4967
4968 @asaplog_post_dec
4969 def _fill(self, names, unit, average, opts={}):
4970 first = True
4971 fullnames = []
4972 for name in names:
4973 name = os.path.expandvars(name)
4974 name = os.path.expanduser(name)
4975 if not os.path.exists(name):
4976 msg = "File '%s' does not exists" % (name)
4977 raise IOError(msg)
4978 fullnames.append(name)
4979 if average:
4980 asaplog.push('Auto averaging integrations')
4981 stype = int(rcParams['scantable.storage'].lower() == 'disk')
4982 for name in fullnames:
4983 tbl = Scantable(stype)
4984 if is_ms( name ):
4985 r = msfiller( tbl )
4986 else:
4987 r = filler( tbl )
4988 msg = "Importing %s..." % (name)
4989 asaplog.push(msg, False)
4990 r.open(name, opts)
4991 rx = rcParams['scantable.reference']
4992 r.setreferenceexpr(rx)
4993 r.fill()
4994 if average:
4995 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
4996 if not first:
4997 tbl = self._math._merge([self, tbl])
4998 Scantable.__init__(self, tbl)
4999 r.close()
5000 del r, tbl
5001 first = False
5002 #flush log
5003 asaplog.post()
5004 if unit is not None:
5005 self.set_fluxunit(unit)
5006 if not is_casapy():
5007 self.set_freqframe(rcParams['scantable.freqframe'])
5008
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve an interactive verification choice.

        When 'action' is None or not a string, the user is prompted via
        raw_input with 'msg'; otherwise the supplied string is interpreted
        directly.

        Returns one of:
            'Y' : yes for the current data (default for empty input, and
                  the silent fallback for any unrecognised response)
            'N' : no for the current data
            'A' : accept all following data and stop verifying
            'R' : reject all following data and stop verifying
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # plain Enter means accept
            return "Y"
        elif (action.upper()[0] in valid_act):
            # only the first character matters, case-insensitively
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            # show the help text, then re-prompt (recurse without an action)
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            return self._get_verify_action(msg)
        else:
            # any unrecognised response silently falls back to accept
            return 'Y'
5027
5028 def __getitem__(self, key):
5029 if key < 0:
5030 key += self.nrow()
5031 if key >= self.nrow():
5032 raise IndexError("Row index out of range.")
5033 return self._getspectrum(key)
5034
5035 def __setitem__(self, key, value):
5036 if key < 0:
5037 key += self.nrow()
5038 if key >= self.nrow():
5039 raise IndexError("Row index out of range.")
5040 if not hasattr(value, "__len__") or \
5041 len(value) > self.nchan(self.getif(key)):
5042 raise ValueError("Spectrum length doesn't match.")
5043 return self._setspectrum(value, key)
5044
5045 def __len__(self):
5046 return self.nrow()
5047
5048 def __iter__(self):
5049 for i in range(len(self)):
5050 yield self[i]
Note: See TracBrowser for help on using the repository browser.