source: trunk/python/scantable.py@ 2889

Last change on this file since 2889 was 2889, checked in by WataruKawasaki, 12 years ago

New Development: No

JIRA Issue: Yes CAS-5859

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes:

Module(s): sd

Description: modified scantable.get_frequency_by_velocity() so that doppler parameter is correctly treated.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 201.9 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # a scantable is a casa table directory containing these two files
    looks_like_table = (os.path.isdir(filename)
                        and os.path.exists(filename+'/table.info')
                        and os.path.exists(filename+'/table.dat'))
    if not looks_like_table:
        return False
    info = open(filename+'/table.info')
    try:
        first_line = info.readline()
    finally:
        info.close()
    # the Type entry may be blank for an on-disk scantable
    match_pattern = '^Type = (Scantable)? *$'
    if re.match(match_pattern, first_line):
        return True
    else:
        return False
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # an MS is also a casa table directory with the same marker files
    looks_like_table = (os.path.isdir(filename)
                        and os.path.exists(filename+'/table.info')
                        and os.path.exists(filename+'/table.dat'))
    if not looks_like_table:
        return False
    info = open(filename+'/table.info')
    try:
        first_line = info.readline()
    finally:
        info.close()
    if first_line.find('Measurement Set') != -1:
        return True
    else:
        return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                          or [[5,5],10], but can NOT be simplified to [5,10].
                          when [5,10] given, it is interpreted as
                          [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation: every element must be an int or a 2-element
    # list/tuple of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."


    # --- canonicalise scalars and short sequences to a flat pair ---
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # commonedge is True when 'edge' is a flat pair that applies to all
    # IFs; any nested list/tuple means a per-IF specification
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten the per-IF specification, expanding bare ints to pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: in verbose mode push the original error
    plus an explanatory note to the log, otherwise re-raise it as a
    RuntimeError carrying both messages.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-bar settings into the 'bool,int' string form passed
    to the C++ layer, e.g. (True, 1000) -> 'true,1000'.
    """
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
def pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary or a list of dictionaries of baseline info
    into a list of comma-separated strings.
    """
    if isinstance(blinfo, dict):
        packed = do_pack_blinfo(blinfo, maxirow)
        # an empty string means the row was out of range - drop it
        return [packed] if packed != '' else []
    elif isinstance(blinfo, (list, tuple)):
        res = []
        for entry in blinfo:
            packed = do_pack_blinfo(entry, maxirow)
            if packed != '':
                res.append(packed)
        return res
191
def do_pack_blinfo(blinfo, maxirow):
    """\
    convert a dictionary of baseline info for a spectrum into
    a comma-separated string.
    """
    dinfo = {}
    # mandatory keys; list/tuple values (incl. nested mask ranges) are
    # flattened into comma-separated strings
    for key in ['row', 'blfunc', 'masklist']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if key == 'row':
                irow = val
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    if isinstance(val[i], list) or isinstance(val[i], tuple):
                        # flatten a nested [min,max] pair
                        for j in xrange(len(val[i])):
                            slval.append(str(val[i][j]))
                    else:
                        slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)

            dinfo[key] = sval
        else:
            raise ValueError("'"+key+"' is missing in blinfo.")

    # rows beyond the scantable are skipped by returning an empty string
    if irow >= maxirow: return ''

    # optional order-like parameters (which one matters depends on blfunc)
    for key in ['order', 'npiece', 'nwave']:
        if blinfo.has_key(key):
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    blfunc = dinfo['blfunc']
    # maps each baseline function to the order parameter it requires
    fspec_keys = {'poly': 'order', 'chebyshev': 'order', 'cspline': 'npiece', 'sinusoid': 'nwave'}

    fspec_key = fspec_keys[blfunc]
    if not blinfo.has_key(fspec_key):
        raise ValueError("'"+fspec_key+"' is missing in blinfo.")

    # n-sigma clipping: both parameters must be given, or neither
    clip_params_n = 0
    for key in ['clipthresh', 'clipniter']:
        if blinfo.has_key(key):
            clip_params_n += 1
            dinfo[key] = str(blinfo[key])

    if clip_params_n == 0:
        dinfo['clipthresh'] = '0.0'
        dinfo['clipniter'] = '0'
    elif clip_params_n != 2:
        raise ValueError("both 'clipthresh' and 'clipniter' must be given for n-sigma clipping.")

    # linefinder: all three parameters must be given, or none
    lf_params_n = 0
    for key in ['thresh', 'edge', 'chan_avg_limit']:
        if blinfo.has_key(key):
            lf_params_n += 1
            val = blinfo[key]
            if isinstance(val, list) or isinstance(val, tuple):
                slval = []
                for i in xrange(len(val)):
                    slval.append(str(val[i]))
                sval = ",".join(slval)
            else:
                sval = str(val)
            dinfo[key] = sval

    if lf_params_n == 3:
        dinfo['use_linefinder'] = 'true'
    elif lf_params_n == 0:
        dinfo['use_linefinder'] = 'false'
        dinfo['thresh'] = ''
        dinfo['edge'] = ''
        dinfo['chan_avg_limit'] = ''
    else:
        raise ValueError("all of 'thresh', 'edge' and 'chan_avg_limit' must be given to use linefinder.")

    # NOTE: the fields themselves are joined with ':' (values inside a
    # field use ','), matching Scantable::parseFittingResults
    slblinfo = [dinfo['row'], blfunc, dinfo[fspec_key], dinfo['masklist'], \
                dinfo['clipthresh'], dinfo['clipniter'], \
                dinfo['use_linefinder'], dinfo['thresh'], dinfo['edge'], dinfo['chan_avg_limit']]

    return ":".join(slblinfo)
281
def parse_fitresult(sres):
    """\
    Parse the returned value of apply_bltable() or sub_baseline() and
    extract row number, the best-fit coefficients and rms, then pack
    them into a dictionary.
    The input value is generated by Scantable::packFittingResults() and
    formatted as 'row:coeff[0],coeff[1],..,coeff[n-1]:rms'.
    """
    res = []
    for entry in sres:
        srow, scoeff, srms = entry.split(":")
        res.append({'row': int(srow),
                    'coeff': [float(c) for c in scoeff.split(",")],
                    'rms': float(srms)})
    return res
302
def is_number(s):
    """Return True if string *s* (after stripping surrounding
    whitespace) parses as a float, else False.

    The original used a bare ``except`` with a ``return`` inside
    ``finally``, which silently swallowed *every* exception, including
    KeyboardInterrupt/SystemExit.  Only ValueError - the error float()
    raises for a non-numeric string - is treated as "not a number".
    """
    s = s.strip()
    try:
        float(s)
    except ValueError:
        return False
    return True
313
def is_frequency(s):
    """Return True if string *s* looks like a frequency, i.e. its unit
    ends in 'Hz' (case-insensitive).
    """
    return s.strip().lower().endswith("hz")
317
def get_freq_by_string(s1, s2):
    """Convert a (value, frequency-with-unit) string pair into a pair of
    floats scaled by the SI prefix found on the unit (e.g. 'GHz').
    Raises RuntimeError when the pair does not look like a frequency.
    """
    if not (is_number(s1) and is_frequency(s2)):
        raise RuntimeError("Invalid input string.")

    # parallel lists of SI prefixes and their scale factors; '.' stands
    # for "no prefix" (factor 1.0)
    prefix_list = ["a", "f", "p", "n", "u", "m", ".", "k", "M", "G", "T", "P", "E"]
    factor_list = [1e-18, 1e-15, 1e-12, 1e-9, 1e-6, 1e-3, 1.0, 1e+3, 1e+6, 1e+9, 1e+12, 1e+15, 1e+18]

    value_str = s1.strip()
    freq_str = s2.strip()

    # the character just before 'Hz' is the (optional) SI prefix
    prefix = freq_str[-3:-2]
    if is_number(prefix):
        # bare 'Hz' with no prefix
        res1 = float(value_str)
        res2 = float(freq_str[:-2])
    else:
        factor = factor_list[prefix_list.index(prefix)]
        res1 = float(value_str) * factor
        res2 = float(freq_str[:-3]) * factor

    return (res1, res2)
338
def is_velocity(s):
    """Return True if string *s* looks like a velocity, i.e. its unit
    ends in 'm/s' (case-insensitive).
    """
    return s.strip().lower().endswith("m/s")
342
def get_velocity_by_string(s1, s2):
    """Convert a (value, velocity-with-unit) string pair into a pair of
    floats expressed in km/s.  Raises RuntimeError when the pair does
    not look like a velocity.
    """
    if not (is_number(s1) and is_velocity(s2)):
        raise RuntimeError("Invalid input string.")

    # note that the default velocity unit is km/s
    prefix_list = [".", "k"]
    factor_list = [1e-3, 1.0]

    value_str = s1.strip()
    vel_str = s2.strip()

    # the character just before 'm/s' is the (optional) SI prefix
    prefix = vel_str[-4:-3]
    if is_number(prefix):
        # plain 'm/s' -> scale down to km/s
        res1 = float(value_str) * 1e-3
        res2 = float(vel_str[:-3]) * 1e-3
    else:
        factor = factor_list[prefix_list.index(prefix)]
        res1 = float(value_str) * factor
        res2 = float(vel_str[:-4]) * factor

    return (res1, res2)
364
def get_frequency_by_velocity(restfreq, vel, doppler):
    """Convert a velocity into a sky frequency for the given rest
    frequency using the requested doppler convention: 'radio',
    'optical', or anything else -> relativistic.
    """
    # vel is in unit of km/s

    # speed of light in km/s
    vel_c = 299792.458

    import math
    beta = vel / vel_c

    convention = doppler.lower()
    if convention == 'radio':
        return restfreq * (1.0 - beta)
    elif convention == 'optical':
        return restfreq / (1.0 + beta)
    else:
        return restfreq * math.sqrt((1.0 - beta) / (1.0 + beta))
380
381
382class scantable(Scantable):
383 """\
384 The ASAP container for scans (single-dish data).
385 """
386
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

            getpt:        Whether to import direction from MS/POINTING
                          table properly or not.
                          This is effective only when filename is MS.
                          The default (True) is to import direction
                          from MS/POINTING.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # keep the call arguments for the history record appended below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file is handed to the generic
                    # filler (rpfits/sdfits/NRO)
                    opts={'nro': {}}
                    nrokeys=['freqref']
                    for key in nrokeys:
                        if key in args.keys():
                            opts['nro'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list of raw files to be filled into one scantable
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
473
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by
                              class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the .asaprc default when no format is given
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the chosen format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 has its own dedicated writer
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # every other format goes through the generic scantable writer
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
532
533 def copy(self):
534 """Return a copy of this scantable.
535
536 *Note*:
537
538 This makes a full (deep) copy. scan2 = scan1 makes a reference.
539
540 Example::
541
542 copiedscan = scan.copy()
543
544 """
545 sd = scantable(Scantable._copy(self))
546 return sd
547
548 def drop_scan(self, scanid=None):
549 """\
550 Return a new scantable where the specified scan number(s) has(have)
551 been dropped.
552
553 Parameters:
554
555 scanid: a (list of) scan number(s)
556
557 """
558 from asap import _is_sequence_or_number as _is_valid
559 from asap import _to_list
560 from asap import unique
561 if not _is_valid(scanid):
562 raise RuntimeError( 'Please specify a scanno to drop from the'
563 ' scantable' )
564 scanid = _to_list(scanid)
565 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
566 for sid in scanid: allscans.remove(sid)
567 if len(allscans) == 0:
568 raise ValueError("Can't remove all scans")
569 sel = selector(scans=allscans)
570 return self._select_copy(sel)
571
572 def _select_copy(self, selection):
573 orig = self.get_selection()
574 self.set_selection(orig+selection)
575 cp = self.copy()
576 self.set_selection(orig)
577 return cp
578
579 def get_scan(self, scanid=None):
580 """\
581 Return a specific scan (by scanno) or collection of scans (by
582 source name) in a new scantable.
583
584 *Note*:
585
586 See scantable.drop_scan() for the inverse operation.
587
588 Parameters:
589
590 scanid: a (list of) scanno or a source name, unix-style
591 patterns are accepted for source name matching, e.g.
592 '*_R' gets all 'ref scans
593
594 Example::
595
596 # get all scans containing the source '323p459'
597 newscan = scan.get_scan('323p459')
598 # get all 'off' scans
599 refscans = scan.get_scan('*_R')
600 # get a susbset of scans by scanno (as listed in scan.summary())
601 newscan = scan.get_scan([0, 2, 7, 10])
602
603 """
604 if scanid is None:
605 raise RuntimeError( 'Please specify a scan no or name to '
606 'retrieve from the scantable' )
607 try:
608 bsel = self.get_selection()
609 sel = selector()
610 if type(scanid) is str:
611 sel.set_name(scanid)
612 return self._select_copy(sel)
613 elif type(scanid) is int:
614 sel.set_scans([scanid])
615 return self._select_copy(sel)
616 elif type(scanid) is list:
617 sel.set_scans(scanid)
618 return self._select_copy(sel)
619 else:
620 msg = "Illegal scanid type, use 'int' or 'list' if ints."
621 raise TypeError(msg)
622 except RuntimeError:
623 raise
624
625 def __str__(self):
626 tempFile = tempfile.NamedTemporaryFile()
627 Scantable._summary(self, tempFile.name)
628 tempFile.seek(0)
629 asaplog.clear()
630 return tempFile.file.read()
631
632 @asaplog_post_dec
633 def summary(self, filename=None):
634 """\
635 Print a summary of the contents of this scantable.
636
637 Parameters:
638
639 filename: the name of a file to write the putput to
640 Default - no file output
641
642 """
643 if filename is not None:
644 if filename is "":
645 filename = 'scantable_summary.txt'
646 from os.path import expandvars, isdir
647 filename = expandvars(filename)
648 if isdir(filename):
649 msg = "Illegal file name '%s'." % (filename)
650 raise IOError(msg)
651 else:
652 filename = ""
653 Scantable._summary(self, filename)
654
655 def get_spectrum(self, rowno):
656 """Return the spectrum for the current row in the scantable as a list.
657
658 Parameters:
659
660 rowno: the row number to retrieve the spectrum from
661
662 """
663 return self._getspectrum(rowno)
664
665 def get_mask(self, rowno):
666 """Return the mask for the current row in the scantable as a list.
667
668 Parameters:
669
670 rowno: the row number to retrieve the mask from
671
672 """
673 return self._getmask(rowno)
674
675 def set_spectrum(self, spec, rowno):
676 """Set the spectrum for the current row in the scantable.
677
678 Parameters:
679
680 spec: the new spectrum
681
682 rowno: the row number to set the spectrum for
683
684 """
685 assert(len(spec) == self.nchan(self.getif(rowno)))
686 return self._setspectrum(spec, rowno)
687
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python-side convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
710
711 def get_selection(self):
712 """\
713 Get the selection object currently set on this scantable.
714
715 Example::
716
717 sel = scan.get_selection()
718 sel.set_ifs(0) # select IF 0
719 scan.set_selection(sel) # apply modified selection
720
721 """
722 return selector(self._getselection())
723
724 def set_selection(self, selection=None, **kw):
725 """\
726 Select a subset of the data. All following operations on this scantable
727 are only applied to thi selection.
728
729 Parameters:
730
731 selection: a selector object (default unset the selection), or
732 any combination of 'pols', 'ifs', 'beams', 'scans',
733 'cycles', 'name', 'query'
734
735 Examples::
736
737 sel = selector() # create a selection object
738 self.set_scans([0, 3]) # select SCANNO 0 and 3
739 scan.set_selection(sel) # set the selection
740 scan.summary() # will only print summary of scanno 0 an 3
741 scan.set_selection() # unset the selection
742 # or the equivalent
743 scan.set_selection(scans=[0,3])
744 scan.summary() # will only print summary of scanno 0 an 3
745 scan.set_selection() # unset the selection
746
747 """
748 if selection is None:
749 # reset
750 if len(kw) == 0:
751 selection = selector()
752 else:
753 # try keywords
754 for k in kw:
755 if k not in selector.fields:
756 raise KeyError("Invalid selection key '%s', "
757 "valid keys are %s" % (k,
758 selector.fields))
759 selection = selector(**kw)
760 self._setselection(selection)
761
762 def get_row(self, row=0, insitu=None):
763 """\
764 Select a row in the scantable.
765 Return a scantable with single row.
766
767 Parameters:
768
769 row: row no of integration, default is 0.
770 insitu: if False a new scantable is returned. Otherwise, the
771 scaling is done in-situ. The default is taken from .asaprc
772 (False)
773
774 """
775 if insitu is None:
776 insitu = rcParams['insitu']
777 if not insitu:
778 workscan = self.copy()
779 else:
780 workscan = self
781 # Select a row
782 sel = selector()
783 sel.set_rows([row])
784 workscan.set_selection(sel)
785 if not workscan.nrow() == 1:
786 msg = "Could not identify single row. %d rows selected." \
787 % (workscan.nrow())
788 raise RuntimeError(msg)
789 if insitu:
790 self._assign(workscan)
791 else:
792 return workscan
793
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/vel) of
        # the extremum rather than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max additionally report the channel where they occur
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build one formatted report line per processed row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # the abscissa itself is the statistic value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # the report is round-tripped through a per-user temp file
        # before being pushed to the log
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
901
902 def chan2data(self, rowno=0, chan=0):
903 """\
904 Returns channel/frequency/velocity and spectral value
905 at an arbitrary row and channel in the scantable.
906
907 Parameters:
908
909 rowno: a row number in the scantable. Default is the
910 first row, i.e. rowno=0
911
912 chan: a channel in the scantable. Default is the first
913 channel, i.e. pos=0
914
915 """
916 if isinstance(rowno, int) and isinstance(chan, int):
917 qx = {'unit': self.get_unit(),
918 'value': self._getabcissa(rowno)[chan]}
919 qy = {'unit': self.get_fluxunit(),
920 'value': self._getspectrum(rowno)[chan]}
921 return qx, qy
922
923 def stddev(self, mask=None):
924 """\
925 Determine the standard deviation of the current beam/if/pol
926 Takes a 'mask' as an optional parameter to specify which
927 channels should be excluded.
928
929 Parameters:
930
931 mask: an optional mask specifying where the standard
932 deviation should be determined.
933
934 Example::
935
936 scan.set_unit('channel')
937 msk = scan.create_mask([100, 200], [500, 600])
938 scan.stddev(mask=m)
939
940 """
941 return self.stats(stat='stddev', mask=mask);
942
943
944 def get_column_names(self):
945 """\
946 Return a list of column names, which can be used for selection.
947 """
948 return list(Scantable.get_column_names(self))
949
950 def get_tsys(self, row=-1):
951 """\
952 Return the System temperatures.
953
954 Parameters:
955
956 row: the rowno to get the information for. (default all rows)
957
958 Returns:
959
960 a list of Tsys values for the current selection
961
962 """
963 if row > -1:
964 return self._get_column(self._gettsys, row)
965 return self._row_callback(self._gettsys, "Tsys")
966
967 def get_tsysspectrum(self, row=-1):
968 """\
969 Return the channel dependent system temperatures.
970
971 Parameters:
972
973 row: the rowno to get the information for. (default all rows)
974
975 Returns:
976
977 a list of Tsys values for the current selection
978
979 """
980 return self._get_column( self._gettsysspectrum, row )
981
982 def set_tsys(self, values, row=-1):
983 """\
984 Set the Tsys value(s) of the given 'row' or the whole scantable
985 (selection).
986
987 Parameters:
988
989 values: a scalar or list (if Tsys is a vector) of Tsys value(s)
990 row: the row number to apply Tsys values to.
991 (default all rows)
992
993 """
994
995 if not hasattr(values, "__len__"):
996 values = [values]
997 self._settsys(values, row)
998
999 def get_weather(self, row=-1):
1000 """\
1001 Return the weather informations.
1002
1003 Parameters:
1004
1005 row: the rowno to get the information for. (default all rows)
1006
1007 Returns:
1008
1009 a dict or list of of dicts of values for the current selection
1010
1011 """
1012
1013 values = self._get_column(self._get_weather, row)
1014 if row > -1:
1015 return {'temperature': values[0],
1016 'pressure': values[1], 'humidity' : values[2],
1017 'windspeed' : values[3], 'windaz' : values[4]
1018 }
1019 else:
1020 out = []
1021 for r in values:
1022
1023 out.append({'temperature': r[0],
1024 'pressure': r[1], 'humidity' : r[2],
1025 'windspeed' : r[3], 'windaz' : r[4]
1026 })
1027 return out
1028
    def _row_callback(self, callback, label):
        # Apply 'callback' to every row, push a per-row report tagged
        # with 'label' to the log, and return the list of results.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
1052
1053 def _get_column(self, callback, row=-1, *args):
1054 """
1055 """
1056 if row == -1:
1057 return [callback(i, *args) for i in range(self.nrow())]
1058 else:
1059 if 0 <= row < self.nrow():
1060 return callback(row, *args)
1061
1062
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the trailing time fields that are below the
            # requested precision from the strptime format
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
1108
1109
1110 def get_inttime(self, row=-1):
1111 """\
1112 Get a list of integration times for the observations.
1113 Return a time in seconds for each integration in the scantable.
1114
1115 Parameters:
1116
1117 row: row no of integration. Default -1 return all rows.
1118
1119 """
1120 return self._get_column(self._getinttime, row)
1121
1122
1123 def get_sourcename(self, row=-1):
1124 """\
1125 Get a list source names for the observations.
1126 Return a string for each integration in the scantable.
1127 Parameters:
1128
1129 row: row no of integration. Default -1 return all rows.
1130
1131 """
1132 return self._get_column(self._getsourcename, row)
1133
1134 def get_elevation(self, row=-1):
1135 """\
1136 Get a list of elevations for the observations.
1137 Return a float for each integration in the scantable.
1138
1139 Parameters:
1140
1141 row: row no of integration. Default -1 return all rows.
1142
1143 """
1144 return self._get_column(self._getelevation, row)
1145
1146 def get_azimuth(self, row=-1):
1147 """\
1148 Get a list of azimuths for the observations.
1149 Return a float for each integration in the scantable.
1150
1151 Parameters:
1152 row: row no of integration. Default -1 return all rows.
1153
1154 """
1155 return self._get_column(self._getazimuth, row)
1156
1157 def get_parangle(self, row=-1):
1158 """\
1159 Get a list of parallactic angles for the observations.
1160 Return a float for each integration in the scantable.
1161
1162 Parameters:
1163
1164 row: row no of integration. Default -1 return all rows.
1165
1166 """
1167 return self._get_column(self._getparangle, row)
1168
1169 def get_direction(self, row=-1):
1170 """
1171 Get a list of Positions on the sky (direction) for the observations.
1172 Return a string for each integration in the scantable.
1173
1174 Parameters:
1175
1176 row: row no of integration. Default -1 return all rows
1177
1178 """
1179 return self._get_column(self._getdirection, row)
1180
1181 def get_directionval(self, row=-1):
1182 """\
1183 Get a list of Positions on the sky (direction) for the observations.
1184 Return a float for each integration in the scantable.
1185
1186 Parameters:
1187
1188 row: row no of integration. Default -1 return all rows
1189
1190 """
1191 return self._get_column(self._getdirectionvec, row)
1192
1193 @asaplog_post_dec
1194 def set_unit(self, unit='channel'):
1195 """\
1196 Set the unit for all following operations on this scantable
1197
1198 Parameters:
1199
1200 unit: optional unit, default is 'channel'. Use one of '*Hz',
1201 'km/s', 'channel' or equivalent ''
1202
1203 """
1204 varlist = vars()
1205 if unit in ['', 'pixel', 'channel']:
1206 unit = ''
1207 inf = list(self._getcoordinfo())
1208 inf[0] = unit
1209 self._setcoordinfo(inf)
1210 self._add_history("set_unit", varlist)
1211
1212 @asaplog_post_dec
1213 def set_instrument(self, instr):
1214 """\
1215 Set the instrument for subsequent processing.
1216
1217 Parameters:
1218
1219 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1220 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1221
1222 """
1223 self._setInstrument(instr)
1224 self._add_history("set_instument", vars())
1225
1226 @asaplog_post_dec
1227 def set_feedtype(self, feedtype):
1228 """\
1229 Overwrite the feed type, which might not be set correctly.
1230
1231 Parameters:
1232
1233 feedtype: 'linear' or 'circular'
1234
1235 """
1236 self._setfeedtype(feedtype)
1237 self._add_history("set_feedtype", vars())
1238
1239 @asaplog_post_dec
1240 def set_doppler(self, doppler='RADIO'):
1241 """\
1242 Set the doppler for all following operations on this scantable.
1243
1244 Parameters:
1245
1246 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1247
1248 """
1249 varlist = vars()
1250 inf = list(self._getcoordinfo())
1251 inf[2] = doppler
1252 self._setcoordinfo(inf)
1253 self._add_history("set_doppler", vars())
1254
1255 @asaplog_post_dec
1256 def set_freqframe(self, frame=None):
1257 """\
1258 Set the frame type of the Spectral Axis.
1259
1260 Parameters:
1261
1262 frame: an optional frame type, default 'LSRK'. Valid frames are:
1263 'TOPO', 'LSRD', 'LSRK', 'BARY',
1264 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1265
1266 Example::
1267
1268 scan.set_freqframe('BARY')
1269
1270 """
1271 frame = frame or rcParams['scantable.freqframe']
1272 varlist = vars()
1273 # "REST" is not implemented in casacore
1274 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1275 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1276 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1277 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1278
1279 if frame in valid:
1280 inf = list(self._getcoordinfo())
1281 inf[1] = frame
1282 self._setcoordinfo(inf)
1283 self._add_history("set_freqframe", varlist)
1284 else:
1285 msg = "Please specify a valid freq type. Valid types are:\n", valid
1286 raise TypeError(msg)
1287
1288 @asaplog_post_dec
1289 def set_dirframe(self, frame=""):
1290 """\
1291 Set the frame type of the Direction on the sky.
1292
1293 Parameters:
1294
1295 frame: an optional frame type, default ''. Valid frames are:
1296 'J2000', 'B1950', 'GALACTIC'
1297
1298 Example:
1299
1300 scan.set_dirframe('GALACTIC')
1301
1302 """
1303 varlist = vars()
1304 Scantable.set_dirframe(self, frame)
1305 self._add_history("set_dirframe", varlist)
1306
1307 def get_unit(self):
1308 """\
1309 Get the default unit set in this scantable
1310
1311 Returns:
1312
1313 A unit string
1314
1315 """
1316 inf = self._getcoordinfo()
1317 unit = inf[0]
1318 if unit == '': unit = 'channel'
1319 return unit
1320
1321 @asaplog_post_dec
1322 def get_abcissa(self, rowno=0):
1323 """\
1324 Get the abcissa in the current coordinate setup for the currently
1325 selected Beam/IF/Pol
1326
1327 Parameters:
1328
1329 rowno: an optional row number in the scantable. Default is the
1330 first row, i.e. rowno=0
1331
1332 Returns:
1333
1334 The abcissa values and the format string (as a dictionary)
1335
1336 """
1337 abc = self._getabcissa(rowno)
1338 lbl = self._getabcissalabel(rowno)
1339 return abc, lbl
1340
1341 @asaplog_post_dec
1342 def flag(self, mask=None, unflag=False, row=-1):
1343 """\
1344 Flag the selected data using an optional channel mask.
1345
1346 Parameters:
1347
1348 mask: an optional channel mask, created with create_mask. Default
1349 (no mask) is all channels.
1350
1351 unflag: if True, unflag the data
1352
1353 row: an optional row number in the scantable.
1354 Default -1 flags all rows
1355
1356 """
1357 varlist = vars()
1358 mask = mask or []
1359 self._flag(row, mask, unflag)
1360 self._add_history("flag", varlist)
1361
1362 @asaplog_post_dec
1363 def flag_row(self, rows=None, unflag=False):
1364 """\
1365 Flag the selected data in row-based manner.
1366
1367 Parameters:
1368
1369 rows: list of row numbers to be flagged. Default is no row
1370 (must be explicitly specified to execute row-based
1371 flagging).
1372
1373 unflag: if True, unflag the data.
1374
1375 """
1376 varlist = vars()
1377 if rows is None:
1378 rows = []
1379 self._flag_row(rows, unflag)
1380 self._add_history("flag_row", varlist)
1381
1382 @asaplog_post_dec
1383 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1384 """\
1385 Flag the selected data outside a specified range (in channel-base)
1386
1387 Parameters:
1388
1389 uthres: upper threshold.
1390
1391 dthres: lower threshold
1392
1393 clipoutside: True for flagging data outside the range
1394 [dthres:uthres].
1395 False for flagging data inside the range.
1396
1397 unflag: if True, unflag the data.
1398
1399 """
1400 varlist = vars()
1401 self._clip(uthres, dthres, clipoutside, unflag)
1402 self._add_history("clip", varlist)
1403
1404 @asaplog_post_dec
1405 def lag_flag(self, start, end, unit="MHz", insitu=None):
1406 """\
1407 Flag the data in 'lag' space by providing a frequency to remove.
1408 Flagged data in the scantable get interpolated over the region.
1409 No taper is applied.
1410
1411 Parameters:
1412
1413 start: the start frequency (really a period within the
1414 bandwidth) or period to remove
1415
1416 end: the end frequency or period to remove
1417
1418 unit: the frequency unit (default 'MHz') or '' for
1419 explicit lag channels
1420
1421 *Notes*:
1422
1423 It is recommended to flag edges of the band or strong
1424 signals beforehand.
1425
1426 """
1427 if insitu is None: insitu = rcParams['insitu']
1428 self._math._setinsitu(insitu)
1429 varlist = vars()
1430 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1431 if not (unit == "" or base.has_key(unit)):
1432 raise ValueError("%s is not a valid unit." % unit)
1433 if unit == "":
1434 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1435 else:
1436 s = scantable(self._math._lag_flag(self, start*base[unit],
1437 end*base[unit], "frequency"))
1438 s._add_history("lag_flag", varlist)
1439 if insitu:
1440 self._assign(s)
1441 else:
1442 return s
1443
1444 @asaplog_post_dec
1445 def fft(self, rowno=None, mask=None, getrealimag=False):
1446 """\
1447 Apply FFT to the spectra.
1448 Flagged data in the scantable get interpolated over the region.
1449
1450 Parameters:
1451
1452 rowno: The row number(s) to be processed. int, list
1453 and tuple are accepted. By default (None), FFT
1454 is applied to the whole data.
1455
1456 mask: Auxiliary channel mask(s). Given as a boolean
1457 list, it is applied to all specified rows.
1458 A list of boolean lists can also be used to
1459 apply different masks. In the latter case, the
1460 length of 'mask' must be the same as that of
1461 'rowno'. The default is None.
1462
1463 getrealimag: If True, returns the real and imaginary part
1464 values of the complex results.
1465 If False (the default), returns the amplitude
1466 (absolute value) normalised with Ndata/2 and
1467 phase (argument, in unit of radian).
1468
1469 Returns:
1470
1471 A list of dictionaries containing the results for each spectrum.
1472 Each dictionary contains two values, the real and the imaginary
1473 parts when getrealimag = True, or the amplitude(absolute value)
1474 and the phase(argument) when getrealimag = False. The key for
1475 these values are 'real' and 'imag', or 'ampl' and 'phase',
1476 respectively.
1477 """
1478 if rowno is None:
1479 rowno = []
1480 if isinstance(rowno, int):
1481 rowno = [rowno]
1482 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1483 raise TypeError("The row number(s) must be int, list or tuple.")
1484 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1485
1486 usecommonmask = True
1487
1488 if mask is None:
1489 mask = []
1490 if isinstance(mask, list) or isinstance(mask, tuple):
1491 if len(mask) == 0:
1492 mask = [[]]
1493 else:
1494 if isinstance(mask[0], bool):
1495 if len(mask) != self.nchan(self.getif(rowno[0])):
1496 raise ValueError("The spectra and the mask have "
1497 "different length.")
1498 mask = [mask]
1499 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1500 usecommonmask = False
1501 if len(mask) != len(rowno):
1502 raise ValueError("When specifying masks for each "
1503 "spectrum, the numbers of them "
1504 "must be identical.")
1505 for i in xrange(mask):
1506 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1507 raise ValueError("The spectra and the mask have "
1508 "different length.")
1509 else:
1510 raise TypeError("The mask must be a boolean list or "
1511 "a list of boolean list.")
1512 else:
1513 raise TypeError("The mask must be a boolean list or a list of "
1514 "boolean list.")
1515
1516 res = []
1517
1518 imask = 0
1519 for whichrow in rowno:
1520 fspec = self._fft(whichrow, mask[imask], getrealimag)
1521 nspec = len(fspec)
1522
1523 i = 0
1524 v1 = []
1525 v2 = []
1526 reselem = {"real":[],"imag":[]} if getrealimag \
1527 else {"ampl":[],"phase":[]}
1528
1529 while (i < nspec):
1530 v1.append(fspec[i])
1531 v2.append(fspec[i+1])
1532 i += 2
1533
1534 if getrealimag:
1535 reselem["real"] += v1
1536 reselem["imag"] += v2
1537 else:
1538 reselem["ampl"] += v1
1539 reselem["phase"] += v2
1540
1541 res.append(reselem)
1542
1543 if not usecommonmask:
1544 imask += 1
1545
1546 return res
1547
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            # NOTE(review): 'i' is falsy here, so getif(i) is getif(0/False)
            # — presumably intended as getif(0); confirm.
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # Accept both create_mask([a,b],[c,d]) and create_mask([[a,b],[c,d]]):
        # if the last element of the last argument is a number, args already
        # is a sequence of windows; otherwise the windows were wrapped in a
        # single list (args[0]).
        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value means a one-channel window [v, v]
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            # normalise so that window[0] <= window[1]
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1620
1621 def get_masklist(self, mask=None, row=0, silent=False):
1622 """\
1623 Compute and return a list of mask windows, [min, max].
1624
1625 Parameters:
1626
1627 mask: channel mask, created with create_mask.
1628
1629 row: calcutate the masklist using the specified row
1630 for unit conversions, default is row=0
1631 only necessary if frequency varies over rows.
1632
1633 Returns:
1634
1635 [min, max], [min2, max2], ...
1636 Pairs of start/end points (inclusive)specifying
1637 the masked regions
1638
1639 """
1640 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1641 raise TypeError("The mask should be list or tuple.")
1642 if len(mask) <= 0:
1643 raise TypeError("The mask elements should be > 0")
1644 data = self._getabcissa(row)
1645 if len(data) != len(mask):
1646 msg = "Number of channels in scantable != number of mask elements"
1647 raise TypeError(msg)
1648 u = self._getcoordinfo()[0]
1649 if u == "":
1650 u = "channel"
1651 msg = "The current mask window unit is %s" % u
1652 i = self._check_ifs()
1653 if not i:
1654 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1655 if not silent:
1656 asaplog.push(msg)
1657 masklist = []
1658 ist, ien = None, None
1659 ist, ien=self.get_mask_indices(mask)
1660 if ist is not None and ien is not None:
1661 for i in xrange(len(ist)):
1662 range=[data[ist[i]],data[ien[i]]]
1663 range.sort()
1664 masklist.append([range[0],range[1]])
1665 return masklist
1666
1667 def get_mask_indices(self, mask=None):
1668 """\
1669 Compute and Return lists of mask start indices and mask end indices.
1670
1671 Parameters:
1672
1673 mask: channel mask, created with create_mask.
1674
1675 Returns:
1676
1677 List of mask start indices and that of mask end indices,
1678 i.e., [istart1,istart2,....], [iend1,iend2,....].
1679
1680 """
1681 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1682 raise TypeError("The mask should be list or tuple.")
1683 if len(mask) <= 0:
1684 raise TypeError("The mask elements should be > 0")
1685 istart = []
1686 iend = []
1687 if mask[0]:
1688 istart.append(0)
1689 for i in range(len(mask)-1):
1690 if not mask[i] and mask[i+1]:
1691 istart.append(i+1)
1692 elif mask[i] and not mask[i+1]:
1693 iend.append(i)
1694 if mask[len(mask)-1]:
1695 iend.append(len(mask)-1)
1696 if len(istart) != len(iend):
1697 raise RuntimeError("Numbers of mask start != mask end.")
1698 for i in range(len(istart)):
1699 if istart[i] > iend[i]:
1700 raise RuntimeError("Mask start index > mask end index")
1701 break
1702 return istart,iend
1703
    @asaplog_post_dec
    def parse_spw_selection(self, selectstring, restfreq=None, frame=None, doppler=None):
        """
        Parse MS type spw/channel selection syntax.

        Parameters:
            selectstring : A string expression of spw and channel selection.
                           Comma-separated expressions mean different spw -
                           channel combinations. Spws and channel selections
                           are partitioned by a colon ':'. In a single
                           selection expression, you can put multiple values
                           separated by semicolons ';'. Both for spw and
                           channel selection, allowed cases include single
                           value, blank('') or asterisk('*') to specify all
                           available values, two values connected with a
                           tilde ('~') to specify an inclusive range. Unit
                           strings for frequency or velocity can be added to
                           the tilde-connected values. For channel selection
                           expression, placing a '<' or a '>' is possible to
                           specify a semi-infinite interval as well.

                 examples:
                           '' or '*'   = all spws (all channels)
                           '<2,4~6,9'  = Spws 0,1,4,5,6,9 (all channels)
                           '3:3~45;60' = channels 3 to 45 and 60 in spw 3
                           '0~1:2~6,8' = channels 2 to 6 in spws 0,1, and
                                         all channels in spw8
                           '1.3~1.5GHz' = all spws that fall in or have at
                                          least some overwrap with frequency
                                          range between 1.3GHz and 1.5GHz.
                           '1.3~1.5GHz:1.3~1.5GHz' = channels that fall
                                                     between the specified
                                                     frequency range in spws
                                                     that fall in or have
                                                     overwrap with the
                                                     specified frequency
                                                     range.
                           '1:-200~250km/s' = channels that fall between the
                                              specified velocity range in
                                              spw 1.
        Returns:
        A dictionary of selected (valid) spw and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(selectstring, str):
            asaplog.post()
            asaplog.push("Expression of spw/channel selection must be a string.")
            asaplog.post("ERROR")

        # work in channel units so pixel ranges can be computed directly;
        # the original unit is restored at the end.
        orig_unit = self.get_unit()
        self.set_unit('channel')

        orig_restfreq = self.get_restfreqs()
        orig_restfreq_list = []
        for i in orig_restfreq.keys():
            if len(orig_restfreq[i]) == 1:
                orig_restfreq_list.append(orig_restfreq[i][0])
            else:
                orig_restfreq_list.append(orig_restfreq[i])

        orig_coord = self._getcoordinfo()
        orig_frame = orig_coord[1]
        orig_doppler = orig_coord[2]

        if restfreq is None: restfreq = orig_restfreq_list
        if frame is None: frame = orig_frame
        if doppler is None: doppler = orig_doppler

        # temporarily apply the requested (or current) reference settings
        self.set_restfreqs(restfreq)
        self.set_freqframe(frame)
        self.set_doppler(doppler)

        valid_ifs = self.getifnos()

        comma_sep = selectstring.split(",")
        res = {}

        for cms_elem in comma_sep:
            colon_sep = cms_elem.split(":")

            if (len(colon_sep) > 2):
                raise RuntimeError("Invalid selection expression: more than two colons!")

            # parse spw expression and store result in spw_list.
            # allowed cases include '', '*', 'a', '<a', '>a', 'a~b',
            # 'a~b*Hz' (where * can be '', 'k', 'M', 'G' etc.),
            # 'a~b*m/s' (where * can be '' or 'k') and also
            # several of the above expressions connected with ';'.

            spw_list = []

            semicolon_sep = colon_sep[0].split(";")

            for scs_elem in semicolon_sep:
                scs_elem = scs_elem.strip()

                lt_sep = scs_elem.split("<")
                gt_sep = scs_elem.split(">")
                ti_sep = scs_elem.split("~")

                lt_sep_length = len(lt_sep)
                gt_sep_length = len(gt_sep)
                ti_sep_length = len(ti_sep)

                len_product = lt_sep_length * gt_sep_length * ti_sep_length

                if (len_product > 2):
                    # '<', '>' and '~' must not coexist in a single spw expression

                    raise RuntimeError("Invalid spw selection.")

                elif (len_product == 1):
                    # '', '*', or single spw number.

                    if (scs_elem == "") or (scs_elem == "*"):
                        spw_list = valid_ifs[:] # deep copy

                    else: # single number
                        expr = int(scs_elem)
                        spw_list.append(expr)
                        if expr not in valid_ifs:
                            asaplog.push("Invalid spw given. Ignored.")

                else: # (len_product == 2)
                    # namely, one of '<', '>' or '~' appears just once.

                    if (lt_sep_length == 2): # '<a'
                        if is_number(lt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i < float(lt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('<" + str(lt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    elif (gt_sep_length == 2): # '>a'
                        if is_number(gt_sep[1]):
                            no_valid_spw = True
                            for i in valid_ifs:
                                if (i > float(gt_sep[1])):
                                    spw_list.append(i)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("Invalid spw selection ('>" + str(gt_sep[1]) + "').")

                        else:
                            raise RuntimeError("Invalid spw selection.")

                    else: # (ti_sep_length == 2) where both boundaries inclusive
                        expr0 = ti_sep[0].strip()
                        expr1 = ti_sep[1].strip()

                        if is_number(expr0) and is_number(expr1):
                            # 'a~b'
                            expr_pmin = min(float(expr0), float(expr1))
                            expr_pmax = max(float(expr0), float(expr1))
                            has_invalid_spw = False
                            no_valid_spw = True

                            for i in valid_ifs:
                                if (expr_pmin <= i) and (i <= expr_pmax):
                                    spw_list.append(i)
                                    no_valid_spw = False
                                else:
                                    has_invalid_spw = True

                            if has_invalid_spw:
                                msg = "Invalid spw is given. Ignored."
                                asaplog.push(msg)
                                asaplog.post()

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr_pmin) + "~" + str(expr_pmax) + "').")

                        elif is_number(expr0) and is_frequency(expr1):
                            # 'a~b*Hz'
                            (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                            no_valid_spw = True

                            # select spws whose frequency coverage overlaps
                            # the requested frequency interval
                            for coord in self._get_coordinate_list():
                                expr_p0 = coord['coord'].to_pixel(expr_f0)
                                expr_p1 = coord['coord'].to_pixel(expr_f1)
                                expr_pmin = min(expr_p0, expr_p1)
                                expr_pmax = max(expr_p0, expr_p1)

                                spw = coord['if']
                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                if ((expr_pmax - pmin)*(expr_pmin - pmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        elif is_number(expr0) and is_velocity(expr1):
                            # 'a~b*m/s'
                            (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                            expr_vmin = min(expr_v0, expr_v1)
                            expr_vmax = max(expr_v0, expr_v1)
                            no_valid_spw = True

                            # select spws whose velocity coverage overlaps
                            # the requested velocity interval
                            for coord in self._get_coordinate_list():
                                spw = coord['if']

                                pmin = 0.0
                                pmax = float(self.nchan(spw) - 1)

                                vel0 = coord['coord'].to_velocity(pmin)
                                vel1 = coord['coord'].to_velocity(pmax)

                                vmin = min(vel0, vel1)
                                vmax = max(vel0, vel1)

                                if ((expr_vmax - vmin)*(expr_vmin - vmax) <= 0.0):
                                    spw_list.append(spw)
                                    no_valid_spw = False

                            if no_valid_spw:
                                raise ValueError("No valid spw in range ('" + str(expr0) + "~" + str(expr1) + "').")

                        else:
                            # cases such as 'aGHz~bkm/s' are not allowed now
                            raise RuntimeError("Invalid spw selection.")

            # check spw list and remove invalid ones.
            # if no valid spw left, emit ValueError.
            if len(spw_list) == 0:
                raise ValueError("No valid spw in given range.")

            # parse channel expression and store the result in crange_list.
            # allowed cases include '', 'a~b', 'a*Hz~b*Hz' (where * can be
            # '', 'k', 'M', 'G' etc.), 'a*m/s~b*m/s' (where * can be '' or 'k')
            # and also several of the above expressions connected with ';'.

            for spw in spw_list:
                pmin = 0.0
                pmax = float(self.nchan(spw) - 1)

                if (len(colon_sep) == 1):
                    # no expression for channel selection,
                    # which means all channels are to be selected.
                    crange_list = [[pmin, pmax]]

                else: # (len(colon_sep) == 2)
                    crange_list = []

                    found = False
                    for i in self._get_coordinate_list():
                        if (i['if'] == spw):
                            coord = i['coord']
                            found = True
                            break

                    if found:
                        semicolon_sep = colon_sep[1].split(";")
                        for scs_elem in semicolon_sep:
                            scs_elem = scs_elem.strip()

                            ti_sep = scs_elem.split("~")
                            ti_sep_length = len(ti_sep)

                            if (ti_sep_length > 2):
                                raise RuntimeError("Invalid channel selection.")

                            elif (ti_sep_length == 1):
                                if (scs_elem == "") or (scs_elem == "*"):
                                    # '' and '*' for all channels
                                    crange_list = [[pmin, pmax]]
                                    break
                                elif (is_number(scs_elem)):
                                    # single channel given
                                    crange_list.append([float(scs_elem), float(scs_elem)])
                                else:
                                    raise RuntimeError("Invalid channel selection.")

                            else: #(ti_sep_length == 2)
                                expr0 = ti_sep[0].strip()
                                expr1 = ti_sep[1].strip()

                                if is_number(expr0) and is_number(expr1):
                                    # 'a~b'
                                    expr_pmin = min(float(expr0), float(expr1))
                                    expr_pmax = max(float(expr0), float(expr1))

                                elif is_number(expr0) and is_frequency(expr1):
                                    # 'a~b*Hz'
                                    (expr_f0, expr_f1) = get_freq_by_string(expr0, expr1)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                elif is_number(expr0) and is_velocity(expr1):
                                    # 'a~b*m/s'
                                    # NOTE(review): uses the first rest
                                    # frequency for all spws — confirm.
                                    restf = self.get_restfreqs().values()[0][0]
                                    (expr_v0, expr_v1) = get_velocity_by_string(expr0, expr1)
                                    expr_f0 = get_frequency_by_velocity(restf, expr_v0, doppler)
                                    expr_f1 = get_frequency_by_velocity(restf, expr_v1, doppler)
                                    expr_p0 = coord.to_pixel(expr_f0)
                                    expr_p1 = coord.to_pixel(expr_f1)
                                    expr_pmin = min(expr_p0, expr_p1)
                                    expr_pmax = max(expr_p0, expr_p1)

                                else:
                                    # cases such as 'aGHz~bkm/s' are not allowed now
                                    raise RuntimeError("Invalid channel selection.")

                                cmin = max(pmin, expr_pmin)
                                cmax = min(pmax, expr_pmax)
                                # if the given range of channel selection has overwrap with
                                # that of current spw, output the overwrap area.
                                if (cmin <= cmax):
                                    cmin = float(int(cmin + 0.5))
                                    cmax = float(int(cmax + 0.5))
                                    crange_list.append([cmin, cmax])

                    if (len(crange_list) == 0):
                        crange_list.append([])

                if res.has_key(spw):
                    res[spw].extend(crange_list)
                else:
                    res[spw] = crange_list

        # drop entries for spws that are not present in the data
        for spw in res.keys():
            if spw not in valid_ifs:
                del res[spw]

        if len(res) == 0:
            raise RuntimeError("No valid spw.")

        # restore original values
        self.set_restfreqs(orig_restfreq_list)
        self.set_freqframe(orig_frame)
        self.set_doppler(orig_doppler)
        self.set_unit(orig_unit)

        return res
2050 #found
2051
2052 @asaplog_post_dec
2053 def get_first_rowno_by_if(self, ifno):
2054 found = False
2055 for irow in xrange(self.nrow()):
2056 if (self.getif(irow) == ifno):
2057 res = irow
2058 found = True
2059 break
2060
2061 if not found: raise RuntimeError("Invalid IF value.")
2062
2063 return res
2064
2065 @asaplog_post_dec
2066 def _get_coordinate_list(self):
2067 res = []
2068 spws = self.getifnos()
2069 for spw in spws:
2070 elem = {}
2071 elem['if'] = spw
2072 elem['coord'] = self.get_coordinate(self.get_first_rowno_by_if(spw))
2073 res.append(elem)
2074
2075 return res
2076 # doppler
2077##################################
2078
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        # split maskstring by "<spaces>,<spaces>"
        comma_sep = re.compile('\s*,\s*')
        sellist = comma_sep.split(maskstring)
        # separator by "<spaces>:<spaces>"
        collon_sep = re.compile('\s*:\s*')
        for currselstr in sellist:
            selset = collon_sep.split(currselstr)
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to get its
                        # coordinate and abcissa range
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
2216
2217 @asaplog_post_dec
2218 def parse_idx_selection(self, mode, selexpr):
2219 """
2220 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
2221 BEAMNO, and row number
2222
2223 Parameters:
2224 mode : which column to select.
2225 ['scan',|'if'|'pol'|'beam'|'row']
2226 selexpr : A comma separated selection expression.
2227 examples:
2228 '' = all (returns [])
2229 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
2230 (returns [0,1,4,5,6,9])
2231 Returns:
2232 A List of selected indices
2233 """
2234 if selexpr == "":
2235 return []
2236 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
2237 'b': 'beam', 'r': 'row'}
2238 smode = mode.lower()[0]
2239 if not (smode in valid_modes.keys()):
2240 msg = "Invalid mode '%s'. Valid modes are %s" %\
2241 (mode, str(valid_modes.values()))
2242 asaplog.post()
2243 asaplog.push(msg)
2244 asaplog.post("ERROR")
2245 mode = valid_modes[smode]
2246 minidx = None
2247 maxidx = None
2248 if smode == 'r':
2249 minidx = 0
2250 maxidx = self.nrow()
2251 else:
2252 idx = getattr(self,"get"+mode+"nos")()
2253 minidx = min(idx)
2254 maxidx = max(idx)
2255 del idx
2256 # split selexpr by "<spaces>,<spaces>"
2257 comma_sep = re.compile('\s*,\s*')
2258 sellist = comma_sep.split(selexpr)
2259 idxlist = []
2260 for currselstr in sellist:
2261 # single range (may include ~, < or >)
2262 currlist = self._parse_selection(currselstr, typestr='integer',
2263 minval=minidx,maxval=maxidx)
2264 for thelist in currlist:
2265 idxlist += range(thelist[0],thelist[1]+1)
2266 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
2267 asaplog.push(msg)
2268 return idxlist
2269
2270 def _parse_selection(self, selstr, typestr='float', offset=0.,
2271 minval=None, maxval=None):
2272 """
2273 Parameters:
2274 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
2275 typestr : The type of the values in returned list
2276 ('integer' or 'float')
2277 offset : The offset value to subtract from or add to
2278 the boundary value if the selection string
2279 includes '<' or '>' [Valid only for typestr='float']
2280 minval, maxval : The minimum/maximum values to set if the
2281 selection string includes '<' or '>'.
2282 The list element is filled with None by default.
2283 Returns:
2284 A list of min/max pair of selections.
2285 Example:
2286 _parse_selection('<3;5~7;9',typestr='int',minval=0)
2287 --> returns [[0,2],[5,7],[9,9]]
2288 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
2289 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
2290 """
2291 # split selstr by '<spaces>;<spaces>'
2292 semi_sep = re.compile('\s*;\s*')
2293 selgroups = semi_sep.split(selstr)
2294 sellists = []
2295 if typestr.lower().startswith('int'):
2296 formatfunc = int
2297 offset = 1
2298 else:
2299 formatfunc = float
2300
2301 for currsel in selgroups:
2302 if currsel.strip() == '*' or len(currsel.strip()) == 0:
2303 minsel = minval
2304 maxsel = maxval
2305 if currsel.find('~') > 0:
2306 # val0 <= x <= val1
2307 minsel = formatfunc(currsel.split('~')[0].strip())
2308 maxsel = formatfunc(currsel.split('~')[1].strip())
2309 elif currsel.strip().find('<=') > -1:
2310 bound = currsel.split('<=')
2311 try: # try "x <= val"
2312 minsel = minval
2313 maxsel = formatfunc(bound[1].strip())
2314 except ValueError: # now "val <= x"
2315 minsel = formatfunc(bound[0].strip())
2316 maxsel = maxval
2317 elif currsel.strip().find('>=') > -1:
2318 bound = currsel.split('>=')
2319 try: # try "x >= val"
2320 minsel = formatfunc(bound[1].strip())
2321 maxsel = maxval
2322 except ValueError: # now "val >= x"
2323 minsel = minval
2324 maxsel = formatfunc(bound[0].strip())
2325 elif currsel.strip().find('<') > -1:
2326 bound = currsel.split('<')
2327 try: # try "x < val"
2328 minsel = minval
2329 maxsel = formatfunc(bound[1].strip()) \
2330 - formatfunc(offset)
2331 except ValueError: # now "val < x"
2332 minsel = formatfunc(bound[0].strip()) \
2333 + formatfunc(offset)
2334 maxsel = maxval
2335 elif currsel.strip().find('>') > -1:
2336 bound = currsel.split('>')
2337 try: # try "x > val"
2338 minsel = formatfunc(bound[1].strip()) \
2339 + formatfunc(offset)
2340 maxsel = maxval
2341 except ValueError: # now "val > x"
2342 minsel = minval
2343 maxsel = formatfunc(bound[0].strip()) \
2344 - formatfunc(offset)
2345 else:
2346 minsel = formatfunc(currsel)
2347 maxsel = formatfunc(currsel)
2348 sellists.append([minsel,maxsel])
2349 return sellists
2350
2351# def get_restfreqs(self):
2352# """
2353# Get the restfrequency(s) stored in this scantable.
2354# The return value(s) are always of unit 'Hz'
2355# Parameters:
2356# none
2357# Returns:
2358# a list of doubles
2359# """
2360# return list(self._getrestfreqs())
2361
2362 def get_restfreqs(self, ids=None):
2363 """\
2364 Get the restfrequency(s) stored in this scantable.
2365 The return value(s) are always of unit 'Hz'
2366
2367 Parameters:
2368
2369 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
2370 be retrieved
2371
2372 Returns:
2373
2374 dictionary containing ids and a list of doubles for each id
2375
2376 """
2377 if ids is None:
2378 rfreqs = {}
2379 idlist = self.getmolnos()
2380 for i in idlist:
2381 rfreqs[i] = list(self._getrestfreqs(i))
2382 return rfreqs
2383 else:
2384 if type(ids) == list or type(ids) == tuple:
2385 rfreqs = {}
2386 for i in ids:
2387 rfreqs[i] = list(self._getrestfreqs(i))
2388 return rfreqs
2389 else:
2390 return list(self._getrestfreqs(ids))
2391
2392 @asaplog_post_dec
2393 def set_restfreqs(self, freqs=None, unit='Hz'):
2394 """\
2395 Set or replace the restfrequency specified and
2396 if the 'freqs' argument holds a scalar,
2397 then that rest frequency will be applied to all the selected
2398 data. If the 'freqs' argument holds
2399 a vector, then it MUST be of equal or smaller length than
2400 the number of IFs (and the available restfrequencies will be
2401 replaced by this vector). In this case, *all* data have
2402 the restfrequency set per IF according
2403 to the corresponding value you give in the 'freqs' vector.
2404 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
2405 IF 1 gets restfreq 2e9.
2406
2407 You can also specify the frequencies via a linecatalog.
2408
2409 Parameters:
2410
2411 freqs: list of rest frequency values or string idenitfiers
2412
2413 unit: unit for rest frequency (default 'Hz')
2414
2415
2416 Example::
2417
2418 # set the given restfrequency for the all currently selected IFs
2419 scan.set_restfreqs(freqs=1.4e9)
2420 # set restfrequencies for the n IFs (n > 1) in the order of the
2421 # list, i.e
2422 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
2423 # len(list_of_restfreqs) == nIF
2424 # for nIF == 1 the following will set multiple restfrequency for
2425 # that IF
2426 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
2427 # set multiple restfrequencies per IF. as a list of lists where
2428 # the outer list has nIF elements, the inner s arbitrary
2429 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
2430
2431 *Note*:
2432
2433 To do more sophisticate Restfrequency setting, e.g. on a
2434 source and IF basis, use scantable.set_selection() before using
2435 this function::
2436
2437 # provided your scantable is called scan
2438 selection = selector()
2439 selection.set_name('ORION*')
2440 selection.set_ifs([1])
2441 scan.set_selection(selection)
2442 scan.set_restfreqs(freqs=86.6e9)
2443
2444 """
2445 varlist = vars()
2446 from asap import linecatalog
2447 # simple value
2448 if isinstance(freqs, int) or isinstance(freqs, float):
2449 self._setrestfreqs([freqs], [""], unit)
2450 # list of values
2451 elif isinstance(freqs, list) or isinstance(freqs, tuple):
2452 # list values are scalars
2453 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
2454 if len(freqs) == 1:
2455 self._setrestfreqs(freqs, [""], unit)
2456 else:
2457 # allow the 'old' mode of setting mulitple IFs
2458 savesel = self._getselection()
2459 sel = self.get_selection()
2460 iflist = self.getifnos()
2461 if len(freqs)>len(iflist):
2462 raise ValueError("number of elements in list of list "
2463 "exeeds the current IF selections")
2464 iflist = self.getifnos()
2465 for i, fval in enumerate(freqs):
2466 sel.set_ifs(iflist[i])
2467 self._setselection(sel)
2468 self._setrestfreqs([fval], [""], unit)
2469 self._setselection(savesel)
2470
2471 # list values are dict, {'value'=, 'name'=)
2472 elif isinstance(freqs[-1], dict):
2473 values = []
2474 names = []
2475 for d in freqs:
2476 values.append(d["value"])
2477 names.append(d["name"])
2478 self._setrestfreqs(values, names, unit)
2479 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
2480 savesel = self._getselection()
2481 sel = self.get_selection()
2482 iflist = self.getifnos()
2483 if len(freqs)>len(iflist):
2484 raise ValueError("number of elements in list of list exeeds"
2485 " the current IF selections")
2486 for i, fval in enumerate(freqs):
2487 sel.set_ifs(iflist[i])
2488 self._setselection(sel)
2489 self._setrestfreqs(fval, [""], unit)
2490 self._setselection(savesel)
2491 # freqs are to be taken from a linecatalog
2492 elif isinstance(freqs, linecatalog):
2493 savesel = self._getselection()
2494 sel = self.get_selection()
2495 for i in xrange(freqs.nrow()):
2496 sel.set_ifs(iflist[i])
2497 self._setselection(sel)
2498 self._setrestfreqs([freqs.get_frequency(i)],
2499 [freqs.get_name(i)], "MHz")
2500 # ensure that we are not iterating past nIF
2501 if i == self.nif()-1: break
2502 self._setselection(savesel)
2503 else:
2504 return
2505 self._add_history("set_restfreqs", varlist)
2506
2507 @asaplog_post_dec
2508 def shift_refpix(self, delta):
2509 """\
2510 Shift the reference pixel of the Spectra Coordinate by an
2511 integer amount.
2512
2513 Parameters:
2514
2515 delta: the amount to shift by
2516
2517 *Note*:
2518
2519 Be careful using this with broadband data.
2520
2521 """
2522 varlist = vars()
2523 Scantable.shift_refpix(self, delta)
2524 s._add_history("shift_refpix", varlist)
2525
2526 @asaplog_post_dec
2527 def history(self, filename=None, nrows=-1, start=0):
2528 """\
2529 Print the history. Optionally to a file.
2530
2531 Parameters:
2532
2533 filename: The name of the file to save the history to.
2534
2535 """
2536 n = self._historylength()
2537 if nrows == -1:
2538 nrows = n
2539 if start+nrows > n:
2540 nrows = nrows-start
2541 if n > 1000 and nrows == n:
2542 nrows = 1000
2543 start = n-1000
2544 asaplog.push("Warning: History has {0} entries. Displaying last "
2545 "1000".format(n))
2546 hist = list(self._gethistory(nrows, start))
2547 out = "-"*80
2548 for h in hist:
2549 if not h.strip():
2550 continue
2551 if h.find("---") >-1:
2552 continue
2553 else:
2554 items = h.split("##")
2555 date = items[0]
2556 func = items[1]
2557 items = items[2:]
2558 out += "\n"+date+"\n"
2559 out += "Function: %s\n Parameters:" % (func)
2560 for i in items:
2561 if i == '':
2562 continue
2563 s = i.split("=")
2564 out += "\n %s = %s" % (s[0], s[1])
2565 out = "\n".join([out, "*"*80])
2566 if filename is not None:
2567 if filename is "":
2568 filename = 'scantable_history.txt'
2569 filename = os.path.expandvars(os.path.expanduser(filename))
2570 if not os.path.isdir(filename):
2571 data = open(filename, 'w')
2572 data.write(out)
2573 data.close()
2574 else:
2575 msg = "Illegal file name '%s'." % (filename)
2576 raise IOError(msg)
2577 return page(out)
2578
2579 #
2580 # Maths business
2581 #
2582 @asaplog_post_dec
2583 def average_time(self, mask=None, scanav=False, weight='tint', align=False,
2584 avmode="NONE"):
2585 """\
2586 Return the (time) weighted average of a scan. Scans will be averaged
2587 only if the source direction (RA/DEC) is within 1' otherwise
2588
2589 *Note*:
2590
2591 in channels only - align if necessary
2592
2593 Parameters:
2594
2595 mask: an optional mask (only used for 'var' and 'tsys'
2596 weighting)
2597
2598 scanav: True averages each scan separately
2599 False (default) averages all scans together,
2600
2601 weight: Weighting scheme.
2602 'none' (mean no weight)
2603 'var' (1/var(spec) weighted)
2604 'tsys' (1/Tsys**2 weighted)
2605 'tint' (integration time weighted)
2606 'tintsys' (Tint/Tsys**2)
2607 'median' ( median averaging)
2608 The default is 'tint'
2609
2610 align: align the spectra in velocity before averaging. It takes
2611 the time of the first spectrum as reference time.
2612 avmode: 'SOURCE' - also select by source name - or
2613 'NONE' (default). Not applicable for scanav=True or
2614 weight=median
2615
2616 Example::
2617
2618 # time average the scantable without using a mask
2619 newscan = scan.average_time()
2620
2621 """
2622 varlist = vars()
2623 weight = weight or 'TINT'
2624 mask = mask or ()
2625 scanav = (scanav and 'SCAN') or avmode.upper()
2626 scan = (self, )
2627
2628 if align:
2629 scan = (self.freq_align(insitu=False), )
2630 asaplog.push("Note: Alignment is don on a source-by-source basis")
2631 asaplog.push("Note: Averaging (by default) is not")
2632 # we need to set it to SOURCE averaging here
2633 s = None
2634 if weight.upper() == 'MEDIAN':
2635 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2636 scanav))
2637 else:
2638 s = scantable(self._math._average(scan, mask, weight.upper(),
2639 scanav))
2640 s._add_history("average_time", varlist)
2641 return s
2642
2643 @asaplog_post_dec
2644 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2645 """\
2646 Return a scan where all spectra are converted to either
2647 Jansky or Kelvin depending upon the flux units of the scan table.
2648 By default the function tries to look the values up internally.
2649 If it can't find them (or if you want to over-ride), you must
2650 specify EITHER jyperk OR eta (and D which it will try to look up
2651 also if you don't set it). jyperk takes precedence if you set both.
2652
2653 Parameters:
2654
2655 jyperk: the Jy / K conversion factor
2656
2657 eta: the aperture efficiency
2658
2659 d: the geometric diameter (metres)
2660
2661 insitu: if False a new scantable is returned.
2662 Otherwise, the scaling is done in-situ
2663 The default is taken from .asaprc (False)
2664
2665 """
2666 if insitu is None: insitu = rcParams['insitu']
2667 self._math._setinsitu(insitu)
2668 varlist = vars()
2669 jyperk = jyperk or -1.0
2670 d = d or -1.0
2671 eta = eta or -1.0
2672 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2673 s._add_history("convert_flux", varlist)
2674 if insitu: self._assign(s)
2675 else: return s
2676
2677 @asaplog_post_dec
2678 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2679 """\
2680 Return a scan after applying a gain-elevation correction.
2681 The correction can be made via either a polynomial or a
2682 table-based interpolation (and extrapolation if necessary).
2683 You specify polynomial coefficients, an ascii table or neither.
2684 If you specify neither, then a polynomial correction will be made
2685 with built in coefficients known for certain telescopes (an error
2686 will occur if the instrument is not known).
2687 The data and Tsys are *divided* by the scaling factors.
2688
2689 Parameters:
2690
2691 poly: Polynomial coefficients (default None) to compute a
2692 gain-elevation correction as a function of
2693 elevation (in degrees).
2694
2695 filename: The name of an ascii file holding correction factors.
2696 The first row of the ascii file must give the column
2697 names and these MUST include columns
2698 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2699 by this) somewhere.
2700 The second row must give the data type of the
2701 column. Use 'R' for Real and 'I' for Integer.
2702 An example file would be
2703 (actual factors are arbitrary) :
2704
2705 TIME ELEVATION FACTOR
2706 R R R
2707 0.1 0 0.8
2708 0.2 20 0.85
2709 0.3 40 0.9
2710 0.4 60 0.85
2711 0.5 80 0.8
2712 0.6 90 0.75
2713
2714 method: Interpolation method when correcting from a table.
2715 Values are 'nearest', 'linear' (default), 'cubic'
2716 and 'spline'
2717
2718 insitu: if False a new scantable is returned.
2719 Otherwise, the scaling is done in-situ
2720 The default is taken from .asaprc (False)
2721
2722 """
2723
2724 if insitu is None: insitu = rcParams['insitu']
2725 self._math._setinsitu(insitu)
2726 varlist = vars()
2727 poly = poly or ()
2728 from os.path import expandvars
2729 filename = expandvars(filename)
2730 s = scantable(self._math._gainel(self, poly, filename, method))
2731 s._add_history("gain_el", varlist)
2732 if insitu:
2733 self._assign(s)
2734 else:
2735 return s
2736
2737 @asaplog_post_dec
2738 def freq_align(self, reftime=None, method='cubic', insitu=None):
2739 """\
2740 Return a scan where all rows have been aligned in frequency/velocity.
2741 The alignment frequency frame (e.g. LSRK) is that set by function
2742 set_freqframe.
2743
2744 Parameters:
2745
2746 reftime: reference time to align at. By default, the time of
2747 the first row of data is used.
2748
2749 method: Interpolation method for regridding the spectra.
2750 Choose from 'nearest', 'linear', 'cubic' (default)
2751 and 'spline'
2752
2753 insitu: if False a new scantable is returned.
2754 Otherwise, the scaling is done in-situ
2755 The default is taken from .asaprc (False)
2756
2757 """
2758 if insitu is None: insitu = rcParams["insitu"]
2759 oldInsitu = self._math._insitu()
2760 self._math._setinsitu(insitu)
2761 varlist = vars()
2762 reftime = reftime or ""
2763 s = scantable(self._math._freq_align(self, reftime, method))
2764 s._add_history("freq_align", varlist)
2765 self._math._setinsitu(oldInsitu)
2766 if insitu:
2767 self._assign(s)
2768 else:
2769 return s
2770
2771 @asaplog_post_dec
2772 def opacity(self, tau=None, insitu=None):
2773 """\
2774 Apply an opacity correction. The data
2775 and Tsys are multiplied by the correction factor.
2776
2777 Parameters:
2778
2779 tau: (list of) opacity from which the correction factor is
2780 exp(tau*ZD)
2781 where ZD is the zenith-distance.
2782 If a list is provided, it has to be of length nIF,
2783 nIF*nPol or 1 and in order of IF/POL, e.g.
2784 [opif0pol0, opif0pol1, opif1pol0 ...]
2785 if tau is `None` the opacities are determined from a
2786 model.
2787
2788 insitu: if False a new scantable is returned.
2789 Otherwise, the scaling is done in-situ
2790 The default is taken from .asaprc (False)
2791
2792 """
2793 if insitu is None:
2794 insitu = rcParams['insitu']
2795 self._math._setinsitu(insitu)
2796 varlist = vars()
2797 if not hasattr(tau, "__len__"):
2798 tau = [tau]
2799 s = scantable(self._math._opacity(self, tau))
2800 s._add_history("opacity", varlist)
2801 if insitu:
2802 self._assign(s)
2803 else:
2804 return s
2805
2806 @asaplog_post_dec
2807 def bin(self, width=5, insitu=None):
2808 """\
2809 Return a scan where all spectra have been binned up.
2810
2811 Parameters:
2812
2813 width: The bin width (default=5) in pixels
2814
2815 insitu: if False a new scantable is returned.
2816 Otherwise, the scaling is done in-situ
2817 The default is taken from .asaprc (False)
2818
2819 """
2820 if insitu is None:
2821 insitu = rcParams['insitu']
2822 self._math._setinsitu(insitu)
2823 varlist = vars()
2824 s = scantable(self._math._bin(self, width))
2825 s._add_history("bin", varlist)
2826 if insitu:
2827 self._assign(s)
2828 else:
2829 return s
2830
2831 @asaplog_post_dec
2832 def reshape(self, first, last, insitu=None):
2833 """Resize the band by providing first and last channel.
2834 This will cut off all channels outside [first, last].
2835 """
2836 if insitu is None:
2837 insitu = rcParams['insitu']
2838 varlist = vars()
2839 if last < 0:
2840 last = self.nchan()-1 + last
2841 s = None
2842 if insitu:
2843 s = self
2844 else:
2845 s = self.copy()
2846 s._reshape(first,last)
2847 s._add_history("reshape", varlist)
2848 if not insitu:
2849 return s
2850
2851 @asaplog_post_dec
2852 def resample(self, width=5, method='cubic', insitu=None):
2853 """\
2854 Return a scan where all spectra have been binned up.
2855
2856 Parameters:
2857
2858 width: The bin width (default=5) in pixels
2859
2860 method: Interpolation method when correcting from a table.
2861 Values are 'nearest', 'linear', 'cubic' (default)
2862 and 'spline'
2863
2864 insitu: if False a new scantable is returned.
2865 Otherwise, the scaling is done in-situ
2866 The default is taken from .asaprc (False)
2867
2868 """
2869 if insitu is None:
2870 insitu = rcParams['insitu']
2871 self._math._setinsitu(insitu)
2872 varlist = vars()
2873 s = scantable(self._math._resample(self, method, width))
2874 s._add_history("resample", varlist)
2875 if insitu:
2876 self._assign(s)
2877 else:
2878 return s
2879
2880 @asaplog_post_dec
2881 def average_pol(self, mask=None, weight='none'):
2882 """\
2883 Average the Polarisations together.
2884
2885 Parameters:
2886
2887 mask: An optional mask defining the region, where the
2888 averaging will be applied. The output will have all
2889 specified points masked.
2890
2891 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2892 weighted), or 'tsys' (1/Tsys**2 weighted)
2893
2894 """
2895 varlist = vars()
2896 mask = mask or ()
2897 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2898 s._add_history("average_pol", varlist)
2899 return s
2900
2901 @asaplog_post_dec
2902 def average_beam(self, mask=None, weight='none'):
2903 """\
2904 Average the Beams together.
2905
2906 Parameters:
2907 mask: An optional mask defining the region, where the
2908 averaging will be applied. The output will have all
2909 specified points masked.
2910
2911 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2912 weighted), or 'tsys' (1/Tsys**2 weighted)
2913
2914 """
2915 varlist = vars()
2916 mask = mask or ()
2917 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2918 s._add_history("average_beam", varlist)
2919 return s
2920
2921 def parallactify(self, pflag):
2922 """\
2923 Set a flag to indicate whether this data should be treated as having
2924 been 'parallactified' (total phase == 0.0)
2925
2926 Parameters:
2927
2928 pflag: Bool indicating whether to turn this on (True) or
2929 off (False)
2930
2931 """
2932 varlist = vars()
2933 self._parallactify(pflag)
2934 self._add_history("parallactify", varlist)
2935
2936 @asaplog_post_dec
2937 def convert_pol(self, poltype=None):
2938 """\
2939 Convert the data to a different polarisation type.
2940 Note that you will need cross-polarisation terms for most conversions.
2941
2942 Parameters:
2943
2944 poltype: The new polarisation type. Valid types are:
2945 'linear', 'circular', 'stokes' and 'linpol'
2946
2947 """
2948 varlist = vars()
2949 s = scantable(self._math._convertpol(self, poltype))
2950 s._add_history("convert_pol", varlist)
2951 return s
2952
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so that rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # per-row interactive verification; 'action' carries the user's
        # standing decision ('N' = reject all remaining rows without plotting)
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # original vs smoothed data for this row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row, drop the default action so the user's
                # per-row answer (or 'R'/'A' shortcut) takes over
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
3045
3046 @asaplog_post_dec
3047 def regrid_channel(self, width=5, plot=False, insitu=None):
3048 """\
3049 Regrid the spectra by the specified channel width
3050
3051 Parameters:
3052
3053 width: The channel width (float) of regridded spectra
3054 in the current spectral unit.
3055
3056 plot: [NOT IMPLEMENTED YET]
3057 plot the original and the regridded spectra.
3058 In this each indivual fit has to be approved, by
3059 typing 'y' or 'n'
3060
3061 insitu: if False a new scantable is returned.
3062 Otherwise, the scaling is done in-situ
3063 The default is taken from .asaprc (False)
3064
3065 """
3066 if insitu is None: insitu = rcParams['insitu']
3067 varlist = vars()
3068
3069 if plot:
3070 asaplog.post()
3071 asaplog.push("Verification plot is not implemtnetd yet.")
3072 asaplog.post("WARN")
3073
3074 s = self.copy()
3075 s._regrid_specchan(width)
3076
3077 s._add_history("regrid_channel", varlist)
3078
3079# if plot:
3080# from asap.asapplotter import new_asaplot
3081# theplot = new_asaplot(rcParams['plotter.gui'])
3082# from matplotlib import rc as rcp
3083# rcp('lines', linewidth=1)
3084# theplot.set_panels()
3085# ylab=s._get_ordinate_label()
3086# #theplot.palette(0,["#777777","red"])
3087# for r in xrange(s.nrow()):
3088# xsm=s._getabcissa(r)
3089# ysm=s._getspectrum(r)
3090# xorg=orgscan._getabcissa(r)
3091# yorg=orgscan._getspectrum(r)
3092# theplot.clear()
3093# theplot.hold()
3094# theplot.set_axes('ylabel',ylab)
3095# theplot.set_axes('xlabel',s._getabcissalabel(r))
3096# theplot.set_axes('title',s._getsourcename(r))
3097# theplot.set_line(label='Original',color="#777777")
3098# theplot.plot(xorg,yorg)
3099# theplot.set_line(label='Smoothed',color="red")
3100# theplot.plot(xsm,ysm)
3101# ### Ugly part for legend
3102# for i in [0,1]:
3103# theplot.subplots[0]['lines'].append(
3104# [theplot.subplots[0]['axes'].lines[i]]
3105# )
3106# theplot.release()
3107# ### Ugly part for legend
3108# theplot.subplots[0]['lines']=[]
3109# res = raw_input("Accept smoothing ([y]/n): ")
3110# if res.upper() == 'N':
3111# s._setspectrum(yorg, r)
3112# theplot.quit()
3113# del theplot
3114# del orgscan
3115
3116 if insitu: self._assign(s)
3117 else: return s
3118
3119 @asaplog_post_dec
3120 def _parse_wn(self, wn):
3121 if isinstance(wn, list) or isinstance(wn, tuple):
3122 return wn
3123 elif isinstance(wn, int):
3124 return [ wn ]
3125 elif isinstance(wn, str):
3126 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
3127 val = wn.split('-')
3128 val = [int(val[0]), int(val[1])]
3129 val.sort()
3130 res = [i for i in xrange(val[0], val[1]+1)]
3131 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
3132 val = int(wn[2:])+1
3133 res = [i for i in xrange(val)]
3134 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
3135 val = int(wn[:-2])+1
3136 res = [i for i in xrange(val)]
3137 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
3138 val = int(wn[1:])
3139 res = [i for i in xrange(val)]
3140 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
3141 val = int(wn[:-1])
3142 res = [i for i in xrange(val)]
3143 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
3144 # then interpreted in C++
3145 # side as [a,a+1,...,a_nyq]
3146 # (CAS-3759)
3147 val = int(wn[2:])
3148 res = [val, -999]
3149 #res = [i for i in xrange(val, self.nchan()/2+1)]
3150 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
3151 # then interpreted in C++
3152 # side as [a,a+1,...,a_nyq]
3153 # (CAS-3759)
3154 val = int(wn[:-2])
3155 res = [val, -999]
3156 #res = [i for i in xrange(val, self.nchan()/2+1)]
3157 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
3158 # then interpreted in C++
3159 # side as [a+1,a+2,...,a_nyq]
3160 # (CAS-3759)
3161 val = int(wn[1:])+1
3162 res = [val, -999]
3163 #res = [i for i in xrange(val, self.nchan()/2+1)]
3164 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
3165 # then interpreted in C++
3166 # side as [a+1,a+2,...,a_nyq]
3167 # (CAS-3759)
3168 val = int(wn[:-1])+1
3169 res = [val, -999]
3170 #res = [i for i in xrange(val, self.nchan()/2+1)]
3171
3172 return res
3173 else:
3174 msg = 'wrong value given for addwn/rejwn'
3175 raise RuntimeError(msg)
3176
    @asaplog_post_dec
    def apply_bltable(self, insitu=None, retfitres=None, inbltable=None, outbltable=None, overwrite=None):
        """\
        Subtract baseline based on parameters written in Baseline Table.

        Parameters:
            insitu:        if True, baseline fitting/subtraction is done
                           in-situ. If False, a new scantable with
                           baseline subtracted is returned. Actually,
                           format of the returned value depends on both
                           insitu and retfitres (see below).
                           The default is taken from .asaprc (False)
            retfitres:     if True, the results of baseline fitting (i.e.,
                           coefficients and rms) are returned.
                           default is False.
                           The format of the returned value of this
                           function varies as follows:
                           (1) in case insitu=True and retfitres=True:
                               fitting result.
                           (2) in case insitu=True and retfitres=False:
                               None.
                           (3) in case insitu=False and retfitres=True:
                               a dictionary containing a new scantable
                               (with baseline subtracted) and the fitting
                               results.
                           (4) in case insitu=False and retfitres=False:
                               a new scantable (with baseline subtracted).
            inbltable:     name of input baseline table. The row number of
                           scantable and that of inbltable must be
                           identical.
            outbltable:    name of output baseline table where baseline
                           parameters and fitting results recorded.
                           default is ''(no output).
            overwrite:     if True when an existing baseline table is
                           specified for outbltable, overwrites it.
                           Otherwise there is no harm.
                           default is False.
        """

        try:
            varlist = vars()
            # fill in defaults for keyword arguments left as None;
            # an input baseline table is mandatory
            if retfitres      is None: retfitres      = False
            if inbltable      is None: raise ValueError("bltable missing.")
            if outbltable     is None: outbltable     = ''
            if overwrite      is None: overwrite      = False

            if insitu is None: insitu = rcParams['insitu']
            # operate on self directly (insitu) or on a working copy
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # 4th argument tells the C++ layer whether outbltable already
            # exists on disk (needed for the overwrite decision)
            sres = workscan._apply_bltable(inbltable,
                                           retfitres,
                                           outbltable,
                                           os.path.exists(outbltable),
                                           overwrite)
            # convert the raw string result into a structured fit result
            if retfitres: res = parse_fitresult(sres)

            workscan._add_history('apply_bltable', varlist)

            # see the docstring for the four insitu/retfitres return shapes
            if insitu:
                self._assign(workscan)
                if retfitres:
                    return res
                else:
                    return None
            else:
                if retfitres:
                    return {'scantable': workscan, 'fitresults': res}
                else:
                    return workscan

        except RuntimeError, e:
            # translate low-level fitting errors into the common
            # fitting-failure exception used throughout this module
            raise_fitting_failure_exception(e)
3252
3253 @asaplog_post_dec
3254 def sub_baseline(self, insitu=None, retfitres=None, blinfo=None, bltable=None, overwrite=None):
3255 """\
3256 Subtract baseline based on parameters written in the input list.
3257
3258 Parameters:
3259 insitu: if True, baseline fitting/subtraction is done
3260 in-situ. If False, a new scantable with
3261 baseline subtracted is returned. Actually,
3262 format of the returned value depends on both
3263 insitu and retfitres (see below).
3264 The default is taken from .asaprc (False)
3265 retfitres: if True, the results of baseline fitting (i.e.,
3266 coefficients and rms) are returned.
3267 default is False.
3268 The format of the returned value of this
3269 function varies as follows:
3270 (1) in case insitu=True and retfitres=True:
3271 fitting result.
3272 (2) in case insitu=True and retfitres=False:
3273 None.
3274 (3) in case insitu=False and retfitres=True:
3275 a dictionary containing a new scantable
3276 (with baseline subtracted) and the fitting
3277 results.
3278 (4) in case insitu=False and retfitres=False:
3279 a new scantable (with baseline subtracted).
3280 blinfo: baseline parameter set stored in a dictionary
3281 or a list of dictionary. Each dictionary
3282 corresponds to each spectrum and must contain
3283 the following keys and values:
3284 'row': row number,
3285 'blfunc': function name. available ones include
3286 'poly', 'chebyshev', 'cspline' and
3287 'sinusoid',
3288 'order': maximum order of polynomial. needed
3289 if blfunc='poly' or 'chebyshev',
3290 'npiece': number or piecewise polynomial.
3291 needed if blfunc='cspline',
3292 'nwave': a list of sinusoidal wave numbers.
3293 needed if blfunc='sinusoid', and
3294 'masklist': min-max windows for channel mask.
3295 the specified ranges will be used
3296 for fitting.
3297 bltable: name of output baseline table where baseline
3298 parameters and fitting results recorded.
3299 default is ''(no output).
3300 overwrite: if True when an existing baseline table is
3301 specified for bltable, overwrites it.
3302 Otherwise there is no harm.
3303 default is False.
3304
3305 Example:
3306 sub_baseline(blinfo=[{'row':0, 'blfunc':'poly', 'order':5,
3307 'masklist':[[10,350],[352,510]]},
3308 {'row':1, 'blfunc':'cspline', 'npiece':3,
3309 'masklist':[[3,16],[19,404],[407,511]]}
3310 ])
3311
3312 the first spectrum (row=0) will be fitted with polynomial
3313 of order=5 and the next one (row=1) will be fitted with cubic
3314 spline consisting of 3 pieces.
3315 """
3316
3317 try:
3318 varlist = vars()
3319 if retfitres is None: retfitres = False
3320 if blinfo is None: blinfo = []
3321 if bltable is None: bltable = ''
3322 if overwrite is None: overwrite = False
3323
3324 if insitu is None: insitu = rcParams['insitu']
3325 if insitu:
3326 workscan = self
3327 else:
3328 workscan = self.copy()
3329
3330 nrow = workscan.nrow()
3331
3332 in_blinfo = pack_blinfo(blinfo=blinfo, maxirow=nrow)
3333
3334 print "in_blinfo=< "+ str(in_blinfo)+" >"
3335
3336 sres = workscan._sub_baseline(in_blinfo,
3337 retfitres,
3338 bltable,
3339 os.path.exists(bltable),
3340 overwrite)
3341 if retfitres: res = parse_fitresult(sres)
3342
3343 workscan._add_history('sub_baseline', varlist)
3344
3345 if insitu:
3346 self._assign(workscan)
3347 if retfitres:
3348 return res
3349 else:
3350 return None
3351 else:
3352 if retfitres:
3353 return {'scantable': workscan, 'fitresults': res}
3354 else:
3355 return workscan
3356
3357 except RuntimeError, e:
3358 raise_fitting_failure_exception(e)
3359
3360 @asaplog_post_dec
3361 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
3362 whichrow=None, uselinefinder=None, edge=None,
3363 threshold=None, chan_avg_limit=None):
3364 """\
3365 Calculates and returns model selection criteria for a specified
3366 baseline model and a given spectrum data.
3367 Available values include Akaike Information Criterion (AIC), the
3368 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
3369 Bayesian Information Criterion (BIC) and the Generalised Cross
3370 Validation (GCV).
3371
3372 Parameters:
3373 value: name of model selection criteria to calculate.
3374 available ones include 'aic', 'aicc', 'bic' and
3375 'gcv'. default is 'aicc'.
3376 blfunc: baseline function name. available ones include
3377 'chebyshev', 'cspline' and 'sinusoid'.
3378 default is 'chebyshev'.
3379 order: parameter for basline function. actually stands for
3380 order of polynomial (order) for 'chebyshev',
3381 number of spline pieces (npiece) for 'cspline' and
3382 maximum wave number for 'sinusoid', respectively.
3383 default is 5 (which is also the default order value
3384 for [auto_]chebyshev_baseline()).
3385 mask: an optional mask. default is [].
3386 whichrow: row number. default is 0 (the first row)
3387 uselinefinder: use sd.linefinder() to flag out line regions
3388 default is True.
3389 edge: an optional number of channel to drop at
3390 the edge of spectrum. If only one value is
3391 specified, the same number will be dropped
3392 from both sides of the spectrum. Default
3393 is to keep all channels. Nested tuples
3394 represent individual edge selection for
3395 different IFs (a number of spectral channels
3396 can be different)
3397 default is (0, 0).
3398 threshold: the threshold used by line finder. It is
3399 better to keep it large as only strong lines
3400 affect the baseline solution.
3401 default is 3.
3402 chan_avg_limit: a maximum number of consequtive spectral
3403 channels to average during the search of
3404 weak and broad lines. The default is no
3405 averaging (and no search for weak lines).
3406 If such lines can affect the fitted baseline
3407 (e.g. a high order polynomial is fitted),
3408 increase this parameter (usually values up
3409 to 8 are reasonable). Most users of this
3410 method should find the default value sufficient.
3411 default is 1.
3412
3413 Example:
3414 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
3415 """
3416
3417 try:
3418 varlist = vars()
3419
3420 if value is None: value = 'aicc'
3421 if blfunc is None: blfunc = 'chebyshev'
3422 if order is None: order = 5
3423 if mask is None: mask = []
3424 if whichrow is None: whichrow = 0
3425 if uselinefinder is None: uselinefinder = True
3426 if edge is None: edge = (0, 0)
3427 if threshold is None: threshold = 3
3428 if chan_avg_limit is None: chan_avg_limit = 1
3429
3430 return self._calc_aic(value, blfunc, order, mask,
3431 whichrow, uselinefinder, edge,
3432 threshold, chan_avg_limit)
3433
3434 except RuntimeError, e:
3435 raise_fitting_failure_exception(e)
3436
3437 @asaplog_post_dec
3438 def sinusoid_baseline(self, mask=None, applyfft=None,
3439 fftmethod=None, fftthresh=None,
3440 addwn=None, rejwn=None,
3441 insitu=None,
3442 clipthresh=None, clipniter=None,
3443 plot=None,
3444 getresidual=None,
3445 showprogress=None, minnrow=None,
3446 outlog=None,
3447 blfile=None, csvformat=None,
3448 bltable=None):
3449 """\
3450 Return a scan which has been baselined (all rows) with sinusoidal
3451 functions.
3452
3453 Parameters:
3454 mask: an optional mask
3455 applyfft: if True use some method, such as FFT, to find
3456 strongest sinusoidal components in the wavenumber
3457 domain to be used for baseline fitting.
3458 default is True.
3459 fftmethod: method to find the strong sinusoidal components.
3460 now only 'fft' is available and it is the default.
3461 fftthresh: the threshold to select wave numbers to be used for
3462 fitting from the distribution of amplitudes in the
3463 wavenumber domain.
3464 both float and string values accepted.
3465 given a float value, the unit is set to sigma.
3466 for string values, allowed formats include:
3467 'xsigma' or 'x' (= x-sigma level. e.g.,
3468 '3sigma'), or
3469 'topx' (= the x strongest ones, e.g. 'top5').
3470 default is 3.0 (unit: sigma).
3471 addwn: the additional wave numbers to be used for fitting.
3472 list or integer value is accepted to specify every
3473 wave numbers. also string value can be used in case
3474 you need to specify wave numbers in a certain range,
3475 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3476 '<a' (= 0,1,...,a-2,a-1),
3477 '>=a' (= a, a+1, ... up to the maximum wave
3478 number corresponding to the Nyquist
3479 frequency for the case of FFT).
3480 default is [0].
3481 rejwn: the wave numbers NOT to be used for fitting.
3482 can be set just as addwn but has higher priority:
3483 wave numbers which are specified both in addwn
3484 and rejwn will NOT be used. default is [].
3485 insitu: if False a new scantable is returned.
3486 Otherwise, the scaling is done in-situ
3487 The default is taken from .asaprc (False)
3488 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3489 clipniter: maximum number of iteration of 'clipthresh'-sigma
3490 clipping (default is 0)
3491 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3492 plot the fit and the residual. In this each
3493 indivual fit has to be approved, by typing 'y'
3494 or 'n'
3495 getresidual: if False, returns best-fit values instead of
3496 residual. (default is True)
3497 showprogress: show progress status for large data.
3498 default is True.
3499 minnrow: minimum number of input spectra to show.
3500 default is 1000.
3501 outlog: Output the coefficients of the best-fit
3502 function to logger (default is False)
3503 blfile: Name of a text file in which the best-fit
3504 parameter values to be written
3505 (default is '': no file/logger output)
3506 csvformat: if True blfile is csv-formatted, default is False.
3507 bltable: name of a baseline table where fitting results
3508 (coefficients, rms, etc.) are to be written.
3509 if given, fitting results will NOT be output to
3510 scantable (insitu=True) or None will be
3511 returned (insitu=False).
3512 (default is "": no table output)
3513
3514 Example:
3515 # return a scan baselined by a combination of sinusoidal curves
3516 # having wave numbers in spectral window up to 10,
3517 # also with 3-sigma clipping, iteration up to 4 times
3518 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
3519
3520 Note:
3521 The best-fit parameter values output in logger and/or blfile are now
3522 based on specunit of 'channel'.
3523 """
3524
3525 try:
3526 varlist = vars()
3527
3528 if insitu is None: insitu = rcParams['insitu']
3529 if insitu:
3530 workscan = self
3531 else:
3532 workscan = self.copy()
3533
3534 if mask is None: mask = []
3535 if applyfft is None: applyfft = True
3536 if fftmethod is None: fftmethod = 'fft'
3537 if fftthresh is None: fftthresh = 3.0
3538 if addwn is None: addwn = [0]
3539 if rejwn is None: rejwn = []
3540 if clipthresh is None: clipthresh = 3.0
3541 if clipniter is None: clipniter = 0
3542 if plot is None: plot = False
3543 if getresidual is None: getresidual = True
3544 if showprogress is None: showprogress = True
3545 if minnrow is None: minnrow = 1000
3546 if outlog is None: outlog = False
3547 if blfile is None: blfile = ''
3548 if csvformat is None: csvformat = False
3549 if bltable is None: bltable = ''
3550
3551 sapplyfft = 'true' if applyfft else 'false'
3552 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3553
3554 scsvformat = 'T' if csvformat else 'F'
3555
3556 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3557 workscan._sinusoid_baseline(mask,
3558 fftinfo,
3559 #applyfft, fftmethod.lower(),
3560 #str(fftthresh).lower(),
3561 workscan._parse_wn(addwn),
3562 workscan._parse_wn(rejwn),
3563 clipthresh, clipniter,
3564 getresidual,
3565 pack_progress_params(showprogress,
3566 minnrow),
3567 outlog, scsvformat+blfile,
3568 bltable)
3569 workscan._add_history('sinusoid_baseline', varlist)
3570
3571 if bltable == '':
3572 if insitu:
3573 self._assign(workscan)
3574 else:
3575 return workscan
3576 else:
3577 if not insitu:
3578 return None
3579
3580 except RuntimeError, e:
3581 raise_fitting_failure_exception(e)
3582
3583
3584 @asaplog_post_dec
3585 def auto_sinusoid_baseline(self, mask=None, applyfft=None,
3586 fftmethod=None, fftthresh=None,
3587 addwn=None, rejwn=None,
3588 insitu=None,
3589 clipthresh=None, clipniter=None,
3590 edge=None, threshold=None, chan_avg_limit=None,
3591 plot=None,
3592 getresidual=None,
3593 showprogress=None, minnrow=None,
3594 outlog=None,
3595 blfile=None, csvformat=None,
3596 bltable=None):
3597 """\
3598 Return a scan which has been baselined (all rows) with sinusoidal
3599 functions.
3600 Spectral lines are detected first using linefinder and masked out
3601 to avoid them affecting the baseline solution.
3602
3603 Parameters:
3604 mask: an optional mask retreived from scantable
3605 applyfft: if True use some method, such as FFT, to find
3606 strongest sinusoidal components in the wavenumber
3607 domain to be used for baseline fitting.
3608 default is True.
3609 fftmethod: method to find the strong sinusoidal components.
3610 now only 'fft' is available and it is the default.
3611 fftthresh: the threshold to select wave numbers to be used for
3612 fitting from the distribution of amplitudes in the
3613 wavenumber domain.
3614 both float and string values accepted.
3615 given a float value, the unit is set to sigma.
3616 for string values, allowed formats include:
3617 'xsigma' or 'x' (= x-sigma level. e.g.,
3618 '3sigma'), or
3619 'topx' (= the x strongest ones, e.g. 'top5').
3620 default is 3.0 (unit: sigma).
3621 addwn: the additional wave numbers to be used for fitting.
3622 list or integer value is accepted to specify every
3623 wave numbers. also string value can be used in case
3624 you need to specify wave numbers in a certain range,
3625 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
3626 '<a' (= 0,1,...,a-2,a-1),
3627 '>=a' (= a, a+1, ... up to the maximum wave
3628 number corresponding to the Nyquist
3629 frequency for the case of FFT).
3630 default is [0].
3631 rejwn: the wave numbers NOT to be used for fitting.
3632 can be set just as addwn but has higher priority:
3633 wave numbers which are specified both in addwn
3634 and rejwn will NOT be used. default is [].
3635 insitu: if False a new scantable is returned.
3636 Otherwise, the scaling is done in-situ
3637 The default is taken from .asaprc (False)
3638 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3639 clipniter: maximum number of iteration of 'clipthresh'-sigma
3640 clipping (default is 0)
3641 edge: an optional number of channel to drop at
3642 the edge of spectrum. If only one value is
3643 specified, the same number will be dropped
3644 from both sides of the spectrum. Default
3645 is to keep all channels. Nested tuples
3646 represent individual edge selection for
3647 different IFs (a number of spectral channels
3648 can be different)
3649 threshold: the threshold used by line finder. It is
3650 better to keep it large as only strong lines
3651 affect the baseline solution.
3652 chan_avg_limit: a maximum number of consequtive spectral
3653 channels to average during the search of
3654 weak and broad lines. The default is no
3655 averaging (and no search for weak lines).
3656 If such lines can affect the fitted baseline
3657 (e.g. a high order polynomial is fitted),
3658 increase this parameter (usually values up
3659 to 8 are reasonable). Most users of this
3660 method should find the default value sufficient.
3661 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3662 plot the fit and the residual. In this each
3663 indivual fit has to be approved, by typing 'y'
3664 or 'n'
3665 getresidual: if False, returns best-fit values instead of
3666 residual. (default is True)
3667 showprogress: show progress status for large data.
3668 default is True.
3669 minnrow: minimum number of input spectra to show.
3670 default is 1000.
3671 outlog: Output the coefficients of the best-fit
3672 function to logger (default is False)
3673 blfile: Name of a text file in which the best-fit
3674 parameter values to be written
3675 (default is "": no file/logger output)
3676 csvformat: if True blfile is csv-formatted, default is False.
3677 bltable: name of a baseline table where fitting results
3678 (coefficients, rms, etc.) are to be written.
3679 if given, fitting results will NOT be output to
3680 scantable (insitu=True) or None will be
3681 returned (insitu=False).
3682 (default is "": no table output)
3683
3684 Example:
3685 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
3686
3687 Note:
3688 The best-fit parameter values output in logger and/or blfile are now
3689 based on specunit of 'channel'.
3690 """
3691
3692 try:
3693 varlist = vars()
3694
3695 if insitu is None: insitu = rcParams['insitu']
3696 if insitu:
3697 workscan = self
3698 else:
3699 workscan = self.copy()
3700
3701 if mask is None: mask = []
3702 if applyfft is None: applyfft = True
3703 if fftmethod is None: fftmethod = 'fft'
3704 if fftthresh is None: fftthresh = 3.0
3705 if addwn is None: addwn = [0]
3706 if rejwn is None: rejwn = []
3707 if clipthresh is None: clipthresh = 3.0
3708 if clipniter is None: clipniter = 0
3709 if edge is None: edge = (0,0)
3710 if threshold is None: threshold = 3
3711 if chan_avg_limit is None: chan_avg_limit = 1
3712 if plot is None: plot = False
3713 if getresidual is None: getresidual = True
3714 if showprogress is None: showprogress = True
3715 if minnrow is None: minnrow = 1000
3716 if outlog is None: outlog = False
3717 if blfile is None: blfile = ''
3718 if csvformat is None: csvformat = False
3719 if bltable is None: bltable = ''
3720
3721 sapplyfft = 'true' if applyfft else 'false'
3722 fftinfo = ','.join([sapplyfft, fftmethod.lower(), str(fftthresh).lower()])
3723
3724 scsvformat = 'T' if csvformat else 'F'
3725
3726 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
3727 workscan._auto_sinusoid_baseline(mask,
3728 fftinfo,
3729 workscan._parse_wn(addwn),
3730 workscan._parse_wn(rejwn),
3731 clipthresh, clipniter,
3732 normalise_edge_param(edge),
3733 threshold, chan_avg_limit,
3734 getresidual,
3735 pack_progress_params(showprogress,
3736 minnrow),
3737 outlog, scsvformat+blfile, bltable)
3738 workscan._add_history("auto_sinusoid_baseline", varlist)
3739
3740 if bltable == '':
3741 if insitu:
3742 self._assign(workscan)
3743 else:
3744 return workscan
3745 else:
3746 if not insitu:
3747 return None
3748
3749 except RuntimeError, e:
3750 raise_fitting_failure_exception(e)
3751
3752 @asaplog_post_dec
3753 def cspline_baseline(self, mask=None, npiece=None, insitu=None,
3754 clipthresh=None, clipniter=None, plot=None,
3755 getresidual=None, showprogress=None, minnrow=None,
3756 outlog=None, blfile=None, csvformat=None,
3757 bltable=None):
3758 """\
3759 Return a scan which has been baselined (all rows) by cubic spline
3760 function (piecewise cubic polynomial).
3761
3762 Parameters:
3763 mask: An optional mask
3764 npiece: Number of pieces. (default is 2)
3765 insitu: If False a new scantable is returned.
3766 Otherwise, the scaling is done in-situ
3767 The default is taken from .asaprc (False)
3768 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3769 clipniter: maximum number of iteration of 'clipthresh'-sigma
3770 clipping (default is 0)
3771 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3772 plot the fit and the residual. In this each
3773 indivual fit has to be approved, by typing 'y'
3774 or 'n'
3775 getresidual: if False, returns best-fit values instead of
3776 residual. (default is True)
3777 showprogress: show progress status for large data.
3778 default is True.
3779 minnrow: minimum number of input spectra to show.
3780 default is 1000.
3781 outlog: Output the coefficients of the best-fit
3782 function to logger (default is False)
3783 blfile: Name of a text file in which the best-fit
3784 parameter values to be written
3785 (default is "": no file/logger output)
3786 csvformat: if True blfile is csv-formatted, default is False.
3787 bltable: name of a baseline table where fitting results
3788 (coefficients, rms, etc.) are to be written.
3789 if given, fitting results will NOT be output to
3790 scantable (insitu=True) or None will be
3791 returned (insitu=False).
3792 (default is "": no table output)
3793
3794 Example:
3795 # return a scan baselined by a cubic spline consisting of 2 pieces
3796 # (i.e., 1 internal knot),
3797 # also with 3-sigma clipping, iteration up to 4 times
3798 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3799
3800 Note:
3801 The best-fit parameter values output in logger and/or blfile are now
3802 based on specunit of 'channel'.
3803 """
3804
3805 try:
3806 varlist = vars()
3807
3808 if insitu is None: insitu = rcParams['insitu']
3809 if insitu:
3810 workscan = self
3811 else:
3812 workscan = self.copy()
3813
3814 if mask is None: mask = []
3815 if npiece is None: npiece = 2
3816 if clipthresh is None: clipthresh = 3.0
3817 if clipniter is None: clipniter = 0
3818 if plot is None: plot = False
3819 if getresidual is None: getresidual = True
3820 if showprogress is None: showprogress = True
3821 if minnrow is None: minnrow = 1000
3822 if outlog is None: outlog = False
3823 if blfile is None: blfile = ''
3824 if csvformat is None: csvformat = False
3825 if bltable is None: bltable = ''
3826
3827 scsvformat = 'T' if csvformat else 'F'
3828
3829 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3830 workscan._cspline_baseline(mask, npiece,
3831 clipthresh, clipniter,
3832 getresidual,
3833 pack_progress_params(showprogress,
3834 minnrow),
3835 outlog, scsvformat+blfile,
3836 bltable)
3837 workscan._add_history("cspline_baseline", varlist)
3838
3839 if bltable == '':
3840 if insitu:
3841 self._assign(workscan)
3842 else:
3843 return workscan
3844 else:
3845 if not insitu:
3846 return None
3847
3848 except RuntimeError, e:
3849 raise_fitting_failure_exception(e)
3850
3851 @asaplog_post_dec
3852 def auto_cspline_baseline(self, mask=None, npiece=None, insitu=None,
3853 clipthresh=None, clipniter=None,
3854 edge=None, threshold=None, chan_avg_limit=None,
3855 getresidual=None, plot=None,
3856 showprogress=None, minnrow=None, outlog=None,
3857 blfile=None, csvformat=None, bltable=None):
3858 """\
3859 Return a scan which has been baselined (all rows) by cubic spline
3860 function (piecewise cubic polynomial).
3861 Spectral lines are detected first using linefinder and masked out
3862 to avoid them affecting the baseline solution.
3863
3864 Parameters:
3865 mask: an optional mask retreived from scantable
3866 npiece: Number of pieces. (default is 2)
3867 insitu: if False a new scantable is returned.
3868 Otherwise, the scaling is done in-situ
3869 The default is taken from .asaprc (False)
3870 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3871 clipniter: maximum number of iteration of 'clipthresh'-sigma
3872 clipping (default is 0)
3873 edge: an optional number of channel to drop at
3874 the edge of spectrum. If only one value is
3875 specified, the same number will be dropped
3876 from both sides of the spectrum. Default
3877 is to keep all channels. Nested tuples
3878 represent individual edge selection for
3879 different IFs (a number of spectral channels
3880 can be different)
3881 threshold: the threshold used by line finder. It is
3882 better to keep it large as only strong lines
3883 affect the baseline solution.
3884 chan_avg_limit: a maximum number of consequtive spectral
3885 channels to average during the search of
3886 weak and broad lines. The default is no
3887 averaging (and no search for weak lines).
3888 If such lines can affect the fitted baseline
3889 (e.g. a high order polynomial is fitted),
3890 increase this parameter (usually values up
3891 to 8 are reasonable). Most users of this
3892 method should find the default value sufficient.
3893 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3894 plot the fit and the residual. In this each
3895 indivual fit has to be approved, by typing 'y'
3896 or 'n'
3897 getresidual: if False, returns best-fit values instead of
3898 residual. (default is True)
3899 showprogress: show progress status for large data.
3900 default is True.
3901 minnrow: minimum number of input spectra to show.
3902 default is 1000.
3903 outlog: Output the coefficients of the best-fit
3904 function to logger (default is False)
3905 blfile: Name of a text file in which the best-fit
3906 parameter values to be written
3907 (default is "": no file/logger output)
3908 csvformat: if True blfile is csv-formatted, default is False.
3909 bltable: name of a baseline table where fitting results
3910 (coefficients, rms, etc.) are to be written.
3911 if given, fitting results will NOT be output to
3912 scantable (insitu=True) or None will be
3913 returned (insitu=False).
3914 (default is "": no table output)
3915
3916 Example:
3917 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3918
3919 Note:
3920 The best-fit parameter values output in logger and/or blfile are now
3921 based on specunit of 'channel'.
3922 """
3923
3924 try:
3925 varlist = vars()
3926
3927 if insitu is None: insitu = rcParams['insitu']
3928 if insitu:
3929 workscan = self
3930 else:
3931 workscan = self.copy()
3932
3933 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3934 if mask is None: mask = []
3935 if npiece is None: npiece = 2
3936 if clipthresh is None: clipthresh = 3.0
3937 if clipniter is None: clipniter = 0
3938 if edge is None: edge = (0, 0)
3939 if threshold is None: threshold = 3
3940 if chan_avg_limit is None: chan_avg_limit = 1
3941 if plot is None: plot = False
3942 if getresidual is None: getresidual = True
3943 if showprogress is None: showprogress = True
3944 if minnrow is None: minnrow = 1000
3945 if outlog is None: outlog = False
3946 if blfile is None: blfile = ''
3947 if csvformat is None: csvformat = False
3948 if bltable is None: bltable = ''
3949
3950 scsvformat = 'T' if csvformat else 'F'
3951
3952 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3953 workscan._auto_cspline_baseline(mask, npiece,
3954 clipthresh, clipniter,
3955 normalise_edge_param(edge),
3956 threshold,
3957 chan_avg_limit, getresidual,
3958 pack_progress_params(showprogress,
3959 minnrow),
3960 outlog,
3961 scsvformat+blfile,
3962 bltable)
3963 workscan._add_history("auto_cspline_baseline", varlist)
3964
3965 if bltable == '':
3966 if insitu:
3967 self._assign(workscan)
3968 else:
3969 return workscan
3970 else:
3971 if not insitu:
3972 return None
3973
3974 except RuntimeError, e:
3975 raise_fitting_failure_exception(e)
3976
3977 @asaplog_post_dec
3978 def chebyshev_baseline(self, mask=None, order=None, insitu=None,
3979 clipthresh=None, clipniter=None, plot=None,
3980 getresidual=None, showprogress=None, minnrow=None,
3981 outlog=None, blfile=None, csvformat=None,
3982 bltable=None):
3983 """\
3984 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3985
3986 Parameters:
3987 mask: An optional mask
3988 order: the maximum order of Chebyshev polynomial (default is 5)
3989 insitu: If False a new scantable is returned.
3990 Otherwise, the scaling is done in-situ
3991 The default is taken from .asaprc (False)
3992 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3993 clipniter: maximum number of iteration of 'clipthresh'-sigma
3994 clipping (default is 0)
3995 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3996 plot the fit and the residual. In this each
3997 indivual fit has to be approved, by typing 'y'
3998 or 'n'
3999 getresidual: if False, returns best-fit values instead of
4000 residual. (default is True)
4001 showprogress: show progress status for large data.
4002 default is True.
4003 minnrow: minimum number of input spectra to show.
4004 default is 1000.
4005 outlog: Output the coefficients of the best-fit
4006 function to logger (default is False)
4007 blfile: Name of a text file in which the best-fit
4008 parameter values to be written
4009 (default is "": no file/logger output)
4010 csvformat: if True blfile is csv-formatted, default is False.
4011 bltable: name of a baseline table where fitting results
4012 (coefficients, rms, etc.) are to be written.
4013 if given, fitting results will NOT be output to
4014 scantable (insitu=True) or None will be
4015 returned (insitu=False).
4016 (default is "": no table output)
4017
4018 Example:
4019 # return a scan baselined by a cubic spline consisting of 2 pieces
4020 # (i.e., 1 internal knot),
4021 # also with 3-sigma clipping, iteration up to 4 times
4022 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
4023
4024 Note:
4025 The best-fit parameter values output in logger and/or blfile are now
4026 based on specunit of 'channel'.
4027 """
4028
4029 try:
4030 varlist = vars()
4031
4032 if insitu is None: insitu = rcParams['insitu']
4033 if insitu:
4034 workscan = self
4035 else:
4036 workscan = self.copy()
4037
4038 if mask is None: mask = []
4039 if order is None: order = 5
4040 if clipthresh is None: clipthresh = 3.0
4041 if clipniter is None: clipniter = 0
4042 if plot is None: plot = False
4043 if getresidual is None: getresidual = True
4044 if showprogress is None: showprogress = True
4045 if minnrow is None: minnrow = 1000
4046 if outlog is None: outlog = False
4047 if blfile is None: blfile = ''
4048 if csvformat is None: csvformat = False
4049 if bltable is None: bltable = ''
4050
4051 scsvformat = 'T' if csvformat else 'F'
4052
4053 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
4054 workscan._chebyshev_baseline(mask, order,
4055 clipthresh, clipniter,
4056 getresidual,
4057 pack_progress_params(showprogress,
4058 minnrow),
4059 outlog, scsvformat+blfile,
4060 bltable)
4061 workscan._add_history("chebyshev_baseline", varlist)
4062
4063 if bltable == '':
4064 if insitu:
4065 self._assign(workscan)
4066 else:
4067 return workscan
4068 else:
4069 if not insitu:
4070 return None
4071
4072 except RuntimeError, e:
4073 raise_fitting_failure_exception(e)
4074
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, mask=None, order=None, insitu=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev
        polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the maximum order of Chebyshev polynomial
                            (default is 5)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot of the call arguments for the HISTORY entry
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve all defaults before passing them to the C++ layer
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is passed to C++ as a 'T'/'F' character prefixed
            # onto the blfile name
            scsvformat = 'T' if csvformat else 'F'

            # CURRENTLY, plot=True UNAVAILABLE UNTIL Chebyshev fitting is
            # implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order,
                                              clipthresh, clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile,
                                              bltable)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            # when a baseline table is requested, results go only to the
            # table: nothing is assigned back / returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4197
4198 @asaplog_post_dec
4199 def poly_baseline(self, mask=None, order=None, insitu=None,
4200 clipthresh=None, clipniter=None, plot=None,
4201 getresidual=None, showprogress=None, minnrow=None,
4202 outlog=None, blfile=None, csvformat=None,
4203 bltable=None):
4204 """\
4205 Return a scan which has been baselined (all rows) by a polynomial.
4206 Parameters:
4207 mask: an optional mask
4208 order: the order of the polynomial (default is 0)
4209 insitu: if False a new scantable is returned.
4210 Otherwise, the scaling is done in-situ
4211 The default is taken from .asaprc (False)
4212 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
4213 clipniter: maximum number of iteration of 'clipthresh'-sigma
4214 clipping (default is 0)
4215 plot: plot the fit and the residual. In this each
4216 indivual fit has to be approved, by typing 'y'
4217 or 'n'
4218 getresidual: if False, returns best-fit values instead of
4219 residual. (default is True)
4220 showprogress: show progress status for large data.
4221 default is True.
4222 minnrow: minimum number of input spectra to show.
4223 default is 1000.
4224 outlog: Output the coefficients of the best-fit
4225 function to logger (default is False)
4226 blfile: Name of a text file in which the best-fit
4227 parameter values to be written
4228 (default is "": no file/logger output)
4229 csvformat: if True blfile is csv-formatted, default is False.
4230 bltable: name of a baseline table where fitting results
4231 (coefficients, rms, etc.) are to be written.
4232 if given, fitting results will NOT be output to
4233 scantable (insitu=True) or None will be
4234 returned (insitu=False).
4235 (default is "": no table output)
4236
4237 Example:
4238 # return a scan baselined by a third order polynomial,
4239 # not using a mask
4240 bscan = scan.poly_baseline(order=3)
4241 """
4242
4243 try:
4244 varlist = vars()
4245
4246 if insitu is None:
4247 insitu = rcParams["insitu"]
4248 if insitu:
4249 workscan = self
4250 else:
4251 workscan = self.copy()
4252
4253 if mask is None: mask = []
4254 if order is None: order = 0
4255 if clipthresh is None: clipthresh = 3.0
4256 if clipniter is None: clipniter = 0
4257 if plot is None: plot = False
4258 if getresidual is None: getresidual = True
4259 if showprogress is None: showprogress = True
4260 if minnrow is None: minnrow = 1000
4261 if outlog is None: outlog = False
4262 if blfile is None: blfile = ''
4263 if csvformat is None: csvformat = False
4264 if bltable is None: bltable = ''
4265
4266 scsvformat = 'T' if csvformat else 'F'
4267
4268 if plot:
4269 outblfile = (blfile != "") and \
4270 os.path.exists(os.path.expanduser(
4271 os.path.expandvars(blfile))
4272 )
4273 if outblfile:
4274 blf = open(blfile, "a")
4275
4276 f = fitter()
4277 f.set_function(lpoly=order)
4278
4279 rows = xrange(workscan.nrow())
4280 #if len(rows) > 0: workscan._init_blinfo()
4281
4282 action = "H"
4283 for r in rows:
4284 f.x = workscan._getabcissa(r)
4285 f.y = workscan._getspectrum(r)
4286 if mask:
4287 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
4288 else: # mask=None
4289 f.mask = workscan._getmask(r)
4290
4291 f.data = None
4292 f.fit()
4293
4294 if action != "Y": # skip plotting when accepting all
4295 f.plot(residual=True)
4296 #accept_fit = raw_input("Accept fit ( [y]/n ): ")
4297 #if accept_fit.upper() == "N":
4298 # #workscan._append_blinfo(None, None, None)
4299 # continue
4300 accept_fit = self._get_verify_action("Accept fit?",action)
4301 if r == 0: action = None
4302 if accept_fit.upper() == "N":
4303 continue
4304 elif accept_fit.upper() == "R":
4305 break
4306 elif accept_fit.upper() == "A":
4307 action = "Y"
4308
4309 blpars = f.get_parameters()
4310 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
4311 #workscan._append_blinfo(blpars, masklist, f.mask)
4312 workscan._setspectrum((f.fitter.getresidual()
4313 if getresidual else f.fitter.getfit()), r)
4314
4315 if outblfile:
4316 rms = workscan.get_rms(f.mask, r)
4317 dataout = \
4318 workscan.format_blparams_row(blpars["params"],
4319 blpars["fixed"],
4320 rms, str(masklist),
4321 r, True, csvformat)
4322 blf.write(dataout)
4323
4324 f._p.unmap()
4325 f._p = None
4326
4327 if outblfile:
4328 blf.close()
4329 else:
4330 workscan._poly_baseline(mask, order,
4331 clipthresh, clipniter, #
4332 getresidual,
4333 pack_progress_params(showprogress,
4334 minnrow),
4335 outlog, scsvformat+blfile,
4336 bltable) #
4337
4338 workscan._add_history("poly_baseline", varlist)
4339
4340 if insitu:
4341 self._assign(workscan)
4342 else:
4343 return workscan
4344
4345 except RuntimeError, e:
4346 raise_fitting_failure_exception(e)
4347
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, insitu=None,
                           clipthresh=None, clipniter=None,
                           edge=None, threshold=None, chan_avg_limit=None,
                           getresidual=None, plot=None,
                           showprogress=None, minnrow=None, outlog=None,
                           blfile=None, csvformat=None, bltable=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.
            bltable:        name of a baseline table where fitting results
                            (coefficients, rms, etc.) are to be written.
                            if given, fitting results will NOT be output to
                            scantable (insitu=True) or None will be
                            returned (insitu=False).
                            (default is "": no table output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # snapshot of the call arguments for the HISTORY entry
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve all defaults before use
            if mask is None: mask = []
            if order is None: order = 0
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False
            if bltable is None: bltable = ''

            # csv flag is passed to C++ as a 'T'/'F' character prefixed
            # onto the blfile name
            scsvformat = 'T' if csvformat else 'F'

            # flatten nested per-IF edge spec into a flat (2 entries per IF)
            # sequence
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: line-find and fit row by row in Python
                # so each fit can be inspected and accepted/rejected.
                # NOTE: blfile is only appended to when it already exists.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" forces an initial prompt,
                # "Y" (set by answering 'A') accepts all remaining fits
                action = "H"
                for r in rows:
                    # pick this row's (start, end) edge pair from the
                    # flattened per-IF edge list
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate everything to C++
                workscan._auto_poly_baseline(mask, order,
                                             clipthresh, clipniter,
                                             edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile,
                                             bltable)
            workscan._add_history("auto_poly_baseline", varlist)

            # when a baseline table is requested, results go only to the
            # table: nothing is assigned back / returned
            if bltable == '':
                if insitu:
                    self._assign(workscan)
                else:
                    return workscan
            else:
                if not insitu:
                    return None

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
4532
4533 def _init_blinfo(self):
4534 """\
4535 Initialise the following three auxiliary members:
4536 blpars : parameters of the best-fit baseline,
4537 masklists : mask data (edge positions of masked channels) and
4538 actualmask : mask data (in boolean list),
4539 to keep for use later (including output to logger/text files).
4540 Used by poly_baseline() and auto_poly_baseline() in case of
4541 'plot=True'.
4542 """
4543 self.blpars = []
4544 self.masklists = []
4545 self.actualmask = []
4546 return
4547
4548 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
4549 """\
4550 Append baseline-fitting related info to blpars, masklist and
4551 actualmask.
4552 """
4553 self.blpars.append(data_blpars)
4554 self.masklists.append(data_masklists)
4555 self.actualmask.append(data_actualmask)
4556 return
4557
4558 @asaplog_post_dec
4559 def rotate_linpolphase(self, angle):
4560 """\
4561 Rotate the phase of the complex polarization O=Q+iU correlation.
4562 This is always done in situ in the raw data. So if you call this
4563 function more than once then each call rotates the phase further.
4564
4565 Parameters:
4566
4567 angle: The angle (degrees) to rotate (add) by.
4568
4569 Example::
4570
4571 scan.rotate_linpolphase(2.3)
4572
4573 """
4574 varlist = vars()
4575 self._math._rotate_linpolphase(self, angle)
4576 self._add_history("rotate_linpolphase", varlist)
4577 return
4578
4579 @asaplog_post_dec
4580 def rotate_xyphase(self, angle):
4581 """\
4582 Rotate the phase of the XY correlation. This is always done in situ
4583 in the data. So if you call this function more than once
4584 then each call rotates the phase further.
4585
4586 Parameters:
4587
4588 angle: The angle (degrees) to rotate (add) by.
4589
4590 Example::
4591
4592 scan.rotate_xyphase(2.3)
4593
4594 """
4595 varlist = vars()
4596 self._math._rotate_xyphase(self, angle)
4597 self._add_history("rotate_xyphase", varlist)
4598 return
4599
4600 @asaplog_post_dec
4601 def swap_linears(self):
4602 """\
4603 Swap the linear polarisations XX and YY, or better the first two
4604 polarisations as this also works for ciculars.
4605 """
4606 varlist = vars()
4607 self._math._swap_linears(self)
4608 self._add_history("swap_linears", varlist)
4609 return
4610
4611 @asaplog_post_dec
4612 def invert_phase(self):
4613 """\
4614 Invert the phase of the complex polarisation
4615 """
4616 varlist = vars()
4617 self._math._invert_phase(self)
4618 self._add_history("invert_phase", varlist)
4619 return
4620
4621 @asaplog_post_dec
4622 def add(self, offset, insitu=None):
4623 """\
4624 Return a scan where all spectra have the offset added
4625
4626 Parameters:
4627
4628 offset: the offset
4629
4630 insitu: if False a new scantable is returned.
4631 Otherwise, the scaling is done in-situ
4632 The default is taken from .asaprc (False)
4633
4634 """
4635 if insitu is None: insitu = rcParams['insitu']
4636 self._math._setinsitu(insitu)
4637 varlist = vars()
4638 s = scantable(self._math._unaryop(self, offset, "ADD", False))
4639 s._add_history("add", varlist)
4640 if insitu:
4641 self._assign(s)
4642 else:
4643 return s
4644
4645 @asaplog_post_dec
4646 def scale(self, factor, tsys=True, insitu=None):
4647 """\
4648
4649 Return a scan where all spectra are scaled by the given 'factor'
4650
4651 Parameters:
4652
4653 factor: the scaling factor (float or 1D float list)
4654
4655 insitu: if False a new scantable is returned.
4656 Otherwise, the scaling is done in-situ
4657 The default is taken from .asaprc (False)
4658
4659 tsys: if True (default) then apply the operation to Tsys
4660 as well as the data
4661
4662 """
4663 if insitu is None: insitu = rcParams['insitu']
4664 self._math._setinsitu(insitu)
4665 varlist = vars()
4666 s = None
4667 import numpy
4668 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
4669 if isinstance(factor[0], list) or isinstance(factor[0],
4670 numpy.ndarray):
4671 from asapmath import _array2dOp
4672 s = _array2dOp( self, factor, "MUL", tsys, insitu )
4673 else:
4674 s = scantable( self._math._arrayop( self, factor,
4675 "MUL", tsys ) )
4676 else:
4677 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
4678 s._add_history("scale", varlist)
4679 if insitu:
4680 self._assign(s)
4681 else:
4682 return s
4683
4684 @preserve_selection
4685 def set_sourcetype(self, match, matchtype="pattern",
4686 sourcetype="reference"):
4687 """\
4688 Set the type of the source to be an source or reference scan
4689 using the provided pattern.
4690
4691 Parameters:
4692
4693 match: a Unix style pattern, regular expression or selector
4694
4695 matchtype: 'pattern' (default) UNIX style pattern or
4696 'regex' regular expression
4697
4698 sourcetype: the type of the source to use (source/reference)
4699
4700 """
4701 varlist = vars()
4702 stype = -1
4703 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
4704 stype = 1
4705 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
4706 stype = 0
4707 else:
4708 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
4709 if matchtype.lower().startswith("p"):
4710 matchtype = "pattern"
4711 elif matchtype.lower().startswith("r"):
4712 matchtype = "regex"
4713 else:
4714 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
4715 sel = selector()
4716 if isinstance(match, selector):
4717 sel = match
4718 else:
4719 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
4720 self.set_selection(sel)
4721 self._setsourcetype(stype)
4722 self._add_history("set_sourcetype", varlist)
4723
4724
4725 def set_sourcename(self, name):
4726 varlist = vars()
4727 self._setsourcename(name)
4728 self._add_history("set_sourcename", varlist)
4729
4730 @asaplog_post_dec
4731 @preserve_selection
4732 def auto_quotient(self, preserve=True, mode='paired', verify=False):
4733 """\
4734 This function allows to build quotients automatically.
4735 It assumes the observation to have the same number of
4736 "ons" and "offs"
4737
4738 Parameters:
4739
4740 preserve: you can preserve (default) the continuum or
4741 remove it. The equations used are
4742
4743 preserve: Output = Toff * (on/off) - Toff
4744
4745 remove: Output = Toff * (on/off) - Ton
4746
4747 mode: the on/off detection mode
4748 'paired' (default)
4749 identifies 'off' scans by the
4750 trailing '_R' (Mopra/Parkes) or
4751 '_e'/'_w' (Tid) and matches
4752 on/off pairs from the observing pattern
4753 'time'
4754 finds the closest off in time
4755
4756 .. todo:: verify argument is not implemented
4757
4758 """
4759 varlist = vars()
4760 modes = ["time", "paired"]
4761 if not mode in modes:
4762 msg = "please provide valid mode. Valid modes are %s" % (modes)
4763 raise ValueError(msg)
4764 s = None
4765 if mode.lower() == "paired":
4766 from asap._asap import srctype
4767 sel = self.get_selection()
4768 #sel.set_query("SRCTYPE==psoff")
4769 sel.set_types(srctype.psoff)
4770 self.set_selection(sel)
4771 offs = self.copy()
4772 #sel.set_query("SRCTYPE==pson")
4773 sel.set_types(srctype.pson)
4774 self.set_selection(sel)
4775 ons = self.copy()
4776 s = scantable(self._math._quotient(ons, offs, preserve))
4777 elif mode.lower() == "time":
4778 s = scantable(self._math._auto_quotient(self, mode, preserve))
4779 s._add_history("auto_quotient", varlist)
4780 return s
4781
4782 @asaplog_post_dec
4783 def mx_quotient(self, mask = None, weight='median', preserve=True):
4784 """\
4785 Form a quotient using "off" beams when observing in "MX" mode.
4786
4787 Parameters:
4788
4789 mask: an optional mask to be used when weight == 'stddev'
4790
4791 weight: How to average the off beams. Default is 'median'.
4792
4793 preserve: you can preserve (default) the continuum or
4794 remove it. The equations used are:
4795
4796 preserve: Output = Toff * (on/off) - Toff
4797
4798 remove: Output = Toff * (on/off) - Ton
4799
4800 """
4801 mask = mask or ()
4802 varlist = vars()
4803 on = scantable(self._math._mx_extract(self, 'on'))
4804 preoff = scantable(self._math._mx_extract(self, 'off'))
4805 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
4806 from asapmath import quotient
4807 q = quotient(on, off, preserve)
4808 q._add_history("mx_quotient", varlist)
4809 return q
4810
4811 @asaplog_post_dec
4812 def freq_switch(self, insitu=None):
4813 """\
4814 Apply frequency switching to the data.
4815
4816 Parameters:
4817
4818 insitu: if False a new scantable is returned.
4819 Otherwise, the swictching is done in-situ
4820 The default is taken from .asaprc (False)
4821
4822 """
4823 if insitu is None: insitu = rcParams['insitu']
4824 self._math._setinsitu(insitu)
4825 varlist = vars()
4826 s = scantable(self._math._freqswitch(self))
4827 s._add_history("freq_switch", varlist)
4828 if insitu:
4829 self._assign(s)
4830 else:
4831 return s
4832
4833 @asaplog_post_dec
4834 def recalc_azel(self):
4835 """Recalculate the azimuth and elevation for each position."""
4836 varlist = vars()
4837 self._recalcazel()
4838 self._add_history("recalc_azel", varlist)
4839 return
4840
4841 @asaplog_post_dec
4842 def __add__(self, other):
4843 """
4844 implicit on all axes and on Tsys
4845 """
4846 varlist = vars()
4847 s = self.__op( other, "ADD" )
4848 s._add_history("operator +", varlist)
4849 return s
4850
4851 @asaplog_post_dec
4852 def __sub__(self, other):
4853 """
4854 implicit on all axes and on Tsys
4855 """
4856 varlist = vars()
4857 s = self.__op( other, "SUB" )
4858 s._add_history("operator -", varlist)
4859 return s
4860
4861 @asaplog_post_dec
4862 def __mul__(self, other):
4863 """
4864 implicit on all axes and on Tsys
4865 """
4866 varlist = vars()
4867 s = self.__op( other, "MUL" ) ;
4868 s._add_history("operator *", varlist)
4869 return s
4870
4871
4872 @asaplog_post_dec
4873 def __div__(self, other):
4874 """
4875 implicit on all axes and on Tsys
4876 """
4877 varlist = vars()
4878 s = self.__op( other, "DIV" )
4879 s._add_history("operator /", varlist)
4880 return s
4881
4882 @asaplog_post_dec
4883 def __op( self, other, mode ):
4884 s = None
4885 if isinstance(other, scantable):
4886 s = scantable(self._math._binaryop(self, other, mode))
4887 elif isinstance(other, float):
4888 if other == 0.0:
4889 raise ZeroDivisionError("Dividing by zero is not recommended")
4890 s = scantable(self._math._unaryop(self, other, mode, False))
4891 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
4892 if isinstance(other[0], list) \
4893 or isinstance(other[0], numpy.ndarray):
4894 from asapmath import _array2dOp
4895 s = _array2dOp( self, other, mode, False )
4896 else:
4897 s = scantable( self._math._arrayop( self, other,
4898 mode, False ) )
4899 else:
4900 raise TypeError("Other input is not a scantable or float value")
4901 return s
4902
4903 @asaplog_post_dec
4904 def get_fit(self, row=0):
4905 """\
4906 Print or return the stored fits for a row in the scantable
4907
4908 Parameters:
4909
4910 row: the row which the fit has been applied to.
4911
4912 """
4913 if row > self.nrow():
4914 return
4915 from asap.asapfit import asapfit
4916 fit = asapfit(self._getfit(row))
4917 asaplog.push( '%s' %(fit) )
4918 return fit.as_dict()
4919
4920 @preserve_selection
4921 def flag_nans(self):
4922 """\
4923 Utility function to flag NaN values in the scantable.
4924 """
4925 import numpy
4926 basesel = self.get_selection()
4927 for i in range(self.nrow()):
4928 sel = self.get_row_selector(i)
4929 self.set_selection(basesel+sel)
4930 nans = numpy.isnan(self._getspectrum(0))
4931 if numpy.any(nans):
4932 bnans = [ bool(v) for v in nans]
4933 self.flag(bnans)
4934
4935 self.set_selection(basesel)
4936
    def get_row_selector(self, rowno):
        """Return a selector restricted to the single row *rowno*."""
        return selector(rows=[rowno])
4939
    def _add_history(self, funcname, parameters):
        """\
        Append a '##'-separated entry of the form
        'timestamp##funcname##k=v##...' to the scantable history.

        Parameters:
            funcname:   name of the calling function
            parameters: dict of the caller's arguments (typically vars());
                        NOTE: mutated in place — the 'self' key is removed
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            # a dict-valued parameter is flattened: its items are recorded
            # as if they were top-level parameters
            if type(v) is dict:
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # channel masks are compressed to [start, end) runs
                        # to keep history entries short
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # channel masks are compressed to [start, end) runs
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4980
4981
4982 def _zip_mask(self, mask):
4983 mask = list(mask)
4984 i = 0
4985 segments = []
4986 while mask[i:].count(1):
4987 i += mask[i:].index(1)
4988 if mask[i:].count(0):
4989 j = i + mask[i:].index(0)
4990 else:
4991 j = len(mask)
4992 segments.append([i, j])
4993 i = j
4994 return segments
4995
4996 def _get_ordinate_label(self):
4997 fu = "("+self.get_fluxunit()+")"
4998 import re
4999 lbl = "Intensity"
5000 if re.match(".K.", fu):
5001 lbl = "Brightness Temperature "+ fu
5002 elif re.match(".Jy.", fu):
5003 lbl = "Flux density "+ fu
5004 return lbl
5005
5006 def _check_ifs(self):
5007# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
5008 nchans = [self.nchan(i) for i in self.getifnos()]
5009 nchans = filter(lambda t: t > 0, nchans)
5010 return (sum(nchans)/len(nchans) == nchans[0])
5011
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable, merging them
        into a single table.

        Parameters:
            names:    list of file/directory names to import
            unit:     flux unit to apply after filling (None: leave as is)
            average:  if True, time-average the integrations of each file
                      while filling
            opts:     extra options forwarded to the filler's open()
                      (NOTE(review): mutable default; assumed not mutated
                      by the filler — confirm)
        """
        first = True
        fullnames = []
        # expand env vars / '~' and verify every input exists before
        # starting any import work
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 1 = disk-based, 0 = memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # choose the MeasurementSet filler or the generic filler
            # depending on the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge every file after the first into the accumulated table
            if not first:
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
5052
    def _get_verify_action( self, msg, action=None ):
        """\
        Ask the user to verify an interactive fit and return one of
        'Y' (yes), 'N' (no), 'A' (accept all) or 'R' (reject all).
        If *action* is already a valid action string, it is returned
        (uppercased, first letter) without prompting.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # plain <return> defaults to Yes
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # after showing help, re-prompt (without a preset action)
            return self._get_verify_action(msg)
        else:
            # any unrecognised input is treated as Yes
            return 'Y'
5071
5072 def __getitem__(self, key):
5073 if key < 0:
5074 key += self.nrow()
5075 if key >= self.nrow():
5076 raise IndexError("Row index out of range.")
5077 return self._getspectrum(key)
5078
    def __setitem__(self, key, value):
        """\
        Replace the spectrum of row *key* with *value* (a sequence of
        channel values); negative keys count back from the last row.
        Raises IndexError for out-of-range rows and ValueError when
        *value* has no length or is longer than the row's channel count.
        """
        if key < 0:
            key += self.nrow()
        if key >= self.nrow():
            raise IndexError("Row index out of range.")
        # NOTE(review): '>' permits values SHORTER than nchan; if an exact
        # length is required this should be '!=' — confirm against
        # _setspectrum's behaviour
        if not hasattr(value, "__len__") or \
                len(value) > self.nchan(self.getif(key)):
            raise ValueError("Spectrum length doesn't match.")
        return self._setspectrum(value, key)
5088
    def __len__(self):
        """Return the number of rows in the scantable."""
        return self.nrow()
5091
5092 def __iter__(self):
5093 for i in range(len(self)):
5094 yield self[i]
Note: See TracBrowser for help on using the repository browser.