source: branches/hpc33/python/scantable.py@ 2560

Last change on this file since 2560 was 2351, checked in by Kana Sugimoto, 13 years ago

New Development: No

JIRA Issue: No (a minor bug fix)

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs: unit tests of sdbaseline (CASA)

Put in Release Notes: No

Module(s): sdbaseline (CASA)

Description: fix a minor typo introduced in rev.2348.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 136.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # always restore, regardless of success or failure
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Return True if the given file is a scantable.

    A directory is treated as a scantable when it looks like a casacore
    table (contains both table.info and table.dat) and its table.info
    header line does NOT identify it as a Measurement Set.

    Parameters:

        filename:    the name of the file/directory to test

    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        # close the handle even if readline() raises (was leaked before)
        f.close()
    # any casacore table that is not an MS is assumed to be a scantable
    return l.find('Measurement Set') == -1
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Return True if the given file is a MeasurementSet.

    A directory is treated as an MS when it looks like a casacore table
    (contains both table.info and table.dat) and the first line of
    table.info mentions 'Measurement Set'.

    Parameters:

        filename:    the name of the file/directory to test

    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        # close the handle even if readline() raises (was leaked before)
        f.close()
    return l.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation pass: every element must be an int or a pair of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                                   to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                                       to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
                               pair of integers specified as a tuple. \
                               Nested tuples are allowed \
                               to make individual selection for different IFs."


    # --- canonicalisation: reduce the scalar/empty/one-element forms ---
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # 'commonedge' means one [left,right] pair shared by all IFs;
    # any nested sequence (or length > 2) forces per-IF pairs instead.
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten to [l0,r0, l1,r1, ...], expanding bare ints to pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure.

    In verbose mode the exception and a hint are pushed to the log;
    otherwise a RuntimeError carrying both texts is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress options as the single 'true,<minnrow>'-style
    string expected by the lower-level (C++) interface."""
    return '%s,%s' % (str(showprogress).lower(), str(minnrow))
175
176class scantable(Scantable):
177 """\
178 The ASAP container for scans (single-dish data).
179 """
180
181 @asaplog_post_dec
182 def __init__(self, filename, average=None, unit=None, parallactify=None,
183 **args):
184 """\
185 Create a scantable from a saved one or make a reference
186
187 Parameters:
188
189 filename: the name of an asap table on disk
190 or
191 the name of a rpfits/sdfits/ms file
192 (integrations within scans are auto averaged
193 and the whole file is read) or
194 [advanced] a reference to an existing scantable
195
196 average: average all integrations withinb a scan on read.
197 The default (True) is taken from .asaprc.
198
199 unit: brightness unit; must be consistent with K or Jy.
200 Over-rides the default selected by the filler
201 (input rpfits/sdfits/ms) or replaces the value
202 in existing scantables
203
204 antenna: for MeasurementSet input data only:
205 Antenna selection. integer (id) or string
206 (name or id).
207
208 parallactify: Indicate that the data had been parallactified.
209 Default (false) is taken from rc file.
210
211 """
212 if average is None:
213 average = rcParams['scantable.autoaverage']
214 parallactify = parallactify or rcParams['scantable.parallactify']
215 varlist = vars()
216 from asap._asap import stmath
217 self._math = stmath( rcParams['insitu'] )
218 if isinstance(filename, Scantable):
219 Scantable.__init__(self, filename)
220 else:
221 if isinstance(filename, str):
222 filename = os.path.expandvars(filename)
223 filename = os.path.expanduser(filename)
224 if not os.path.exists(filename):
225 s = "File '%s' not found." % (filename)
226 raise IOError(s)
227 if is_scantable(filename):
228 ondisk = rcParams['scantable.storage'] == 'disk'
229 Scantable.__init__(self, filename, ondisk)
230 if unit is not None:
231 self.set_fluxunit(unit)
232 if average:
233 self._assign( self.average_time( scanav=True ) )
234 # do not reset to the default freqframe
235 #self.set_freqframe(rcParams['scantable.freqframe'])
236 elif is_ms(filename):
237 # Measurement Set
238 opts={'ms': {}}
239 mskeys=['getpt','antenna']
240 for key in mskeys:
241 if key in args.keys():
242 opts['ms'][key] = args[key]
243 self._fill([filename], unit, average, opts)
244 elif os.path.isfile(filename):
245 self._fill([filename], unit, average)
246 # only apply to new data not "copy constructor"
247 self.parallactify(parallactify)
248 else:
249 msg = "The given file '%s'is not a valid " \
250 "asap table." % (filename)
251 raise IOError(msg)
252 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
253 and isinstance(filename[-1], str):
254 self._fill(filename, unit, average)
255 self.parallactify(parallactify)
256 self._add_history("scantable", varlist)
257
258 @asaplog_post_dec
259 def save(self, name=None, format=None, overwrite=False):
260 """\
261 Store the scantable on disk. This can be an asap (aips++) Table,
262 SDFITS or MS2 format.
263
264 Parameters:
265
266 name: the name of the outputfile. For format "ASCII"
267 this is the root file name (data in 'name'.txt
268 and header in 'name'_header.txt)
269
270 format: an optional file format. Default is ASAP.
271 Allowed are:
272
273 * 'ASAP' (save as ASAP [aips++] Table),
274 * 'SDFITS' (save as SDFITS file)
275 * 'ASCII' (saves as ascii text file)
276 * 'MS2' (saves as an casacore MeasurementSet V2)
277 * 'FITS' (save as image FITS - not readable by
278 class)
279 * 'CLASS' (save as FITS readable by CLASS)
280
281 overwrite: If the file should be overwritten if it exists.
282 The default False is to return with warning
283 without writing the output. USE WITH CARE.
284
285 Example::
286
287 scan.save('myscan.asap')
288 scan.save('myscan.sdfits', 'SDFITS')
289
290 """
291 from os import path
292 format = format or rcParams['scantable.save']
293 suffix = '.'+format.lower()
294 if name is None or name == "":
295 name = 'scantable'+suffix
296 msg = "No filename given. Using default name %s..." % name
297 asaplog.push(msg)
298 name = path.expandvars(name)
299 if path.isfile(name) or path.isdir(name):
300 if not overwrite:
301 msg = "File %s exists." % name
302 raise IOError(msg)
303 format2 = format.upper()
304 if format2 == 'ASAP':
305 self._save(name)
306 elif format2 == 'MS2':
307 msopt = {'ms': {'overwrite': overwrite } }
308 from asap._asap import mswriter
309 writer = mswriter( self )
310 writer.write( name, msopt )
311 else:
312 from asap._asap import stwriter as stw
313 writer = stw(format2)
314 writer.write(self, name)
315 return
316
317 def copy(self):
318 """Return a copy of this scantable.
319
320 *Note*:
321
322 This makes a full (deep) copy. scan2 = scan1 makes a reference.
323
324 Example::
325
326 copiedscan = scan.copy()
327
328 """
329 sd = scantable(Scantable._copy(self))
330 return sd
331
332 def drop_scan(self, scanid=None):
333 """\
334 Return a new scantable where the specified scan number(s) has(have)
335 been dropped.
336
337 Parameters:
338
339 scanid: a (list of) scan number(s)
340
341 """
342 from asap import _is_sequence_or_number as _is_valid
343 from asap import _to_list
344 from asap import unique
345 if not _is_valid(scanid):
346 raise RuntimeError( 'Please specify a scanno to drop from the'
347 ' scantable' )
348 scanid = _to_list(scanid)
349 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
350 for sid in scanid: allscans.remove(sid)
351 if len(allscans) == 0:
352 raise ValueError("Can't remove all scans")
353 sel = selector(scans=allscans)
354 return self._select_copy(sel)
355
356 def _select_copy(self, selection):
357 orig = self.get_selection()
358 self.set_selection(orig+selection)
359 cp = self.copy()
360 self.set_selection(orig)
361 return cp
362
363 def get_scan(self, scanid=None):
364 """\
365 Return a specific scan (by scanno) or collection of scans (by
366 source name) in a new scantable.
367
368 *Note*:
369
370 See scantable.drop_scan() for the inverse operation.
371
372 Parameters:
373
374 scanid: a (list of) scanno or a source name, unix-style
375 patterns are accepted for source name matching, e.g.
376 '*_R' gets all 'ref scans
377
378 Example::
379
380 # get all scans containing the source '323p459'
381 newscan = scan.get_scan('323p459')
382 # get all 'off' scans
383 refscans = scan.get_scan('*_R')
384 # get a susbset of scans by scanno (as listed in scan.summary())
385 newscan = scan.get_scan([0, 2, 7, 10])
386
387 """
388 if scanid is None:
389 raise RuntimeError( 'Please specify a scan no or name to '
390 'retrieve from the scantable' )
391 try:
392 bsel = self.get_selection()
393 sel = selector()
394 if type(scanid) is str:
395 sel.set_name(scanid)
396 return self._select_copy(sel)
397 elif type(scanid) is int:
398 sel.set_scans([scanid])
399 return self._select_copy(sel)
400 elif type(scanid) is list:
401 sel.set_scans(scanid)
402 return self._select_copy(sel)
403 else:
404 msg = "Illegal scanid type, use 'int' or 'list' if ints."
405 raise TypeError(msg)
406 except RuntimeError:
407 raise
408
409 def __str__(self):
410 tempFile = tempfile.NamedTemporaryFile()
411 Scantable._summary(self, tempFile.name)
412 tempFile.seek(0)
413 asaplog.clear()
414 return tempFile.file.read()
415
416 @asaplog_post_dec
417 def summary(self, filename=None):
418 """\
419 Print a summary of the contents of this scantable.
420
421 Parameters:
422
423 filename: the name of a file to write the putput to
424 Default - no file output
425
426 """
427# info = Scantable._summary(self)
428 if filename is not None:
429 if filename is "":
430 filename = 'scantable_summary.txt'
431 from os.path import expandvars, isdir
432 filename = expandvars(filename)
433# if not isdir(filename):
434# data = open(filename, 'w')
435# data.write(info)
436# data.close()
437# else:
438 if isdir(filename):
439 msg = "Illegal file name '%s'." % (filename)
440 raise IOError(msg)
441 else:
442 filename = ""
443 Scantable._summary(self, filename)
444# info = Scantable._summary(self, filename)
445# return page(info)
446
447 def get_spectrum(self, rowno):
448 """Return the spectrum for the current row in the scantable as a list.
449
450 Parameters:
451
452 rowno: the row number to retrieve the spectrum from
453
454 """
455 return self._getspectrum(rowno)
456
457 def get_mask(self, rowno):
458 """Return the mask for the current row in the scantable as a list.
459
460 Parameters:
461
462 rowno: the row number to retrieve the mask from
463
464 """
465 return self._getmask(rowno)
466
467 def set_spectrum(self, spec, rowno):
468 """Set the spectrum for the current row in the scantable.
469
470 Parameters:
471
472 spec: the new spectrum
473
474 rowno: the row number to set the spectrum for
475
476 """
477 assert(len(spec) == self.nchan(self.getif(rowno)))
478 return self._setspectrum(spec, rowno)
479
480 def get_coordinate(self, rowno):
481 """Return the (spectral) coordinate for a a given 'rowno'.
482
483 *Note*:
484
485 * This coordinate is only valid until a scantable method modifies
486 the frequency axis.
487 * This coordinate does contain the original frequency set-up
488 NOT the new frame. The conversions however are done using the user
489 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
490 use scantable.freq_align first. Without it there is no closure,
491 i.e.::
492
493 c = myscan.get_coordinate(0)
494 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
495
496 Parameters:
497
498 rowno: the row number for the spectral coordinate
499
500 """
501 return coordinate(Scantable.get_coordinate(self, rowno))
502
503 def get_selection(self):
504 """\
505 Get the selection object currently set on this scantable.
506
507 Example::
508
509 sel = scan.get_selection()
510 sel.set_ifs(0) # select IF 0
511 scan.set_selection(sel) # apply modified selection
512
513 """
514 return selector(self._getselection())
515
516 def set_selection(self, selection=None, **kw):
517 """\
518 Select a subset of the data. All following operations on this scantable
519 are only applied to thi selection.
520
521 Parameters:
522
523 selection: a selector object (default unset the selection), or
524 any combination of "pols", "ifs", "beams", "scans",
525 "cycles", "name", "query"
526
527 Examples::
528
529 sel = selector() # create a selection object
530 self.set_scans([0, 3]) # select SCANNO 0 and 3
531 scan.set_selection(sel) # set the selection
532 scan.summary() # will only print summary of scanno 0 an 3
533 scan.set_selection() # unset the selection
534 # or the equivalent
535 scan.set_selection(scans=[0,3])
536 scan.summary() # will only print summary of scanno 0 an 3
537 scan.set_selection() # unset the selection
538
539 """
540 if selection is None:
541 # reset
542 if len(kw) == 0:
543 selection = selector()
544 else:
545 # try keywords
546 for k in kw:
547 if k not in selector.fields:
548 raise KeyError("Invalid selection key '%s', "
549 "valid keys are %s" % (k,
550 selector.fields))
551 selection = selector(**kw)
552 self._setselection(selection)
553
554 def get_row(self, row=0, insitu=None):
555 """\
556 Select a row in the scantable.
557 Return a scantable with single row.
558
559 Parameters:
560
561 row: row no of integration, default is 0.
562 insitu: if False a new scantable is returned. Otherwise, the
563 scaling is done in-situ. The default is taken from .asaprc
564 (False)
565
566 """
567 if insitu is None:
568 insitu = rcParams['insitu']
569 if not insitu:
570 workscan = self.copy()
571 else:
572 workscan = self
573 # Select a row
574 sel = selector()
575 sel.set_rows([row])
576 workscan.set_selection(sel)
577 if not workscan.nrow() == 1:
578 msg = "Could not identify single row. %d rows selected." \
579 % (workscan.nrow())
580 raise RuntimeError(msg)
581 if insitu:
582 self._assign(workscan)
583 else:
584 return workscan
585
586 @asaplog_post_dec
587 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
588 """\
589 Determine the specified statistic of the current beam/if/pol
590 Takes a 'mask' as an optional parameter to specify which
591 channels should be excluded.
592
593 Parameters:
594
595 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
596 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
597
598 mask: an optional mask specifying where the statistic
599 should be determined.
600
601 form: format string to print statistic values
602
603 row: row number of spectrum to process.
604 (default is None: for all rows)
605
606 Example:
607 scan.set_unit('channel')
608 msk = scan.create_mask([100, 200], [500, 600])
609 scan.stats(stat='mean', mask=m)
610
611 """
612 mask = mask or []
613 if not self._check_ifs():
614 raise ValueError("Cannot apply mask as the IFs have different "
615 "number of channels. Please use setselection() "
616 "to select individual IFs")
617 rtnabc = False
618 if stat.lower().endswith('_abc'): rtnabc = True
619 getchan = False
620 if stat.lower().startswith('min') or stat.lower().startswith('max'):
621 chan = self._math._minmaxchan(self, mask, stat)
622 getchan = True
623 statvals = []
624 if not rtnabc:
625 if row == None:
626 statvals = self._math._stats(self, mask, stat)
627 else:
628 statvals = self._math._statsrow(self, mask, stat, int(row))
629
630 #def cb(i):
631 # return statvals[i]
632
633 #return self._row_callback(cb, stat)
634
635 label=stat
636 #callback=cb
637 out = ""
638 #outvec = []
639 sep = '-'*50
640
641 if row == None:
642 rows = xrange(self.nrow())
643 elif isinstance(row, int):
644 rows = [ row ]
645
646 for i in rows:
647 refstr = ''
648 statunit= ''
649 if getchan:
650 qx, qy = self.chan2data(rowno=i, chan=chan[i])
651 if rtnabc:
652 statvals.append(qx['value'])
653 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
654 statunit= '['+qx['unit']+']'
655 else:
656 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
657
658 tm = self._gettime(i)
659 src = self._getsourcename(i)
660 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
661 out += 'Time[%s]:\n' % (tm)
662 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
663 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
664 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
665 #outvec.append(callback(i))
666 if len(rows) > 1:
667 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
668 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
669 else:
670 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
671 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
672 out += sep+"\n"
673
674 import os
675 if os.environ.has_key( 'USER' ):
676 usr = os.environ['USER']
677 else:
678 import commands
679 usr = commands.getoutput( 'whoami' )
680 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
681 f = open(tmpfile,'w')
682 print >> f, sep
683 print >> f, ' %s %s' % (label, statunit)
684 print >> f, sep
685 print >> f, out
686 f.close()
687 f = open(tmpfile,'r')
688 x = f.readlines()
689 f.close()
690 asaplog.push(''.join(x), False)
691
692 return statvals
693
694 def chan2data(self, rowno=0, chan=0):
695 """\
696 Returns channel/frequency/velocity and spectral value
697 at an arbitrary row and channel in the scantable.
698
699 Parameters:
700
701 rowno: a row number in the scantable. Default is the
702 first row, i.e. rowno=0
703
704 chan: a channel in the scantable. Default is the first
705 channel, i.e. pos=0
706
707 """
708 if isinstance(rowno, int) and isinstance(chan, int):
709 qx = {'unit': self.get_unit(),
710 'value': self._getabcissa(rowno)[chan]}
711 qy = {'unit': self.get_fluxunit(),
712 'value': self._getspectrum(rowno)[chan]}
713 return qx, qy
714
715 def stddev(self, mask=None):
716 """\
717 Determine the standard deviation of the current beam/if/pol
718 Takes a 'mask' as an optional parameter to specify which
719 channels should be excluded.
720
721 Parameters:
722
723 mask: an optional mask specifying where the standard
724 deviation should be determined.
725
726 Example::
727
728 scan.set_unit('channel')
729 msk = scan.create_mask([100, 200], [500, 600])
730 scan.stddev(mask=m)
731
732 """
733 return self.stats(stat='stddev', mask=mask);
734
735
736 def get_column_names(self):
737 """\
738 Return a list of column names, which can be used for selection.
739 """
740 return list(Scantable.get_column_names(self))
741
742 def get_tsys(self, row=-1):
743 """\
744 Return the System temperatures.
745
746 Parameters:
747
748 row: the rowno to get the information for. (default all rows)
749
750 Returns:
751
752 a list of Tsys values for the current selection
753
754 """
755 if row > -1:
756 return self._get_column(self._gettsys, row)
757 return self._row_callback(self._gettsys, "Tsys")
758
759
760 def get_weather(self, row=-1):
761 """\
762 Return the weather informations.
763
764 Parameters:
765
766 row: the rowno to get the information for. (default all rows)
767
768 Returns:
769
770 a dict or list of of dicts of values for the current selection
771
772 """
773
774 values = self._get_column(self._get_weather, row)
775 if row > -1:
776 return {'temperature': values[0],
777 'pressure': values[1], 'humidity' : values[2],
778 'windspeed' : values[3], 'windaz' : values[4]
779 }
780 else:
781 out = []
782 for r in values:
783
784 out.append({'temperature': r[0],
785 'pressure': r[1], 'humidity' : r[2],
786 'windspeed' : r[3], 'windaz' : r[4]
787 })
788 return out
789
790 def _row_callback(self, callback, label):
791 out = ""
792 outvec = []
793 sep = '-'*50
794 for i in range(self.nrow()):
795 tm = self._gettime(i)
796 src = self._getsourcename(i)
797 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
798 out += 'Time[%s]:\n' % (tm)
799 if self.nbeam(-1) > 1:
800 out += ' Beam[%d] ' % (self.getbeam(i))
801 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
802 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
803 outvec.append(callback(i))
804 out += '= %3.3f\n' % (outvec[i])
805 out += sep+'\n'
806
807 asaplog.push(sep)
808 asaplog.push(" %s" % (label))
809 asaplog.push(sep)
810 asaplog.push(out)
811 asaplog.post()
812 return outvec
813
814 def _get_column(self, callback, row=-1, *args):
815 """
816 """
817 if row == -1:
818 return [callback(i, *args) for i in range(self.nrow())]
819 else:
820 if 0 <= row < self.nrow():
821 return callback(row, *args)
822
823
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (derived from the shortest integration time in the table)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # strip trailing time fields from the parse format so it matches
        # the precision of the strings produced above
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # integer division intended here (Python 2 semantics)
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
869
870
871 def get_inttime(self, row=-1):
872 """\
873 Get a list of integration times for the observations.
874 Return a time in seconds for each integration in the scantable.
875
876 Parameters:
877
878 row: row no of integration. Default -1 return all rows.
879
880 """
881 return self._get_column(self._getinttime, row)
882
883
884 def get_sourcename(self, row=-1):
885 """\
886 Get a list source names for the observations.
887 Return a string for each integration in the scantable.
888 Parameters:
889
890 row: row no of integration. Default -1 return all rows.
891
892 """
893 return self._get_column(self._getsourcename, row)
894
895 def get_elevation(self, row=-1):
896 """\
897 Get a list of elevations for the observations.
898 Return a float for each integration in the scantable.
899
900 Parameters:
901
902 row: row no of integration. Default -1 return all rows.
903
904 """
905 return self._get_column(self._getelevation, row)
906
907 def get_azimuth(self, row=-1):
908 """\
909 Get a list of azimuths for the observations.
910 Return a float for each integration in the scantable.
911
912 Parameters:
913 row: row no of integration. Default -1 return all rows.
914
915 """
916 return self._get_column(self._getazimuth, row)
917
918 def get_parangle(self, row=-1):
919 """\
920 Get a list of parallactic angles for the observations.
921 Return a float for each integration in the scantable.
922
923 Parameters:
924
925 row: row no of integration. Default -1 return all rows.
926
927 """
928 return self._get_column(self._getparangle, row)
929
930 def get_direction(self, row=-1):
931 """
932 Get a list of Positions on the sky (direction) for the observations.
933 Return a string for each integration in the scantable.
934
935 Parameters:
936
937 row: row no of integration. Default -1 return all rows
938
939 """
940 return self._get_column(self._getdirection, row)
941
942 def get_directionval(self, row=-1):
943 """\
944 Get a list of Positions on the sky (direction) for the observations.
945 Return a float for each integration in the scantable.
946
947 Parameters:
948
949 row: row no of integration. Default -1 return all rows
950
951 """
952 return self._get_column(self._getdirectionvec, row)
953
954 @asaplog_post_dec
955 def set_unit(self, unit='channel'):
956 """\
957 Set the unit for all following operations on this scantable
958
959 Parameters:
960
961 unit: optional unit, default is 'channel'. Use one of '*Hz',
962 'km/s', 'channel' or equivalent ''
963
964 """
965 varlist = vars()
966 if unit in ['', 'pixel', 'channel']:
967 unit = ''
968 inf = list(self._getcoordinfo())
969 inf[0] = unit
970 self._setcoordinfo(inf)
971 self._add_history("set_unit", varlist)
972
973 @asaplog_post_dec
974 def set_instrument(self, instr):
975 """\
976 Set the instrument for subsequent processing.
977
978 Parameters:
979
980 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
981 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
982
983 """
984 self._setInstrument(instr)
985 self._add_history("set_instument", vars())
986
987 @asaplog_post_dec
988 def set_feedtype(self, feedtype):
989 """\
990 Overwrite the feed type, which might not be set correctly.
991
992 Parameters:
993
994 feedtype: 'linear' or 'circular'
995
996 """
997 self._setfeedtype(feedtype)
998 self._add_history("set_feedtype", vars())
999
1000 @asaplog_post_dec
1001 def set_doppler(self, doppler='RADIO'):
1002 """\
1003 Set the doppler for all following operations on this scantable.
1004
1005 Parameters:
1006
1007 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1008
1009 """
1010 varlist = vars()
1011 inf = list(self._getcoordinfo())
1012 inf[2] = doppler
1013 self._setcoordinfo(inf)
1014 self._add_history("set_doppler", vars())
1015
1016 @asaplog_post_dec
1017 def set_freqframe(self, frame=None):
1018 """\
1019 Set the frame type of the Spectral Axis.
1020
1021 Parameters:
1022
1023 frame: an optional frame type, default 'LSRK'. Valid frames are:
1024 'TOPO', 'LSRD', 'LSRK', 'BARY',
1025 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1026
1027 Example::
1028
1029 scan.set_freqframe('BARY')
1030
1031 """
1032 frame = frame or rcParams['scantable.freqframe']
1033 varlist = vars()
1034 # "REST" is not implemented in casacore
1035 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1036 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1037 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1038 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1039
1040 if frame in valid:
1041 inf = list(self._getcoordinfo())
1042 inf[1] = frame
1043 self._setcoordinfo(inf)
1044 self._add_history("set_freqframe", varlist)
1045 else:
1046 msg = "Please specify a valid freq type. Valid types are:\n", valid
1047 raise TypeError(msg)
1048
1049 @asaplog_post_dec
1050 def set_dirframe(self, frame=""):
1051 """\
1052 Set the frame type of the Direction on the sky.
1053
1054 Parameters:
1055
1056 frame: an optional frame type, default ''. Valid frames are:
1057 'J2000', 'B1950', 'GALACTIC'
1058
1059 Example:
1060
1061 scan.set_dirframe('GALACTIC')
1062
1063 """
1064 varlist = vars()
1065 Scantable.set_dirframe(self, frame)
1066 self._add_history("set_dirframe", varlist)
1067
1068 def get_unit(self):
1069 """\
1070 Get the default unit set in this scantable
1071
1072 Returns:
1073
1074 A unit string
1075
1076 """
1077 inf = self._getcoordinfo()
1078 unit = inf[0]
1079 if unit == '': unit = 'channel'
1080 return unit
1081
1082 @asaplog_post_dec
1083 def get_abcissa(self, rowno=0):
1084 """\
1085 Get the abcissa in the current coordinate setup for the currently
1086 selected Beam/IF/Pol
1087
1088 Parameters:
1089
1090 rowno: an optional row number in the scantable. Default is the
1091 first row, i.e. rowno=0
1092
1093 Returns:
1094
1095 The abcissa values and the format string (as a dictionary)
1096
1097 """
1098 abc = self._getabcissa(rowno)
1099 lbl = self._getabcissalabel(rowno)
1100 return abc, lbl
1101
1102 @asaplog_post_dec
1103 def flag(self, mask=None, unflag=False, row=-1):
1104 """\
1105 Flag the selected data using an optional channel mask.
1106
1107 Parameters:
1108
1109 mask: an optional channel mask, created with create_mask. Default
1110 (no mask) is all channels.
1111
1112 unflag: if True, unflag the data
1113
1114 row: an optional row number in the scantable.
1115 Default -1 flags all rows
1116
1117 """
1118 varlist = vars()
1119 mask = mask or []
1120 self._flag(row, mask, unflag)
1121 self._add_history("flag", varlist)
1122
1123 @asaplog_post_dec
1124 def flag_row(self, rows=None, unflag=False):
1125 """\
1126 Flag the selected data in row-based manner.
1127
1128 Parameters:
1129
1130 rows: list of row numbers to be flagged. Default is no row
1131 (must be explicitly specified to execute row-based
1132 flagging).
1133
1134 unflag: if True, unflag the data.
1135
1136 """
1137 varlist = vars()
1138 if rows is None:
1139 rows = []
1140 self._flag_row(rows, unflag)
1141 self._add_history("flag_row", varlist)
1142
1143 @asaplog_post_dec
1144 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1145 """\
1146 Flag the selected data outside a specified range (in channel-base)
1147
1148 Parameters:
1149
1150 uthres: upper threshold.
1151
1152 dthres: lower threshold
1153
1154 clipoutside: True for flagging data outside the range
1155 [dthres:uthres].
1156 False for flagging data inside the range.
1157
1158 unflag: if True, unflag the data.
1159
1160 """
1161 varlist = vars()
1162 self._clip(uthres, dthres, clipoutside, unflag)
1163 self._add_history("clip", varlist)
1164
1165 @asaplog_post_dec
1166 def lag_flag(self, start, end, unit="MHz", insitu=None):
1167 """\
1168 Flag the data in 'lag' space by providing a frequency to remove.
1169 Flagged data in the scantable get interpolated over the region.
1170 No taper is applied.
1171
1172 Parameters:
1173
1174 start: the start frequency (really a period within the
1175 bandwidth) or period to remove
1176
1177 end: the end frequency or period to remove
1178
1179 unit: the frequency unit (default "MHz") or "" for
1180 explicit lag channels
1181
1182 *Notes*:
1183
1184 It is recommended to flag edges of the band or strong
1185 signals beforehand.
1186
1187 """
1188 if insitu is None: insitu = rcParams['insitu']
1189 self._math._setinsitu(insitu)
1190 varlist = vars()
1191 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1192 if not (unit == "" or base.has_key(unit)):
1193 raise ValueError("%s is not a valid unit." % unit)
1194 if unit == "":
1195 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1196 else:
1197 s = scantable(self._math._lag_flag(self, start*base[unit],
1198 end*base[unit], "frequency"))
1199 s._add_history("lag_flag", varlist)
1200 if insitu:
1201 self._assign(s)
1202 else:
1203 return s
1204
1205 @asaplog_post_dec
1206 def fft(self, rowno=None, mask=None, getrealimag=False):
1207 """\
1208 Apply FFT to the spectra.
1209 Flagged data in the scantable get interpolated over the region.
1210
1211 Parameters:
1212
1213 rowno: The row number(s) to be processed. int, list
1214 and tuple are accepted. By default (None), FFT
1215 is applied to the whole data.
1216
1217 mask: Auxiliary channel mask(s). Given as a boolean
1218 list, it is applied to all specified rows.
1219 A list of boolean lists can also be used to
1220 apply different masks. In the latter case, the
1221 length of 'mask' must be the same as that of
1222 'rowno'. The default is None.
1223
1224 getrealimag: If True, returns the real and imaginary part
1225 values of the complex results.
1226 If False (the default), returns the amplitude
1227 (absolute value) normalised with Ndata/2 and
1228 phase (argument, in unit of radian).
1229
1230 Returns:
1231
1232 A list of dictionaries containing the results for each spectrum.
1233 Each dictionary contains two values, the real and the imaginary
1234 parts when getrealimag = True, or the amplitude(absolute value)
1235 and the phase(argument) when getrealimag = False. The key for
1236 these values are 'real' and 'imag', or 'ampl' and 'phase',
1237 respectively.
1238 """
1239 if rowno is None:
1240 rowno = []
1241 if mask is None:
1242 mask = []
1243 if isinstance(rowno, int):
1244 rowno = [rowno]
1245 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1246 raise TypeError("The row number(s) must be int, list or tuple.")
1247
1248 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1249
1250 if not (isinstance(mask, list) or isinstance(mask, tuple)):
1251 raise TypeError("The mask must be a boolean list or a list of "
1252 "boolean list.")
1253 if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1254 if isinstance(mask[0], bool): mask = [mask]
1255 elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1256 raise TypeError("The mask must be a boolean list or a list of "
1257 "boolean list.")
1258
1259 usecommonmask = (len(mask) == 1)
1260 if not usecommonmask:
1261 if len(mask) != len(rowno):
1262 raise ValueError("When specifying masks for each spectrum, "
1263 "the numbers of them must be identical.")
1264 for amask in mask:
1265 if len(amask) != self.nchan():
1266 raise ValueError("The spectra and the mask have different "
1267 "length.")
1268
1269 res = []
1270
1271 imask = 0
1272 for whichrow in rowno:
1273 fspec = self._fft(whichrow, mask[imask], getrealimag)
1274 nspec = len(fspec)
1275
1276 i = 0
1277 v1 = []
1278 v2 = []
1279 reselem = {"real":[],"imag":[]} if getrealimag \
1280 else {"ampl":[],"phase":[]}
1281
1282 while (i < nspec):
1283 v1.append(fspec[i])
1284 v2.append(fspec[i+1])
1285 i += 2
1286
1287 if getrealimag:
1288 reselem["real"] += v1
1289 reselem["imag"] += v2
1290 else:
1291 reselem["ampl"] += v1
1292 reselem["phase"] += v2
1293
1294 res.append(reselem)
1295
1296 if not usecommonmask:
1297 imask += 1
1298
1299 return res
1300
1301 @asaplog_post_dec
1302 def create_mask(self, *args, **kwargs):
1303 """\
1304 Compute and return a mask based on [min, max] windows.
1305 The specified windows are to be INCLUDED, when the mask is
1306 applied.
1307
1308 Parameters:
1309
1310 [min, max], [min2, max2], ...
1311 Pairs of start/end points (inclusive)specifying the regions
1312 to be masked
1313
1314 invert: optional argument. If specified as True,
1315 return an inverted mask, i.e. the regions
1316 specified are EXCLUDED
1317
1318 row: create the mask using the specified row for
1319 unit conversions, default is row=0
1320 only necessary if frequency varies over rows.
1321
1322 Examples::
1323
1324 scan.set_unit('channel')
1325 # a)
1326 msk = scan.create_mask([400, 500], [800, 900])
1327 # masks everything outside 400 and 500
1328 # and 800 and 900 in the unit 'channel'
1329
1330 # b)
1331 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1332 # masks the regions between 400 and 500
1333 # and 800 and 900 in the unit 'channel'
1334
1335 # c)
1336 #mask only channel 400
1337 msk = scan.create_mask([400])
1338
1339 """
1340 row = kwargs.get("row", 0)
1341 data = self._getabcissa(row)
1342 u = self._getcoordinfo()[0]
1343 if u == "":
1344 u = "channel"
1345 msg = "The current mask window unit is %s" % u
1346 i = self._check_ifs()
1347 if not i:
1348 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1349 asaplog.push(msg)
1350 n = len(data)
1351 msk = _n_bools(n, False)
1352 # test if args is a 'list' or a 'normal *args - UGLY!!!
1353
1354 ws = (isinstance(args[-1][-1], int)
1355 or isinstance(args[-1][-1], float)) and args or args[0]
1356 for window in ws:
1357 if len(window) == 1:
1358 window = [window[0], window[0]]
1359 if len(window) == 0 or len(window) > 2:
1360 raise ValueError("A window needs to be defined as "
1361 "[start(, end)]")
1362 if window[0] > window[1]:
1363 tmp = window[0]
1364 window[0] = window[1]
1365 window[1] = tmp
1366 for i in range(n):
1367 if data[i] >= window[0] and data[i] <= window[1]:
1368 msk[i] = True
1369 if kwargs.has_key('invert'):
1370 if kwargs.get('invert'):
1371 msk = mask_not(msk)
1372 return msk
1373
1374 def get_masklist(self, mask=None, row=0, silent=False):
1375 """\
1376 Compute and return a list of mask windows, [min, max].
1377
1378 Parameters:
1379
1380 mask: channel mask, created with create_mask.
1381
1382 row: calcutate the masklist using the specified row
1383 for unit conversions, default is row=0
1384 only necessary if frequency varies over rows.
1385
1386 Returns:
1387
1388 [min, max], [min2, max2], ...
1389 Pairs of start/end points (inclusive)specifying
1390 the masked regions
1391
1392 """
1393 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1394 raise TypeError("The mask should be list or tuple.")
1395 if len(mask) < 2:
1396 raise TypeError("The mask elements should be > 1")
1397 data = self._getabcissa(row)
1398 if len(data) != len(mask):
1399 msg = "Number of channels in scantable != number of mask elements"
1400 raise TypeError(msg)
1401 u = self._getcoordinfo()[0]
1402 if u == "":
1403 u = "channel"
1404 msg = "The current mask window unit is %s" % u
1405 i = self._check_ifs()
1406 if not i:
1407 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1408 if not silent:
1409 asaplog.push(msg)
1410 masklist = []
1411 ist, ien = None, None
1412 ist, ien=self.get_mask_indices(mask)
1413 if ist is not None and ien is not None:
1414 for i in xrange(len(ist)):
1415 range=[data[ist[i]],data[ien[i]]]
1416 range.sort()
1417 masklist.append([range[0],range[1]])
1418 return masklist
1419
1420 def get_mask_indices(self, mask=None):
1421 """\
1422 Compute and Return lists of mask start indices and mask end indices.
1423
1424 Parameters:
1425
1426 mask: channel mask, created with create_mask.
1427
1428 Returns:
1429
1430 List of mask start indices and that of mask end indices,
1431 i.e., [istart1,istart2,....], [iend1,iend2,....].
1432
1433 """
1434 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1435 raise TypeError("The mask should be list or tuple.")
1436 if len(mask) < 2:
1437 raise TypeError("The mask elements should be > 1")
1438 istart = []
1439 iend = []
1440 if mask[0]:
1441 istart.append(0)
1442 for i in range(len(mask)-1):
1443 if not mask[i] and mask[i+1]:
1444 istart.append(i+1)
1445 elif mask[i] and not mask[i+1]:
1446 iend.append(i)
1447 if mask[len(mask)-1]:
1448 iend.append(len(mask)-1)
1449 if len(istart) != len(iend):
1450 raise RuntimeError("Numbers of mask start != mask end.")
1451 for i in range(len(istart)):
1452 if istart[i] > iend[i]:
1453 raise RuntimeError("Mask start index > mask end index")
1454 break
1455 return istart,iend
1456
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): on invalid input this only logs an error via asaplog
        # rather than raising; execution continues with the bad value.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # scan rows to find one belonging to this IF
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width, in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width, in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None means "whole spectrum" for this spw
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1590
1591 def _parse_selection(self, selstr, typestr='float', offset=0.,
1592 minval=None, maxval=None):
1593 """
1594 Parameters:
1595 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1596 typestr : The type of the values in returned list
1597 ('integer' or 'float')
1598 offset : The offset value to subtract from or add to
1599 the boundary value if the selection string
1600 includes '<' or '>'
1601 minval, maxval : The minimum/maximum values to set if the
1602 selection string includes '<' or '>'.
1603 The list element is filled with None by default.
1604 Returns:
1605 A list of min/max pair of selections.
1606 Example:
1607 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1608 returns [[0,2],[5,7],[9,9]]
1609 """
1610 selgroups = selstr.split(';')
1611 sellists = []
1612 if typestr.lower().startswith('int'):
1613 formatfunc = int
1614 else:
1615 formatfunc = float
1616
1617 for currsel in selgroups:
1618 if currsel.find('~') > 0:
1619 minsel = formatfunc(currsel.split('~')[0].strip())
1620 maxsel = formatfunc(currsel.split('~')[1].strip())
1621 elif currsel.strip().startswith('<'):
1622 minsel = minval
1623 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1624 - formatfunc(offset)
1625 elif currsel.strip().startswith('>'):
1626 minsel = formatfunc(currsel.split('>')[1].strip()) \
1627 + formatfunc(offset)
1628 maxsel = maxval
1629 else:
1630 minsel = formatfunc(currsel)
1631 maxsel = formatfunc(currsel)
1632 sellists.append([minsel,maxsel])
1633 return sellists
1634
1635# def get_restfreqs(self):
1636# """
1637# Get the restfrequency(s) stored in this scantable.
1638# The return value(s) are always of unit 'Hz'
1639# Parameters:
1640# none
1641# Returns:
1642# a list of doubles
1643# """
1644# return list(self._getrestfreqs())
1645
1646 def get_restfreqs(self, ids=None):
1647 """\
1648 Get the restfrequency(s) stored in this scantable.
1649 The return value(s) are always of unit 'Hz'
1650
1651 Parameters:
1652
1653 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1654 be retrieved
1655
1656 Returns:
1657
1658 dictionary containing ids and a list of doubles for each id
1659
1660 """
1661 if ids is None:
1662 rfreqs = {}
1663 idlist = self.getmolnos()
1664 for i in idlist:
1665 rfreqs[i] = list(self._getrestfreqs(i))
1666 return rfreqs
1667 else:
1668 if type(ids) == list or type(ids) == tuple:
1669 rfreqs = {}
1670 for i in ids:
1671 rfreqs[i] = list(self._getrestfreqs(i))
1672 return rfreqs
1673 else:
1674 return list(self._getrestfreqs(ids))
1675
1676 @asaplog_post_dec
1677 def set_restfreqs(self, freqs=None, unit='Hz'):
1678 """\
1679 Set or replace the restfrequency specified and
1680 if the 'freqs' argument holds a scalar,
1681 then that rest frequency will be applied to all the selected
1682 data. If the 'freqs' argument holds
1683 a vector, then it MUST be of equal or smaller length than
1684 the number of IFs (and the available restfrequencies will be
1685 replaced by this vector). In this case, *all* data have
1686 the restfrequency set per IF according
1687 to the corresponding value you give in the 'freqs' vector.
1688 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1689 IF 1 gets restfreq 2e9.
1690
1691 You can also specify the frequencies via a linecatalog.
1692
1693 Parameters:
1694
1695 freqs: list of rest frequency values or string idenitfiers
1696
1697 unit: unit for rest frequency (default 'Hz')
1698
1699
1700 Example::
1701
1702 # set the given restfrequency for the all currently selected IFs
1703 scan.set_restfreqs(freqs=1.4e9)
1704 # set restfrequencies for the n IFs (n > 1) in the order of the
1705 # list, i.e
1706 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1707 # len(list_of_restfreqs) == nIF
1708 # for nIF == 1 the following will set multiple restfrequency for
1709 # that IF
1710 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1711 # set multiple restfrequencies per IF. as a list of lists where
1712 # the outer list has nIF elements, the inner s arbitrary
1713 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1714
1715 *Note*:
1716
1717 To do more sophisticate Restfrequency setting, e.g. on a
1718 source and IF basis, use scantable.set_selection() before using
1719 this function::
1720
1721 # provided your scantable is called scan
1722 selection = selector()
1723 selection.set_name("ORION*")
1724 selection.set_ifs([1])
1725 scan.set_selection(selection)
1726 scan.set_restfreqs(freqs=86.6e9)
1727
1728 """
1729 varlist = vars()
1730 from asap import linecatalog
1731 # simple value
1732 if isinstance(freqs, int) or isinstance(freqs, float):
1733 self._setrestfreqs([freqs], [""], unit)
1734 # list of values
1735 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1736 # list values are scalars
1737 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1738 if len(freqs) == 1:
1739 self._setrestfreqs(freqs, [""], unit)
1740 else:
1741 # allow the 'old' mode of setting mulitple IFs
1742 sel = selector()
1743 savesel = self._getselection()
1744 iflist = self.getifnos()
1745 if len(freqs)>len(iflist):
1746 raise ValueError("number of elements in list of list "
1747 "exeeds the current IF selections")
1748 iflist = self.getifnos()
1749 for i, fval in enumerate(freqs):
1750 sel.set_ifs(iflist[i])
1751 self._setselection(sel)
1752 self._setrestfreqs([fval], [""], unit)
1753 self._setselection(savesel)
1754
1755 # list values are dict, {'value'=, 'name'=)
1756 elif isinstance(freqs[-1], dict):
1757 values = []
1758 names = []
1759 for d in freqs:
1760 values.append(d["value"])
1761 names.append(d["name"])
1762 self._setrestfreqs(values, names, unit)
1763 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1764 sel = selector()
1765 savesel = self._getselection()
1766 iflist = self.getifnos()
1767 if len(freqs)>len(iflist):
1768 raise ValueError("number of elements in list of list exeeds"
1769 " the current IF selections")
1770 for i, fval in enumerate(freqs):
1771 sel.set_ifs(iflist[i])
1772 self._setselection(sel)
1773 self._setrestfreqs(fval, [""], unit)
1774 self._setselection(savesel)
1775 # freqs are to be taken from a linecatalog
1776 elif isinstance(freqs, linecatalog):
1777 sel = selector()
1778 savesel = self._getselection()
1779 for i in xrange(freqs.nrow()):
1780 sel.set_ifs(iflist[i])
1781 self._setselection(sel)
1782 self._setrestfreqs([freqs.get_frequency(i)],
1783 [freqs.get_name(i)], "MHz")
1784 # ensure that we are not iterating past nIF
1785 if i == self.nif()-1: break
1786 self._setselection(savesel)
1787 else:
1788 return
1789 self._add_history("set_restfreqs", varlist)
1790
1791 @asaplog_post_dec
1792 def shift_refpix(self, delta):
1793 """\
1794 Shift the reference pixel of the Spectra Coordinate by an
1795 integer amount.
1796
1797 Parameters:
1798
1799 delta: the amount to shift by
1800
1801 *Note*:
1802
1803 Be careful using this with broadband data.
1804
1805 """
1806 varlist = vars()
1807 Scantable.shift_refpix(self, delta)
1808 s._add_history("shift_refpix", varlist)
1809
1810 @asaplog_post_dec
1811 def history(self, filename=None):
1812 """\
1813 Print the history. Optionally to a file.
1814
1815 Parameters:
1816
1817 filename: The name of the file to save the history to.
1818
1819 """
1820 hist = list(self._gethistory())
1821 out = "-"*80
1822 for h in hist:
1823 if h.startswith("---"):
1824 out = "\n".join([out, h])
1825 else:
1826 items = h.split("##")
1827 date = items[0]
1828 func = items[1]
1829 items = items[2:]
1830 out += "\n"+date+"\n"
1831 out += "Function: %s\n Parameters:" % (func)
1832 for i in items:
1833 if i == '':
1834 continue
1835 s = i.split("=")
1836 out += "\n %s = %s" % (s[0], s[1])
1837 out = "\n".join([out, "-"*80])
1838 if filename is not None:
1839 if filename is "":
1840 filename = 'scantable_history.txt'
1841 import os
1842 filename = os.path.expandvars(os.path.expanduser(filename))
1843 if not os.path.isdir(filename):
1844 data = open(filename, 'w')
1845 data.write(out)
1846 data.close()
1847 else:
1848 msg = "Illegal file name '%s'." % (filename)
1849 raise IOError(msg)
1850 return page(out)
1851
1852 #
1853 # Maths business
1854 #
1855 @asaplog_post_dec
1856 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1857 """\
1858 Return the (time) weighted average of a scan. Scans will be averaged
1859 only if the source direction (RA/DEC) is within 1' otherwise
1860
1861 *Note*:
1862
1863 in channels only - align if necessary
1864
1865 Parameters:
1866
1867 mask: an optional mask (only used for 'var' and 'tsys'
1868 weighting)
1869
1870 scanav: True averages each scan separately
1871 False (default) averages all scans together,
1872
1873 weight: Weighting scheme.
1874 'none' (mean no weight)
1875 'var' (1/var(spec) weighted)
1876 'tsys' (1/Tsys**2 weighted)
1877 'tint' (integration time weighted)
1878 'tintsys' (Tint/Tsys**2)
1879 'median' ( median averaging)
1880 The default is 'tint'
1881
1882 align: align the spectra in velocity before averaging. It takes
1883 the time of the first spectrum as reference time.
1884
1885 Example::
1886
1887 # time average the scantable without using a mask
1888 newscan = scan.average_time()
1889
1890 """
1891 varlist = vars()
1892 weight = weight or 'TINT'
1893 mask = mask or ()
1894 scanav = (scanav and 'SCAN') or 'NONE'
1895 scan = (self, )
1896
1897 if align:
1898 scan = (self.freq_align(insitu=False), )
1899 s = None
1900 if weight.upper() == 'MEDIAN':
1901 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1902 scanav))
1903 else:
1904 s = scantable(self._math._average(scan, mask, weight.upper(),
1905 scanav))
1906 s._add_history("average_time", varlist)
1907 return s
1908
1909 @asaplog_post_dec
1910 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1911 """\
1912 Return a scan where all spectra are converted to either
1913 Jansky or Kelvin depending upon the flux units of the scan table.
1914 By default the function tries to look the values up internally.
1915 If it can't find them (or if you want to over-ride), you must
1916 specify EITHER jyperk OR eta (and D which it will try to look up
1917 also if you don't set it). jyperk takes precedence if you set both.
1918
1919 Parameters:
1920
1921 jyperk: the Jy / K conversion factor
1922
1923 eta: the aperture efficiency
1924
1925 d: the geometric diameter (metres)
1926
1927 insitu: if False a new scantable is returned.
1928 Otherwise, the scaling is done in-situ
1929 The default is taken from .asaprc (False)
1930
1931 """
1932 if insitu is None: insitu = rcParams['insitu']
1933 self._math._setinsitu(insitu)
1934 varlist = vars()
1935 jyperk = jyperk or -1.0
1936 d = d or -1.0
1937 eta = eta or -1.0
1938 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1939 s._add_history("convert_flux", varlist)
1940 if insitu: self._assign(s)
1941 else: return s
1942
1943 @asaplog_post_dec
1944 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1945 """\
1946 Return a scan after applying a gain-elevation correction.
1947 The correction can be made via either a polynomial or a
1948 table-based interpolation (and extrapolation if necessary).
1949 You specify polynomial coefficients, an ascii table or neither.
1950 If you specify neither, then a polynomial correction will be made
1951 with built in coefficients known for certain telescopes (an error
1952 will occur if the instrument is not known).
1953 The data and Tsys are *divided* by the scaling factors.
1954
1955 Parameters:
1956
1957 poly: Polynomial coefficients (default None) to compute a
1958 gain-elevation correction as a function of
1959 elevation (in degrees).
1960
1961 filename: The name of an ascii file holding correction factors.
1962 The first row of the ascii file must give the column
1963 names and these MUST include columns
1964 "ELEVATION" (degrees) and "FACTOR" (multiply data
1965 by this) somewhere.
1966 The second row must give the data type of the
1967 column. Use 'R' for Real and 'I' for Integer.
1968 An example file would be
1969 (actual factors are arbitrary) :
1970
1971 TIME ELEVATION FACTOR
1972 R R R
1973 0.1 0 0.8
1974 0.2 20 0.85
1975 0.3 40 0.9
1976 0.4 60 0.85
1977 0.5 80 0.8
1978 0.6 90 0.75
1979
1980 method: Interpolation method when correcting from a table.
1981 Values are "nearest", "linear" (default), "cubic"
1982 and "spline"
1983
1984 insitu: if False a new scantable is returned.
1985 Otherwise, the scaling is done in-situ
1986 The default is taken from .asaprc (False)
1987
1988 """
1989
1990 if insitu is None: insitu = rcParams['insitu']
1991 self._math._setinsitu(insitu)
1992 varlist = vars()
1993 poly = poly or ()
1994 from os.path import expandvars
1995 filename = expandvars(filename)
1996 s = scantable(self._math._gainel(self, poly, filename, method))
1997 s._add_history("gain_el", varlist)
1998 if insitu:
1999 self._assign(s)
2000 else:
2001 return s
2002
2003 @asaplog_post_dec
2004 def freq_align(self, reftime=None, method='cubic', insitu=None):
2005 """\
2006 Return a scan where all rows have been aligned in frequency/velocity.
2007 The alignment frequency frame (e.g. LSRK) is that set by function
2008 set_freqframe.
2009
2010 Parameters:
2011
2012 reftime: reference time to align at. By default, the time of
2013 the first row of data is used.
2014
2015 method: Interpolation method for regridding the spectra.
2016 Choose from "nearest", "linear", "cubic" (default)
2017 and "spline"
2018
2019 insitu: if False a new scantable is returned.
2020 Otherwise, the scaling is done in-situ
2021 The default is taken from .asaprc (False)
2022
2023 """
2024 if insitu is None: insitu = rcParams["insitu"]
2025 self._math._setinsitu(insitu)
2026 varlist = vars()
2027 reftime = reftime or ""
2028 s = scantable(self._math._freq_align(self, reftime, method))
2029 s._add_history("freq_align", varlist)
2030 if insitu:
2031 self._assign(s)
2032 else:
2033 return s
2034
2035 @asaplog_post_dec
2036 def opacity(self, tau=None, insitu=None):
2037 """\
2038 Apply an opacity correction. The data
2039 and Tsys are multiplied by the correction factor.
2040
2041 Parameters:
2042
2043 tau: (list of) opacity from which the correction factor is
2044 exp(tau*ZD)
2045 where ZD is the zenith-distance.
2046 If a list is provided, it has to be of length nIF,
2047 nIF*nPol or 1 and in order of IF/POL, e.g.
2048 [opif0pol0, opif0pol1, opif1pol0 ...]
2049 if tau is `None` the opacities are determined from a
2050 model.
2051
2052 insitu: if False a new scantable is returned.
2053 Otherwise, the scaling is done in-situ
2054 The default is taken from .asaprc (False)
2055
2056 """
2057 if insitu is None:
2058 insitu = rcParams['insitu']
2059 self._math._setinsitu(insitu)
2060 varlist = vars()
2061 if not hasattr(tau, "__len__"):
2062 tau = [tau]
2063 s = scantable(self._math._opacity(self, tau))
2064 s._add_history("opacity", varlist)
2065 if insitu:
2066 self._assign(s)
2067 else:
2068 return s
2069
2070 @asaplog_post_dec
2071 def bin(self, width=5, insitu=None):
2072 """\
2073 Return a scan where all spectra have been binned up.
2074
2075 Parameters:
2076
2077 width: The bin width (default=5) in pixels
2078
2079 insitu: if False a new scantable is returned.
2080 Otherwise, the scaling is done in-situ
2081 The default is taken from .asaprc (False)
2082
2083 """
2084 if insitu is None:
2085 insitu = rcParams['insitu']
2086 self._math._setinsitu(insitu)
2087 varlist = vars()
2088 s = scantable(self._math._bin(self, width))
2089 s._add_history("bin", varlist)
2090 if insitu:
2091 self._assign(s)
2092 else:
2093 return s
2094
2095 @asaplog_post_dec
2096 def resample(self, width=5, method='cubic', insitu=None):
2097 """\
2098 Return a scan where all spectra have been binned up.
2099
2100 Parameters:
2101
2102 width: The bin width (default=5) in pixels
2103
2104 method: Interpolation method when correcting from a table.
2105 Values are "nearest", "linear", "cubic" (default)
2106 and "spline"
2107
2108 insitu: if False a new scantable is returned.
2109 Otherwise, the scaling is done in-situ
2110 The default is taken from .asaprc (False)
2111
2112 """
2113 if insitu is None:
2114 insitu = rcParams['insitu']
2115 self._math._setinsitu(insitu)
2116 varlist = vars()
2117 s = scantable(self._math._resample(self, method, width))
2118 s._add_history("resample", varlist)
2119 if insitu:
2120 self._assign(s)
2121 else:
2122 return s
2123
2124 @asaplog_post_dec
2125 def average_pol(self, mask=None, weight='none'):
2126 """\
2127 Average the Polarisations together.
2128
2129 Parameters:
2130
2131 mask: An optional mask defining the region, where the
2132 averaging will be applied. The output will have all
2133 specified points masked.
2134
2135 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2136 weighted), or 'tsys' (1/Tsys**2 weighted)
2137
2138 """
2139 varlist = vars()
2140 mask = mask or ()
2141 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2142 s._add_history("average_pol", varlist)
2143 return s
2144
2145 @asaplog_post_dec
2146 def average_beam(self, mask=None, weight='none'):
2147 """\
2148 Average the Beams together.
2149
2150 Parameters:
2151 mask: An optional mask defining the region, where the
2152 averaging will be applied. The output will have all
2153 specified points masked.
2154
2155 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2156 weighted), or 'tsys' (1/Tsys**2 weighted)
2157
2158 """
2159 varlist = vars()
2160 mask = mask or ()
2161 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2162 s._add_history("average_beam", varlist)
2163 return s
2164
2165 def parallactify(self, pflag):
2166 """\
2167 Set a flag to indicate whether this data should be treated as having
2168 been 'parallactified' (total phase == 0.0)
2169
2170 Parameters:
2171
2172 pflag: Bool indicating whether to turn this on (True) or
2173 off (False)
2174
2175 """
2176 varlist = vars()
2177 self._parallactify(pflag)
2178 self._add_history("parallactify", varlist)
2179
2180 @asaplog_post_dec
2181 def convert_pol(self, poltype=None):
2182 """\
2183 Convert the data to a different polarisation type.
2184 Note that you will need cross-polarisation terms for most conversions.
2185
2186 Parameters:
2187
2188 poltype: The new polarisation type. Valid types are:
2189 "linear", "circular", "stokes" and "linpol"
2190
2191 """
2192 varlist = vars()
2193 s = scantable(self._math._convertpol(self, poltype))
2194 s._add_history("convert_pol", varlist)
2195 return s
2196
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive approval: show original vs smoothed row by row
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # re-register both lines so the legend shows both traces
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                        )
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2274
2275 @asaplog_post_dec
2276 def _parse_wn(self, wn):
2277 if isinstance(wn, list) or isinstance(wn, tuple):
2278 return wn
2279 elif isinstance(wn, int):
2280 return [ wn ]
2281 elif isinstance(wn, str):
2282 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2283 val = wn.split('-')
2284 val = [int(val[0]), int(val[1])]
2285 val.sort()
2286 res = [i for i in xrange(val[0], val[1]+1)]
2287 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2288 val = int(wn[2:])+1
2289 res = [i for i in xrange(val)]
2290 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2291 val = int(wn[:-2])+1
2292 res = [i for i in xrange(val)]
2293 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2294 val = int(wn[1:])
2295 res = [i for i in xrange(val)]
2296 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2297 val = int(wn[:-1])
2298 res = [i for i in xrange(val)]
2299 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2300 val = int(wn[2:])
2301 res = [i for i in xrange(val, self.nchan()/2+1)]
2302 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2303 val = int(wn[:-2])
2304 res = [i for i in xrange(val, self.nchan()/2+1)]
2305 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2306 val = int(wn[1:])+1
2307 res = [i for i in xrange(val, self.nchan()/2+1)]
2308 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2309 val = int(wn[:-1])+1
2310 res = [i for i in xrange(val, self.nchan()/2+1)]
2311
2312 return res
2313 else:
2314 msg = 'wrong value given for addwn/rejwn'
2315 raise RuntimeError(msg)
2316
2317
2318 @asaplog_post_dec
2319 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2320 fftmethod=None, fftthresh=None,
2321 addwn=None, rejwn=None, clipthresh=None,
2322 clipniter=None, plot=None,
2323 getresidual=None, showprogress=None,
2324 minnrow=None, outlog=None, blfile=None):
2325 """\
2326 Return a scan which has been baselined (all rows) with sinusoidal
2327 functions.
2328
2329 Parameters:
2330 insitu: if False a new scantable is returned.
2331 Otherwise, the scaling is done in-situ
2332 The default is taken from .asaprc (False)
2333 mask: an optional mask
2334 applyfft: if True use some method, such as FFT, to find
2335 strongest sinusoidal components in the wavenumber
2336 domain to be used for baseline fitting.
2337 default is True.
2338 fftmethod: method to find the strong sinusoidal components.
2339 now only 'fft' is available and it is the default.
2340 fftthresh: the threshold to select wave numbers to be used for
2341 fitting from the distribution of amplitudes in the
2342 wavenumber domain.
2343 both float and string values accepted.
2344 given a float value, the unit is set to sigma.
2345 for string values, allowed formats include:
2346 'xsigma' or 'x' (= x-sigma level. e.g.,
2347 '3sigma'), or
2348 'topx' (= the x strongest ones, e.g. 'top5').
2349 default is 3.0 (unit: sigma).
2350 addwn: the additional wave numbers to be used for fitting.
2351 list or integer value is accepted to specify every
2352 wave numbers. also string value can be used in case
2353 you need to specify wave numbers in a certain range,
2354 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2355 '<a' (= 0,1,...,a-2,a-1),
2356 '>=a' (= a, a+1, ... up to the maximum wave
2357 number corresponding to the Nyquist
2358 frequency for the case of FFT).
2359 default is [].
2360 rejwn: the wave numbers NOT to be used for fitting.
2361 can be set just as addwn but has higher priority:
2362 wave numbers which are specified both in addwn
2363 and rejwn will NOT be used. default is [].
2364 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2365 clipniter: maximum number of iteration of 'clipthresh'-sigma
2366 clipping (default is 0)
2367 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2368 plot the fit and the residual. In this each
2369 indivual fit has to be approved, by typing 'y'
2370 or 'n'
2371 getresidual: if False, returns best-fit values instead of
2372 residual. (default is True)
2373 showprogress: show progress status for large data.
2374 default is True.
2375 minnrow: minimum number of input spectra to show.
2376 default is 1000.
2377 outlog: Output the coefficients of the best-fit
2378 function to logger (default is False)
2379 blfile: Name of a text file in which the best-fit
2380 parameter values to be written
2381 (default is '': no file/logger output)
2382
2383 Example:
2384 # return a scan baselined by a combination of sinusoidal curves
2385 # having wave numbers in spectral window up to 10,
2386 # also with 3-sigma clipping, iteration up to 4 times
2387 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2388
2389 Note:
2390 The best-fit parameter values output in logger and/or blfile are now
2391 based on specunit of 'channel'.
2392 """
2393
2394 try:
2395 varlist = vars()
2396
2397 if insitu is None: insitu = rcParams['insitu']
2398 if insitu:
2399 workscan = self
2400 else:
2401 workscan = self.copy()
2402
2403 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2404 if applyfft is None: applyfft = True
2405 if fftmethod is None: fftmethod = 'fft'
2406 if fftthresh is None: fftthresh = 3.0
2407 if addwn is None: addwn = []
2408 if rejwn is None: rejwn = []
2409 if clipthresh is None: clipthresh = 3.0
2410 if clipniter is None: clipniter = 0
2411 if plot is None: plot = False
2412 if getresidual is None: getresidual = True
2413 if showprogress is None: showprogress = True
2414 if minnrow is None: minnrow = 1000
2415 if outlog is None: outlog = False
2416 if blfile is None: blfile = ''
2417
2418 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2419 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2420 str(fftthresh).lower(),
2421 workscan._parse_wn(addwn),
2422 workscan._parse_wn(rejwn), clipthresh,
2423 clipniter, getresidual,
2424 pack_progress_params(showprogress,
2425 minnrow), outlog,
2426 blfile)
2427 workscan._add_history('sinusoid_baseline', varlist)
2428
2429 if insitu:
2430 self._assign(workscan)
2431 else:
2432 return workscan
2433
2434 except RuntimeError, e:
2435 raise_fitting_failure_exception(e)
2436
2437
2438 @asaplog_post_dec
2439 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2440 fftmethod=None, fftthresh=None,
2441 addwn=None, rejwn=None, clipthresh=None,
2442 clipniter=None, edge=None, threshold=None,
2443 chan_avg_limit=None, plot=None,
2444 getresidual=None, showprogress=None,
2445 minnrow=None, outlog=None, blfile=None):
2446 """\
2447 Return a scan which has been baselined (all rows) with sinusoidal
2448 functions.
2449 Spectral lines are detected first using linefinder and masked out
2450 to avoid them affecting the baseline solution.
2451
2452 Parameters:
2453 insitu: if False a new scantable is returned.
2454 Otherwise, the scaling is done in-situ
2455 The default is taken from .asaprc (False)
2456 mask: an optional mask retreived from scantable
2457 applyfft: if True use some method, such as FFT, to find
2458 strongest sinusoidal components in the wavenumber
2459 domain to be used for baseline fitting.
2460 default is True.
2461 fftmethod: method to find the strong sinusoidal components.
2462 now only 'fft' is available and it is the default.
2463 fftthresh: the threshold to select wave numbers to be used for
2464 fitting from the distribution of amplitudes in the
2465 wavenumber domain.
2466 both float and string values accepted.
2467 given a float value, the unit is set to sigma.
2468 for string values, allowed formats include:
2469 'xsigma' or 'x' (= x-sigma level. e.g.,
2470 '3sigma'), or
2471 'topx' (= the x strongest ones, e.g. 'top5').
2472 default is 3.0 (unit: sigma).
2473 addwn: the additional wave numbers to be used for fitting.
2474 list or integer value is accepted to specify every
2475 wave numbers. also string value can be used in case
2476 you need to specify wave numbers in a certain range,
2477 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2478 '<a' (= 0,1,...,a-2,a-1),
2479 '>=a' (= a, a+1, ... up to the maximum wave
2480 number corresponding to the Nyquist
2481 frequency for the case of FFT).
2482 default is [].
2483 rejwn: the wave numbers NOT to be used for fitting.
2484 can be set just as addwn but has higher priority:
2485 wave numbers which are specified both in addwn
2486 and rejwn will NOT be used. default is [].
2487 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2488 clipniter: maximum number of iteration of 'clipthresh'-sigma
2489 clipping (default is 0)
2490 edge: an optional number of channel to drop at
2491 the edge of spectrum. If only one value is
2492 specified, the same number will be dropped
2493 from both sides of the spectrum. Default
2494 is to keep all channels. Nested tuples
2495 represent individual edge selection for
2496 different IFs (a number of spectral channels
2497 can be different)
2498 threshold: the threshold used by line finder. It is
2499 better to keep it large as only strong lines
2500 affect the baseline solution.
2501 chan_avg_limit: a maximum number of consequtive spectral
2502 channels to average during the search of
2503 weak and broad lines. The default is no
2504 averaging (and no search for weak lines).
2505 If such lines can affect the fitted baseline
2506 (e.g. a high order polynomial is fitted),
2507 increase this parameter (usually values up
2508 to 8 are reasonable). Most users of this
2509 method should find the default value sufficient.
2510 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2511 plot the fit and the residual. In this each
2512 indivual fit has to be approved, by typing 'y'
2513 or 'n'
2514 getresidual: if False, returns best-fit values instead of
2515 residual. (default is True)
2516 showprogress: show progress status for large data.
2517 default is True.
2518 minnrow: minimum number of input spectra to show.
2519 default is 1000.
2520 outlog: Output the coefficients of the best-fit
2521 function to logger (default is False)
2522 blfile: Name of a text file in which the best-fit
2523 parameter values to be written
2524 (default is "": no file/logger output)
2525
2526 Example:
2527 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2528
2529 Note:
2530 The best-fit parameter values output in logger and/or blfile are now
2531 based on specunit of 'channel'.
2532 """
2533
2534 try:
2535 varlist = vars()
2536
2537 if insitu is None: insitu = rcParams['insitu']
2538 if insitu:
2539 workscan = self
2540 else:
2541 workscan = self.copy()
2542
2543 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2544 if applyfft is None: applyfft = True
2545 if fftmethod is None: fftmethod = 'fft'
2546 if fftthresh is None: fftthresh = 3.0
2547 if addwn is None: addwn = []
2548 if rejwn is None: rejwn = []
2549 if clipthresh is None: clipthresh = 3.0
2550 if clipniter is None: clipniter = 0
2551 if edge is None: edge = (0,0)
2552 if threshold is None: threshold = 3
2553 if chan_avg_limit is None: chan_avg_limit = 1
2554 if plot is None: plot = False
2555 if getresidual is None: getresidual = True
2556 if showprogress is None: showprogress = True
2557 if minnrow is None: minnrow = 1000
2558 if outlog is None: outlog = False
2559 if blfile is None: blfile = ''
2560
2561 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2562 workscan._auto_sinusoid_baseline(mask, applyfft,
2563 fftmethod.lower(),
2564 str(fftthresh).lower(),
2565 workscan._parse_wn(addwn),
2566 workscan._parse_wn(rejwn),
2567 clipthresh, clipniter,
2568 normalise_edge_param(edge),
2569 threshold, chan_avg_limit,
2570 getresidual,
2571 pack_progress_params(showprogress,
2572 minnrow),
2573 outlog, blfile)
2574 workscan._add_history("auto_sinusoid_baseline", varlist)
2575
2576 if insitu:
2577 self._assign(workscan)
2578 else:
2579 return workscan
2580
2581 except RuntimeError, e:
2582 raise_fitting_failure_exception(e)
2583
2584 @asaplog_post_dec
2585 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2586 clipthresh=None, clipniter=None, plot=None,
2587 getresidual=None, showprogress=None, minnrow=None,
2588 outlog=None, blfile=None):
2589 """\
2590 Return a scan which has been baselined (all rows) by cubic spline
2591 function (piecewise cubic polynomial).
2592
2593 Parameters:
2594 insitu: If False a new scantable is returned.
2595 Otherwise, the scaling is done in-situ
2596 The default is taken from .asaprc (False)
2597 mask: An optional mask
2598 npiece: Number of pieces. (default is 2)
2599 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2600 clipniter: maximum number of iteration of 'clipthresh'-sigma
2601 clipping (default is 0)
2602 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2603 plot the fit and the residual. In this each
2604 indivual fit has to be approved, by typing 'y'
2605 or 'n'
2606 getresidual: if False, returns best-fit values instead of
2607 residual. (default is True)
2608 showprogress: show progress status for large data.
2609 default is True.
2610 minnrow: minimum number of input spectra to show.
2611 default is 1000.
2612 outlog: Output the coefficients of the best-fit
2613 function to logger (default is False)
2614 blfile: Name of a text file in which the best-fit
2615 parameter values to be written
2616 (default is "": no file/logger output)
2617
2618 Example:
2619 # return a scan baselined by a cubic spline consisting of 2 pieces
2620 # (i.e., 1 internal knot),
2621 # also with 3-sigma clipping, iteration up to 4 times
2622 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2623
2624 Note:
2625 The best-fit parameter values output in logger and/or blfile are now
2626 based on specunit of 'channel'.
2627 """
2628
2629 try:
2630 varlist = vars()
2631
2632 if insitu is None: insitu = rcParams['insitu']
2633 if insitu:
2634 workscan = self
2635 else:
2636 workscan = self.copy()
2637
2638 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2639 if npiece is None: npiece = 2
2640 if clipthresh is None: clipthresh = 3.0
2641 if clipniter is None: clipniter = 0
2642 if plot is None: plot = False
2643 if getresidual is None: getresidual = True
2644 if showprogress is None: showprogress = True
2645 if minnrow is None: minnrow = 1000
2646 if outlog is None: outlog = False
2647 if blfile is None: blfile = ''
2648
2649 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2650 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2651 getresidual,
2652 pack_progress_params(showprogress,
2653 minnrow), outlog,
2654 blfile)
2655 workscan._add_history("cspline_baseline", varlist)
2656
2657 if insitu:
2658 self._assign(workscan)
2659 else:
2660 return workscan
2661
2662 except RuntimeError, e:
2663 raise_fitting_failure_exception(e)
2664
2665 @asaplog_post_dec
2666 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2667 clipthresh=None, clipniter=None,
2668 edge=None, threshold=None, chan_avg_limit=None,
2669 getresidual=None, plot=None,
2670 showprogress=None, minnrow=None, outlog=None,
2671 blfile=None):
2672 """\
2673 Return a scan which has been baselined (all rows) by cubic spline
2674 function (piecewise cubic polynomial).
2675 Spectral lines are detected first using linefinder and masked out
2676 to avoid them affecting the baseline solution.
2677
2678 Parameters:
2679 insitu: if False a new scantable is returned.
2680 Otherwise, the scaling is done in-situ
2681 The default is taken from .asaprc (False)
2682 mask: an optional mask retreived from scantable
2683 npiece: Number of pieces. (default is 2)
2684 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2685 clipniter: maximum number of iteration of 'clipthresh'-sigma
2686 clipping (default is 0)
2687 edge: an optional number of channel to drop at
2688 the edge of spectrum. If only one value is
2689 specified, the same number will be dropped
2690 from both sides of the spectrum. Default
2691 is to keep all channels. Nested tuples
2692 represent individual edge selection for
2693 different IFs (a number of spectral channels
2694 can be different)
2695 threshold: the threshold used by line finder. It is
2696 better to keep it large as only strong lines
2697 affect the baseline solution.
2698 chan_avg_limit: a maximum number of consequtive spectral
2699 channels to average during the search of
2700 weak and broad lines. The default is no
2701 averaging (and no search for weak lines).
2702 If such lines can affect the fitted baseline
2703 (e.g. a high order polynomial is fitted),
2704 increase this parameter (usually values up
2705 to 8 are reasonable). Most users of this
2706 method should find the default value sufficient.
2707 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2708 plot the fit and the residual. In this each
2709 indivual fit has to be approved, by typing 'y'
2710 or 'n'
2711 getresidual: if False, returns best-fit values instead of
2712 residual. (default is True)
2713 showprogress: show progress status for large data.
2714 default is True.
2715 minnrow: minimum number of input spectra to show.
2716 default is 1000.
2717 outlog: Output the coefficients of the best-fit
2718 function to logger (default is False)
2719 blfile: Name of a text file in which the best-fit
2720 parameter values to be written
2721 (default is "": no file/logger output)
2722
2723 Example:
2724 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2725
2726 Note:
2727 The best-fit parameter values output in logger and/or blfile are now
2728 based on specunit of 'channel'.
2729 """
2730
2731 try:
2732 varlist = vars()
2733
2734 if insitu is None: insitu = rcParams['insitu']
2735 if insitu:
2736 workscan = self
2737 else:
2738 workscan = self.copy()
2739
2740 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2741 if npiece is None: npiece = 2
2742 if clipthresh is None: clipthresh = 3.0
2743 if clipniter is None: clipniter = 0
2744 if edge is None: edge = (0, 0)
2745 if threshold is None: threshold = 3
2746 if chan_avg_limit is None: chan_avg_limit = 1
2747 if plot is None: plot = False
2748 if getresidual is None: getresidual = True
2749 if showprogress is None: showprogress = True
2750 if minnrow is None: minnrow = 1000
2751 if outlog is None: outlog = False
2752 if blfile is None: blfile = ''
2753
2754 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2755 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2756 clipniter,
2757 normalise_edge_param(edge),
2758 threshold,
2759 chan_avg_limit, getresidual,
2760 pack_progress_params(showprogress,
2761 minnrow),
2762 outlog, blfile)
2763 workscan._add_history("auto_cspline_baseline", varlist)
2764
2765 if insitu:
2766 self._assign(workscan)
2767 else:
2768 return workscan
2769
2770 except RuntimeError, e:
2771 raise_fitting_failure_exception(e)
2772
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for unset parameters
            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive mode: fit row by row with the Python fitter so
                # the user can approve or reject each baseline individually
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    # append mode: keep any previously written fit results
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own flag mask
                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store the residual (or the fit itself if requested)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive: the whole job is done on the C++ side
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2888
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for unset parameters
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # expand edge into a flat per-IF (start, end) sequence
            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: line-find, then fit row by row with the
                # Python fitter so the user can approve/reject each baseline
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # edge holds (start, end) pairs per IF, hence the 2*IF index
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store the residual (or the fit itself if requested)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: the whole job is done on the C++ side
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3039
3040 def _init_blinfo(self):
3041 """\
3042 Initialise the following three auxiliary members:
3043 blpars : parameters of the best-fit baseline,
3044 masklists : mask data (edge positions of masked channels) and
3045 actualmask : mask data (in boolean list),
3046 to keep for use later (including output to logger/text files).
3047 Used by poly_baseline() and auto_poly_baseline() in case of
3048 'plot=True'.
3049 """
3050 self.blpars = []
3051 self.masklists = []
3052 self.actualmask = []
3053 return
3054
3055 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3056 """\
3057 Append baseline-fitting related info to blpars, masklist and
3058 actualmask.
3059 """
3060 self.blpars.append(data_blpars)
3061 self.masklists.append(data_masklists)
3062 self.actualmask.append(data_actualmask)
3063 return
3064
3065 @asaplog_post_dec
3066 def rotate_linpolphase(self, angle):
3067 """\
3068 Rotate the phase of the complex polarization O=Q+iU correlation.
3069 This is always done in situ in the raw data. So if you call this
3070 function more than once then each call rotates the phase further.
3071
3072 Parameters:
3073
3074 angle: The angle (degrees) to rotate (add) by.
3075
3076 Example::
3077
3078 scan.rotate_linpolphase(2.3)
3079
3080 """
3081 varlist = vars()
3082 self._math._rotate_linpolphase(self, angle)
3083 self._add_history("rotate_linpolphase", varlist)
3084 return
3085
3086 @asaplog_post_dec
3087 def rotate_xyphase(self, angle):
3088 """\
3089 Rotate the phase of the XY correlation. This is always done in situ
3090 in the data. So if you call this function more than once
3091 then each call rotates the phase further.
3092
3093 Parameters:
3094
3095 angle: The angle (degrees) to rotate (add) by.
3096
3097 Example::
3098
3099 scan.rotate_xyphase(2.3)
3100
3101 """
3102 varlist = vars()
3103 self._math._rotate_xyphase(self, angle)
3104 self._add_history("rotate_xyphase", varlist)
3105 return
3106
3107 @asaplog_post_dec
3108 def swap_linears(self):
3109 """\
3110 Swap the linear polarisations XX and YY, or better the first two
3111 polarisations as this also works for ciculars.
3112 """
3113 varlist = vars()
3114 self._math._swap_linears(self)
3115 self._add_history("swap_linears", varlist)
3116 return
3117
3118 @asaplog_post_dec
3119 def invert_phase(self):
3120 """\
3121 Invert the phase of the complex polarisation
3122 """
3123 varlist = vars()
3124 self._math._invert_phase(self)
3125 self._add_history("invert_phase", varlist)
3126 return
3127
3128 @asaplog_post_dec
3129 def add(self, offset, insitu=None):
3130 """\
3131 Return a scan where all spectra have the offset added
3132
3133 Parameters:
3134
3135 offset: the offset
3136
3137 insitu: if False a new scantable is returned.
3138 Otherwise, the scaling is done in-situ
3139 The default is taken from .asaprc (False)
3140
3141 """
3142 if insitu is None: insitu = rcParams['insitu']
3143 self._math._setinsitu(insitu)
3144 varlist = vars()
3145 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3146 s._add_history("add", varlist)
3147 if insitu:
3148 self._assign(s)
3149 else:
3150 return s
3151
3152 @asaplog_post_dec
3153 def scale(self, factor, tsys=True, insitu=None):
3154 """\
3155
3156 Return a scan where all spectra are scaled by the given 'factor'
3157
3158 Parameters:
3159
3160 factor: the scaling factor (float or 1D float list)
3161
3162 insitu: if False a new scantable is returned.
3163 Otherwise, the scaling is done in-situ
3164 The default is taken from .asaprc (False)
3165
3166 tsys: if True (default) then apply the operation to Tsys
3167 as well as the data
3168
3169 """
3170 if insitu is None: insitu = rcParams['insitu']
3171 self._math._setinsitu(insitu)
3172 varlist = vars()
3173 s = None
3174 import numpy
3175 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3176 if isinstance(factor[0], list) or isinstance(factor[0],
3177 numpy.ndarray):
3178 from asapmath import _array2dOp
3179 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3180 else:
3181 s = scantable( self._math._arrayop( self, factor,
3182 "MUL", tsys ) )
3183 else:
3184 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3185 s._add_history("scale", varlist)
3186 if insitu:
3187 self._assign(s)
3188 else:
3189 return s
3190
3191 @preserve_selection
3192 def set_sourcetype(self, match, matchtype="pattern",
3193 sourcetype="reference"):
3194 """\
3195 Set the type of the source to be an source or reference scan
3196 using the provided pattern.
3197
3198 Parameters:
3199
3200 match: a Unix style pattern, regular expression or selector
3201
3202 matchtype: 'pattern' (default) UNIX style pattern or
3203 'regex' regular expression
3204
3205 sourcetype: the type of the source to use (source/reference)
3206
3207 """
3208 varlist = vars()
3209 basesel = self.get_selection()
3210 stype = -1
3211 if sourcetype.lower().startswith("r"):
3212 stype = 1
3213 elif sourcetype.lower().startswith("s"):
3214 stype = 0
3215 else:
3216 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3217 if matchtype.lower().startswith("p"):
3218 matchtype = "pattern"
3219 elif matchtype.lower().startswith("r"):
3220 matchtype = "regex"
3221 else:
3222 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3223 sel = selector()
3224 if isinstance(match, selector):
3225 sel = match
3226 else:
3227 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3228 self._setsourcetype(stype)
3229 self._add_history("set_sourcetype", varlist)
3230
3231 @asaplog_post_dec
3232 @preserve_selection
3233 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3234 """\
3235 This function allows to build quotients automatically.
3236 It assumes the observation to have the same number of
3237 "ons" and "offs"
3238
3239 Parameters:
3240
3241 preserve: you can preserve (default) the continuum or
3242 remove it. The equations used are
3243
3244 preserve: Output = Toff * (on/off) - Toff
3245
3246 remove: Output = Toff * (on/off) - Ton
3247
3248 mode: the on/off detection mode
3249 'paired' (default)
3250 identifies 'off' scans by the
3251 trailing '_R' (Mopra/Parkes) or
3252 '_e'/'_w' (Tid) and matches
3253 on/off pairs from the observing pattern
3254 'time'
3255 finds the closest off in time
3256
3257 .. todo:: verify argument is not implemented
3258
3259 """
3260 varlist = vars()
3261 modes = ["time", "paired"]
3262 if not mode in modes:
3263 msg = "please provide valid mode. Valid modes are %s" % (modes)
3264 raise ValueError(msg)
3265 s = None
3266 if mode.lower() == "paired":
3267 sel = self.get_selection()
3268 sel.set_query("SRCTYPE==psoff")
3269 self.set_selection(sel)
3270 offs = self.copy()
3271 sel.set_query("SRCTYPE==pson")
3272 self.set_selection(sel)
3273 ons = self.copy()
3274 s = scantable(self._math._quotient(ons, offs, preserve))
3275 elif mode.lower() == "time":
3276 s = scantable(self._math._auto_quotient(self, mode, preserve))
3277 s._add_history("auto_quotient", varlist)
3278 return s
3279
3280 @asaplog_post_dec
3281 def mx_quotient(self, mask = None, weight='median', preserve=True):
3282 """\
3283 Form a quotient using "off" beams when observing in "MX" mode.
3284
3285 Parameters:
3286
3287 mask: an optional mask to be used when weight == 'stddev'
3288
3289 weight: How to average the off beams. Default is 'median'.
3290
3291 preserve: you can preserve (default) the continuum or
3292 remove it. The equations used are:
3293
3294 preserve: Output = Toff * (on/off) - Toff
3295
3296 remove: Output = Toff * (on/off) - Ton
3297
3298 """
3299 mask = mask or ()
3300 varlist = vars()
3301 on = scantable(self._math._mx_extract(self, 'on'))
3302 preoff = scantable(self._math._mx_extract(self, 'off'))
3303 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3304 from asapmath import quotient
3305 q = quotient(on, off, preserve)
3306 q._add_history("mx_quotient", varlist)
3307 return q
3308
3309 @asaplog_post_dec
3310 def freq_switch(self, insitu=None):
3311 """\
3312 Apply frequency switching to the data.
3313
3314 Parameters:
3315
3316 insitu: if False a new scantable is returned.
3317 Otherwise, the swictching is done in-situ
3318 The default is taken from .asaprc (False)
3319
3320 """
3321 if insitu is None: insitu = rcParams['insitu']
3322 self._math._setinsitu(insitu)
3323 varlist = vars()
3324 s = scantable(self._math._freqswitch(self))
3325 s._add_history("freq_switch", varlist)
3326 if insitu:
3327 self._assign(s)
3328 else:
3329 return s
3330
3331 @asaplog_post_dec
3332 def recalc_azel(self):
3333 """Recalculate the azimuth and elevation for each position."""
3334 varlist = vars()
3335 self._recalcazel()
3336 self._add_history("recalc_azel", varlist)
3337 return
3338
3339 @asaplog_post_dec
3340 def __add__(self, other):
3341 varlist = vars()
3342 s = None
3343 if isinstance(other, scantable):
3344 s = scantable(self._math._binaryop(self, other, "ADD"))
3345 elif isinstance(other, float):
3346 s = scantable(self._math._unaryop(self, other, "ADD", False))
3347 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3348 if isinstance(other[0], list) \
3349 or isinstance(other[0], numpy.ndarray):
3350 from asapmath import _array2dOp
3351 s = _array2dOp( self.copy(), other, "ADD", False )
3352 else:
3353 s = scantable( self._math._arrayop( self.copy(), other,
3354 "ADD", False ) )
3355 else:
3356 raise TypeError("Other input is not a scantable or float value")
3357 s._add_history("operator +", varlist)
3358 return s
3359
3360 @asaplog_post_dec
3361 def __sub__(self, other):
3362 """
3363 implicit on all axes and on Tsys
3364 """
3365 varlist = vars()
3366 s = None
3367 if isinstance(other, scantable):
3368 s = scantable(self._math._binaryop(self, other, "SUB"))
3369 elif isinstance(other, float):
3370 s = scantable(self._math._unaryop(self, other, "SUB", False))
3371 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3372 if isinstance(other[0], list) \
3373 or isinstance(other[0], numpy.ndarray):
3374 from asapmath import _array2dOp
3375 s = _array2dOp( self.copy(), other, "SUB", False )
3376 else:
3377 s = scantable( self._math._arrayop( self.copy(), other,
3378 "SUB", False ) )
3379 else:
3380 raise TypeError("Other input is not a scantable or float value")
3381 s._add_history("operator -", varlist)
3382 return s
3383
3384 @asaplog_post_dec
3385 def __mul__(self, other):
3386 """
3387 implicit on all axes and on Tsys
3388 """
3389 varlist = vars()
3390 s = None
3391 if isinstance(other, scantable):
3392 s = scantable(self._math._binaryop(self, other, "MUL"))
3393 elif isinstance(other, float):
3394 s = scantable(self._math._unaryop(self, other, "MUL", False))
3395 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3396 if isinstance(other[0], list) \
3397 or isinstance(other[0], numpy.ndarray):
3398 from asapmath import _array2dOp
3399 s = _array2dOp( self.copy(), other, "MUL", False )
3400 else:
3401 s = scantable( self._math._arrayop( self.copy(), other,
3402 "MUL", False ) )
3403 else:
3404 raise TypeError("Other input is not a scantable or float value")
3405 s._add_history("operator *", varlist)
3406 return s
3407
3408
3409 @asaplog_post_dec
3410 def __div__(self, other):
3411 """
3412 implicit on all axes and on Tsys
3413 """
3414 varlist = vars()
3415 s = None
3416 if isinstance(other, scantable):
3417 s = scantable(self._math._binaryop(self, other, "DIV"))
3418 elif isinstance(other, float):
3419 if other == 0.0:
3420 raise ZeroDivisionError("Dividing by zero is not recommended")
3421 s = scantable(self._math._unaryop(self, other, "DIV", False))
3422 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3423 if isinstance(other[0], list) \
3424 or isinstance(other[0], numpy.ndarray):
3425 from asapmath import _array2dOp
3426 s = _array2dOp( self.copy(), other, "DIV", False )
3427 else:
3428 s = scantable( self._math._arrayop( self.copy(), other,
3429 "DIV", False ) )
3430 else:
3431 raise TypeError("Other input is not a scantable or float value")
3432 s._add_history("operator /", varlist)
3433 return s
3434
3435 @asaplog_post_dec
3436 def get_fit(self, row=0):
3437 """\
3438 Print or return the stored fits for a row in the scantable
3439
3440 Parameters:
3441
3442 row: the row which the fit has been applied to.
3443
3444 """
3445 if row > self.nrow():
3446 return
3447 from asap.asapfit import asapfit
3448 fit = asapfit(self._getfit(row))
3449 asaplog.push( '%s' %(fit) )
3450 return fit.as_dict()
3451
3452 @preserve_selection
3453 def flag_nans(self):
3454 """\
3455 Utility function to flag NaN values in the scantable.
3456 """
3457 import numpy
3458 basesel = self.get_selection()
3459 for i in range(self.nrow()):
3460 sel = self.get_row_selector(i)
3461 self.set_selection(basesel+sel)
3462 nans = numpy.isnan(self._getspectrum(0))
3463 if numpy.any(nans):
3464 bnans = [ bool(v) for v in nans]
3465 self.flag(bnans)
3466
3467 def get_row_selector(self, rowno):
3468 return selector(rows=[rowno])
3469
3470 def _add_history(self, funcname, parameters):
3471 if not rcParams['scantable.history']:
3472 return
3473 # create date
3474 sep = "##"
3475 from datetime import datetime
3476 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3477 hist = dstr+sep
3478 hist += funcname+sep#cdate+sep
3479 if parameters.has_key('self'):
3480 del parameters['self']
3481 for k, v in parameters.iteritems():
3482 if type(v) is dict:
3483 for k2, v2 in v.iteritems():
3484 hist += k2
3485 hist += "="
3486 if isinstance(v2, scantable):
3487 hist += 'scantable'
3488 elif k2 == 'mask':
3489 if isinstance(v2, list) or isinstance(v2, tuple):
3490 hist += str(self._zip_mask(v2))
3491 else:
3492 hist += str(v2)
3493 else:
3494 hist += str(v2)
3495 else:
3496 hist += k
3497 hist += "="
3498 if isinstance(v, scantable):
3499 hist += 'scantable'
3500 elif k == 'mask':
3501 if isinstance(v, list) or isinstance(v, tuple):
3502 hist += str(self._zip_mask(v))
3503 else:
3504 hist += str(v)
3505 else:
3506 hist += str(v)
3507 hist += sep
3508 hist = hist[:-2] # remove trailing '##'
3509 self._addhistory(hist)
3510
3511
3512 def _zip_mask(self, mask):
3513 mask = list(mask)
3514 i = 0
3515 segments = []
3516 while mask[i:].count(1):
3517 i += mask[i:].index(1)
3518 if mask[i:].count(0):
3519 j = i + mask[i:].index(0)
3520 else:
3521 j = len(mask)
3522 segments.append([i, j])
3523 i = j
3524 return segments
3525
3526 def _get_ordinate_label(self):
3527 fu = "("+self.get_fluxunit()+")"
3528 import re
3529 lbl = "Intensity"
3530 if re.match(".K.", fu):
3531 lbl = "Brightness Temperature "+ fu
3532 elif re.match(".Jy.", fu):
3533 lbl = "Flux density "+ fu
3534 return lbl
3535
3536 def _check_ifs(self):
3537# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
3538 nchans = [self.nchan(i) for i in self.getifnos()]
3539 nchans = filter(lambda t: t > 0, nchans)
3540 return (sum(nchans)/len(nchans) == nchans[0])
3541
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more datasets into this scantable.

        Parameters:

            names:      list of file/directory names to import; '$VARS'
                        and '~' are expanded, and a missing path raises
                        IOError

            unit:       if not None, passed to set_fluxunit() after
                        filling

            average:    if True, integrations are averaged per scan while
                        filling

            opts:       options dict handed to the filler's open()
                        (NOTE(review): mutable default argument -- safe
                        only as long as it is never modified here)
        """
        first = True
        fullnames = []
        # validate and expand all names up front so we fail before any
        # data has been read
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type flag: 1 when .asaprc requests disk-based tables,
        # 0 for memory tables
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the MeasurementSet filler or the generic filler based
            # on the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            # reference-scan matching expression from .asaprc is applied
            # to whichever filler was chosen
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge this file's data with everything read so far
                tbl = self._math._merge([self, tbl])
            # re-initialise 'self' around the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
3582
3583
3584 def __getitem__(self, key):
3585 if key < 0:
3586 key += self.nrow()
3587 if key >= self.nrow():
3588 raise IndexError("Row index out of range.")
3589 return self._getspectrum(key)
3590
3591 def __setitem__(self, key, value):
3592 if key < 0:
3593 key += self.nrow()
3594 if key >= self.nrow():
3595 raise IndexError("Row index out of range.")
3596 if not hasattr(value, "__len__") or \
3597 len(value) > self.nchan(self.getif(key)):
3598 raise ValueError("Spectrum length doesn't match.")
3599 return self._setspectrum(value, key)
3600
3601 def __len__(self):
3602 return self.nrow()
3603
3604 def __iter__(self):
3605 for i in range(len(self)):
3606 yield self[i]
Note: See TracBrowser for help on using the repository browser.