source: trunk/python/scantable.py@ 2413

Last change on this file since 2413 was 2411, checked in by WataruKawasaki, 13 years ago

New Development: No

JIRA Issue: Yes CAS-3759

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: (1) in sinusoidal baselining procedures and related functions, stopped obtaining nchan info on the Python side; it is now obtained on the C++ side.

(2) changed the default value of addwn from [] to [0] in sd.*_sinusoid_baseline(). (i.e., toolkit level)


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 139.4 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
22###############################################################
23### WK temporarily added these lines for testing 2011/11/28 ###
24###############################################################
25#from asap._asap import TestClass
26
27#class testclass(TestClass):
28# def __init__(self, nelem):
29# TestClass.__init__(self, nelem)
30
31###############################################################
32
33
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even if *func* raises."""
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always put the original selection back
            obj.set_selection(saved)
        return result
    return wrap
44
def is_scantable(filename):
    """Is the given file a scantable?

    A scantable is stored as a casacore table directory; any table
    directory whose table.info does NOT identify it as a Measurement
    Set is treated as a scantable.

    Parameters:

        filename: the name of the file/directory to test

    Returns:

        True if *filename* looks like a scantable, else False.

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # use a context manager so the handle is closed even on error
    with open(filename + '/table.info') as f:
        first_line = f.readline()
    # anything that is a casacore table but not an MS counts as a
    # scantable (matches the historical behaviour of this check)
    return first_line.find('Measurement Set') == -1
66## return (os.path.isdir(filename)
67## and not os.path.exists(filename+'/table.f1')
68## and os.path.exists(filename+'/table.info'))
69
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Checks for a casacore table directory whose table.info identifies
    it as a Measurement Set.

    Parameters:

        filename: the name of the file/directory to test

    Returns:

        True if *filename* is a MeasurementSet, else False.

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # use a context manager so the handle is closed even on error
    with open(filename + '/table.info') as f:
        first_line = f.readline()
    return first_line.find('Measurement Set') != -1
90
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation pass: every element must be an int or a pair of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                                   to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                                       to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
                               pair of integers specified as a tuple. \
                               Nested tuples are allowed \
                               to make individual selection for different IFs."


    # --- normalisation pass ---
    # scalar shorthand: one value applies to both edges of the spectra
    if isinstance(edge, int):
        edge = [ edge, edge ]                   # e.g. 3   => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                       # e.g. []  => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]     # e.g. [1] => [1,1]

    # 'commonedge' means a single [left,right] pair shared by all IFs;
    # any nested sequence or length > 2 implies per-IF values instead
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            # single remaining value: duplicate for both edges
            norm_edge = edge + edge
    else:
        # per-IF case: flatten, expanding bare ints to [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
175
def raise_fitting_failure_exception(e):
    """Report a failed fit: in verbose mode the exception and a hint are
    pushed to the log, otherwise a RuntimeError is raised."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
183
def pack_progress_params(showprogress, minnrow):
    """Encode progress-reporting options as the 'flag,minnrow' string
    expected by the C++ layer (flag is lower-cased)."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
186
187class scantable(Scantable):
188 """\
189 The ASAP container for scans (single-dish data).
190 """
191
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        # fill unset options from the rc-file defaults
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": wrap an existing low-level Scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table: open directly (optionally on disk)
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the
                    # filler figure out the format
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # list/tuple of filenames: fill them all into one table
                self._fill(filename, unit, average)
            self.parallactify(parallactify)
        self._add_history("scantable", varlist)
268
269 @asaplog_post_dec
270 def save(self, name=None, format=None, overwrite=False):
271 """\
272 Store the scantable on disk. This can be an asap (aips++) Table,
273 SDFITS or MS2 format.
274
275 Parameters:
276
277 name: the name of the outputfile. For format "ASCII"
278 this is the root file name (data in 'name'.txt
279 and header in 'name'_header.txt)
280
281 format: an optional file format. Default is ASAP.
282 Allowed are:
283
284 * 'ASAP' (save as ASAP [aips++] Table),
285 * 'SDFITS' (save as SDFITS file)
286 * 'ASCII' (saves as ascii text file)
287 * 'MS2' (saves as an casacore MeasurementSet V2)
288 * 'FITS' (save as image FITS - not readable by
289 class)
290 * 'CLASS' (save as FITS readable by CLASS)
291
292 overwrite: If the file should be overwritten if it exists.
293 The default False is to return with warning
294 without writing the output. USE WITH CARE.
295
296 Example::
297
298 scan.save('myscan.asap')
299 scan.save('myscan.sdfits', 'SDFITS')
300
301 """
302 from os import path
303 format = format or rcParams['scantable.save']
304 suffix = '.'+format.lower()
305 if name is None or name == "":
306 name = 'scantable'+suffix
307 msg = "No filename given. Using default name %s..." % name
308 asaplog.push(msg)
309 name = path.expandvars(name)
310 if path.isfile(name) or path.isdir(name):
311 if not overwrite:
312 msg = "File %s exists." % name
313 raise IOError(msg)
314 format2 = format.upper()
315 if format2 == 'ASAP':
316 self._save(name)
317 elif format2 == 'MS2':
318 msopt = {'ms': {'overwrite': overwrite } }
319 from asap._asap import mswriter
320 writer = mswriter( self )
321 writer.write( name, msopt )
322 else:
323 from asap._asap import stwriter as stw
324 writer = stw(format2)
325 writer.write(self, name)
326 return
327
328 def copy(self):
329 """Return a copy of this scantable.
330
331 *Note*:
332
333 This makes a full (deep) copy. scan2 = scan1 makes a reference.
334
335 Example::
336
337 copiedscan = scan.copy()
338
339 """
340 sd = scantable(Scantable._copy(self))
341 return sd
342
343 def drop_scan(self, scanid=None):
344 """\
345 Return a new scantable where the specified scan number(s) has(have)
346 been dropped.
347
348 Parameters:
349
350 scanid: a (list of) scan number(s)
351
352 """
353 from asap import _is_sequence_or_number as _is_valid
354 from asap import _to_list
355 from asap import unique
356 if not _is_valid(scanid):
357 raise RuntimeError( 'Please specify a scanno to drop from the'
358 ' scantable' )
359 scanid = _to_list(scanid)
360 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
361 for sid in scanid: allscans.remove(sid)
362 if len(allscans) == 0:
363 raise ValueError("Can't remove all scans")
364 sel = selector(scans=allscans)
365 return self._select_copy(sel)
366
367 def _select_copy(self, selection):
368 orig = self.get_selection()
369 self.set_selection(orig+selection)
370 cp = self.copy()
371 self.set_selection(orig)
372 return cp
373
374 def get_scan(self, scanid=None):
375 """\
376 Return a specific scan (by scanno) or collection of scans (by
377 source name) in a new scantable.
378
379 *Note*:
380
381 See scantable.drop_scan() for the inverse operation.
382
383 Parameters:
384
385 scanid: a (list of) scanno or a source name, unix-style
386 patterns are accepted for source name matching, e.g.
387 '*_R' gets all 'ref scans
388
389 Example::
390
391 # get all scans containing the source '323p459'
392 newscan = scan.get_scan('323p459')
393 # get all 'off' scans
394 refscans = scan.get_scan('*_R')
395 # get a susbset of scans by scanno (as listed in scan.summary())
396 newscan = scan.get_scan([0, 2, 7, 10])
397
398 """
399 if scanid is None:
400 raise RuntimeError( 'Please specify a scan no or name to '
401 'retrieve from the scantable' )
402 try:
403 bsel = self.get_selection()
404 sel = selector()
405 if type(scanid) is str:
406 sel.set_name(scanid)
407 return self._select_copy(sel)
408 elif type(scanid) is int:
409 sel.set_scans([scanid])
410 return self._select_copy(sel)
411 elif type(scanid) is list:
412 sel.set_scans(scanid)
413 return self._select_copy(sel)
414 else:
415 msg = "Illegal scanid type, use 'int' or 'list' if ints."
416 raise TypeError(msg)
417 except RuntimeError:
418 raise
419
420 def __str__(self):
421 tempFile = tempfile.NamedTemporaryFile()
422 Scantable._summary(self, tempFile.name)
423 tempFile.seek(0)
424 asaplog.clear()
425 return tempFile.file.read()
426
427 @asaplog_post_dec
428 def summary(self, filename=None):
429 """\
430 Print a summary of the contents of this scantable.
431
432 Parameters:
433
434 filename: the name of a file to write the putput to
435 Default - no file output
436
437 """
438# info = Scantable._summary(self)
439 if filename is not None:
440 if filename is "":
441 filename = 'scantable_summary.txt'
442 from os.path import expandvars, isdir
443 filename = expandvars(filename)
444# if not isdir(filename):
445# data = open(filename, 'w')
446# data.write(info)
447# data.close()
448# else:
449 if isdir(filename):
450 msg = "Illegal file name '%s'." % (filename)
451 raise IOError(msg)
452 else:
453 filename = ""
454 Scantable._summary(self, filename)
455# info = Scantable._summary(self, filename)
456# return page(info)
457
458 def get_spectrum(self, rowno):
459 """Return the spectrum for the current row in the scantable as a list.
460
461 Parameters:
462
463 rowno: the row number to retrieve the spectrum from
464
465 """
466 return self._getspectrum(rowno)
467
468 def get_mask(self, rowno):
469 """Return the mask for the current row in the scantable as a list.
470
471 Parameters:
472
473 rowno: the row number to retrieve the mask from
474
475 """
476 return self._getmask(rowno)
477
478 def set_spectrum(self, spec, rowno):
479 """Set the spectrum for the current row in the scantable.
480
481 Parameters:
482
483 spec: the new spectrum
484
485 rowno: the row number to set the spectrum for
486
487 """
488 assert(len(spec) == self.nchan(self.getif(rowno)))
489 return self._setspectrum(spec, rowno)
490
491 def get_coordinate(self, rowno):
492 """Return the (spectral) coordinate for a a given 'rowno'.
493
494 *Note*:
495
496 * This coordinate is only valid until a scantable method modifies
497 the frequency axis.
498 * This coordinate does contain the original frequency set-up
499 NOT the new frame. The conversions however are done using the user
500 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
501 use scantable.freq_align first. Without it there is no closure,
502 i.e.::
503
504 c = myscan.get_coordinate(0)
505 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
506
507 Parameters:
508
509 rowno: the row number for the spectral coordinate
510
511 """
512 return coordinate(Scantable.get_coordinate(self, rowno))
513
514 def get_selection(self):
515 """\
516 Get the selection object currently set on this scantable.
517
518 Example::
519
520 sel = scan.get_selection()
521 sel.set_ifs(0) # select IF 0
522 scan.set_selection(sel) # apply modified selection
523
524 """
525 return selector(self._getselection())
526
527 def set_selection(self, selection=None, **kw):
528 """\
529 Select a subset of the data. All following operations on this scantable
530 are only applied to thi selection.
531
532 Parameters:
533
534 selection: a selector object (default unset the selection), or
535 any combination of "pols", "ifs", "beams", "scans",
536 "cycles", "name", "query"
537
538 Examples::
539
540 sel = selector() # create a selection object
541 self.set_scans([0, 3]) # select SCANNO 0 and 3
542 scan.set_selection(sel) # set the selection
543 scan.summary() # will only print summary of scanno 0 an 3
544 scan.set_selection() # unset the selection
545 # or the equivalent
546 scan.set_selection(scans=[0,3])
547 scan.summary() # will only print summary of scanno 0 an 3
548 scan.set_selection() # unset the selection
549
550 """
551 if selection is None:
552 # reset
553 if len(kw) == 0:
554 selection = selector()
555 else:
556 # try keywords
557 for k in kw:
558 if k not in selector.fields:
559 raise KeyError("Invalid selection key '%s', "
560 "valid keys are %s" % (k,
561 selector.fields))
562 selection = selector(**kw)
563 self._setselection(selection)
564
565 def get_row(self, row=0, insitu=None):
566 """\
567 Select a row in the scantable.
568 Return a scantable with single row.
569
570 Parameters:
571
572 row: row no of integration, default is 0.
573 insitu: if False a new scantable is returned. Otherwise, the
574 scaling is done in-situ. The default is taken from .asaprc
575 (False)
576
577 """
578 if insitu is None:
579 insitu = rcParams['insitu']
580 if not insitu:
581 workscan = self.copy()
582 else:
583 workscan = self
584 # Select a row
585 sel = selector()
586 sel.set_rows([row])
587 workscan.set_selection(sel)
588 if not workscan.nrow() == 1:
589 msg = "Could not identify single row. %d rows selected." \
590 % (workscan.nrow())
591 raise RuntimeError(msg)
592 if insitu:
593 self._assign(workscan)
594 else:
595 return workscan
596
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel of the extremum for the report
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]
        # NOTE(review): if 'rows' ends up empty, 'statunit' below is
        # never assigned and the report would raise NameError — assumes
        # at least one row; confirm.

        # build a human-readable per-row report while collecting values
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa requested: collect qx values as the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # NOTE(review): predictable world-writable /tmp path — a
        # symlink attack risk; consider tempfile.mkstemp instead.
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        # read the report back and push it to the log in one chunk
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
704
705 def chan2data(self, rowno=0, chan=0):
706 """\
707 Returns channel/frequency/velocity and spectral value
708 at an arbitrary row and channel in the scantable.
709
710 Parameters:
711
712 rowno: a row number in the scantable. Default is the
713 first row, i.e. rowno=0
714
715 chan: a channel in the scantable. Default is the first
716 channel, i.e. pos=0
717
718 """
719 if isinstance(rowno, int) and isinstance(chan, int):
720 qx = {'unit': self.get_unit(),
721 'value': self._getabcissa(rowno)[chan]}
722 qy = {'unit': self.get_fluxunit(),
723 'value': self._getspectrum(rowno)[chan]}
724 return qx, qy
725
726 def stddev(self, mask=None):
727 """\
728 Determine the standard deviation of the current beam/if/pol
729 Takes a 'mask' as an optional parameter to specify which
730 channels should be excluded.
731
732 Parameters:
733
734 mask: an optional mask specifying where the standard
735 deviation should be determined.
736
737 Example::
738
739 scan.set_unit('channel')
740 msk = scan.create_mask([100, 200], [500, 600])
741 scan.stddev(mask=m)
742
743 """
744 return self.stats(stat='stddev', mask=mask);
745
746
747 def get_column_names(self):
748 """\
749 Return a list of column names, which can be used for selection.
750 """
751 return list(Scantable.get_column_names(self))
752
753 def get_tsys(self, row=-1):
754 """\
755 Return the System temperatures.
756
757 Parameters:
758
759 row: the rowno to get the information for. (default all rows)
760
761 Returns:
762
763 a list of Tsys values for the current selection
764
765 """
766 if row > -1:
767 return self._get_column(self._gettsys, row)
768 return self._row_callback(self._gettsys, "Tsys")
769
770 def get_tsysspectrum(self, row=-1):
771 """\
772 Return the channel dependent system temperatures.
773
774 Parameters:
775
776 row: the rowno to get the information for. (default all rows)
777
778 Returns:
779
780 a list of Tsys values for the current selection
781
782 """
783 return self._get_column( self._gettsysspectrum, row )
784
785 def get_weather(self, row=-1):
786 """\
787 Return the weather informations.
788
789 Parameters:
790
791 row: the rowno to get the information for. (default all rows)
792
793 Returns:
794
795 a dict or list of of dicts of values for the current selection
796
797 """
798
799 values = self._get_column(self._get_weather, row)
800 if row > -1:
801 return {'temperature': values[0],
802 'pressure': values[1], 'humidity' : values[2],
803 'windspeed' : values[3], 'windaz' : values[4]
804 }
805 else:
806 out = []
807 for r in values:
808
809 out.append({'temperature': r[0],
810 'pressure': r[1], 'humidity' : r[2],
811 'windspeed' : r[3], 'windaz' : r[4]
812 })
813 return out
814
815 def _row_callback(self, callback, label):
816 out = ""
817 outvec = []
818 sep = '-'*50
819 for i in range(self.nrow()):
820 tm = self._gettime(i)
821 src = self._getsourcename(i)
822 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
823 out += 'Time[%s]:\n' % (tm)
824 if self.nbeam(-1) > 1:
825 out += ' Beam[%d] ' % (self.getbeam(i))
826 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
827 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
828 outvec.append(callback(i))
829 out += '= %3.3f\n' % (outvec[i])
830 out += sep+'\n'
831
832 asaplog.push(sep)
833 asaplog.push(" %s" % (label))
834 asaplog.push(sep)
835 asaplog.push(out)
836 asaplog.post()
837 return outvec
838
839 def _get_column(self, callback, row=-1, *args):
840 """
841 """
842 if row == -1:
843 return [callback(i, *args) for i in range(self.nrow())]
844 else:
845 if 0 <= row < self.nrow():
846 return callback(row, *args)
847
848
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (more sub-second digits for shorter integration times)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # strip the unused trailing fields from the parse format so it
        # matches the precision actually produced above
        # NOTE: relies on Python 2 integer division in (6-prec)/2
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
894
895
896 def get_inttime(self, row=-1):
897 """\
898 Get a list of integration times for the observations.
899 Return a time in seconds for each integration in the scantable.
900
901 Parameters:
902
903 row: row no of integration. Default -1 return all rows.
904
905 """
906 return self._get_column(self._getinttime, row)
907
908
909 def get_sourcename(self, row=-1):
910 """\
911 Get a list source names for the observations.
912 Return a string for each integration in the scantable.
913 Parameters:
914
915 row: row no of integration. Default -1 return all rows.
916
917 """
918 return self._get_column(self._getsourcename, row)
919
920 def get_elevation(self, row=-1):
921 """\
922 Get a list of elevations for the observations.
923 Return a float for each integration in the scantable.
924
925 Parameters:
926
927 row: row no of integration. Default -1 return all rows.
928
929 """
930 return self._get_column(self._getelevation, row)
931
932 def get_azimuth(self, row=-1):
933 """\
934 Get a list of azimuths for the observations.
935 Return a float for each integration in the scantable.
936
937 Parameters:
938 row: row no of integration. Default -1 return all rows.
939
940 """
941 return self._get_column(self._getazimuth, row)
942
943 def get_parangle(self, row=-1):
944 """\
945 Get a list of parallactic angles for the observations.
946 Return a float for each integration in the scantable.
947
948 Parameters:
949
950 row: row no of integration. Default -1 return all rows.
951
952 """
953 return self._get_column(self._getparangle, row)
954
955 def get_direction(self, row=-1):
956 """
957 Get a list of Positions on the sky (direction) for the observations.
958 Return a string for each integration in the scantable.
959
960 Parameters:
961
962 row: row no of integration. Default -1 return all rows
963
964 """
965 return self._get_column(self._getdirection, row)
966
967 def get_directionval(self, row=-1):
968 """\
969 Get a list of Positions on the sky (direction) for the observations.
970 Return a float for each integration in the scantable.
971
972 Parameters:
973
974 row: row no of integration. Default -1 return all rows
975
976 """
977 return self._get_column(self._getdirectionvec, row)
978
979 @asaplog_post_dec
980 def set_unit(self, unit='channel'):
981 """\
982 Set the unit for all following operations on this scantable
983
984 Parameters:
985
986 unit: optional unit, default is 'channel'. Use one of '*Hz',
987 'km/s', 'channel' or equivalent ''
988
989 """
990 varlist = vars()
991 if unit in ['', 'pixel', 'channel']:
992 unit = ''
993 inf = list(self._getcoordinfo())
994 inf[0] = unit
995 self._setcoordinfo(inf)
996 self._add_history("set_unit", varlist)
997
998 @asaplog_post_dec
999 def set_instrument(self, instr):
1000 """\
1001 Set the instrument for subsequent processing.
1002
1003 Parameters:
1004
1005 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1006 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1007
1008 """
1009 self._setInstrument(instr)
1010 self._add_history("set_instument", vars())
1011
1012 @asaplog_post_dec
1013 def set_feedtype(self, feedtype):
1014 """\
1015 Overwrite the feed type, which might not be set correctly.
1016
1017 Parameters:
1018
1019 feedtype: 'linear' or 'circular'
1020
1021 """
1022 self._setfeedtype(feedtype)
1023 self._add_history("set_feedtype", vars())
1024
1025 @asaplog_post_dec
1026 def set_doppler(self, doppler='RADIO'):
1027 """\
1028 Set the doppler for all following operations on this scantable.
1029
1030 Parameters:
1031
1032 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1033
1034 """
1035 varlist = vars()
1036 inf = list(self._getcoordinfo())
1037 inf[2] = doppler
1038 self._setcoordinfo(inf)
1039 self._add_history("set_doppler", vars())
1040
1041 @asaplog_post_dec
1042 def set_freqframe(self, frame=None):
1043 """\
1044 Set the frame type of the Spectral Axis.
1045
1046 Parameters:
1047
1048 frame: an optional frame type, default 'LSRK'. Valid frames are:
1049 'TOPO', 'LSRD', 'LSRK', 'BARY',
1050 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1051
1052 Example::
1053
1054 scan.set_freqframe('BARY')
1055
1056 """
1057 frame = frame or rcParams['scantable.freqframe']
1058 varlist = vars()
1059 # "REST" is not implemented in casacore
1060 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1061 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1062 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1063 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1064
1065 if frame in valid:
1066 inf = list(self._getcoordinfo())
1067 inf[1] = frame
1068 self._setcoordinfo(inf)
1069 self._add_history("set_freqframe", varlist)
1070 else:
1071 msg = "Please specify a valid freq type. Valid types are:\n", valid
1072 raise TypeError(msg)
1073
1074 @asaplog_post_dec
1075 def set_dirframe(self, frame=""):
1076 """\
1077 Set the frame type of the Direction on the sky.
1078
1079 Parameters:
1080
1081 frame: an optional frame type, default ''. Valid frames are:
1082 'J2000', 'B1950', 'GALACTIC'
1083
1084 Example:
1085
1086 scan.set_dirframe('GALACTIC')
1087
1088 """
1089 varlist = vars()
1090 Scantable.set_dirframe(self, frame)
1091 self._add_history("set_dirframe", varlist)
1092
1093 def get_unit(self):
1094 """\
1095 Get the default unit set in this scantable
1096
1097 Returns:
1098
1099 A unit string
1100
1101 """
1102 inf = self._getcoordinfo()
1103 unit = inf[0]
1104 if unit == '': unit = 'channel'
1105 return unit
1106
1107 @asaplog_post_dec
1108 def get_abcissa(self, rowno=0):
1109 """\
1110 Get the abcissa in the current coordinate setup for the currently
1111 selected Beam/IF/Pol
1112
1113 Parameters:
1114
1115 rowno: an optional row number in the scantable. Default is the
1116 first row, i.e. rowno=0
1117
1118 Returns:
1119
1120 The abcissa values and the format string (as a dictionary)
1121
1122 """
1123 abc = self._getabcissa(rowno)
1124 lbl = self._getabcissalabel(rowno)
1125 return abc, lbl
1126
1127 @asaplog_post_dec
1128 def flag(self, mask=None, unflag=False, row=-1):
1129 """\
1130 Flag the selected data using an optional channel mask.
1131
1132 Parameters:
1133
1134 mask: an optional channel mask, created with create_mask. Default
1135 (no mask) is all channels.
1136
1137 unflag: if True, unflag the data
1138
1139 row: an optional row number in the scantable.
1140 Default -1 flags all rows
1141
1142 """
1143 varlist = vars()
1144 mask = mask or []
1145 self._flag(row, mask, unflag)
1146 self._add_history("flag", varlist)
1147
1148 @asaplog_post_dec
1149 def flag_row(self, rows=None, unflag=False):
1150 """\
1151 Flag the selected data in row-based manner.
1152
1153 Parameters:
1154
1155 rows: list of row numbers to be flagged. Default is no row
1156 (must be explicitly specified to execute row-based
1157 flagging).
1158
1159 unflag: if True, unflag the data.
1160
1161 """
1162 varlist = vars()
1163 if rows is None:
1164 rows = []
1165 self._flag_row(rows, unflag)
1166 self._add_history("flag_row", varlist)
1167
1168 @asaplog_post_dec
1169 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1170 """\
1171 Flag the selected data outside a specified range (in channel-base)
1172
1173 Parameters:
1174
1175 uthres: upper threshold.
1176
1177 dthres: lower threshold
1178
1179 clipoutside: True for flagging data outside the range
1180 [dthres:uthres].
1181 False for flagging data inside the range.
1182
1183 unflag: if True, unflag the data.
1184
1185 """
1186 varlist = vars()
1187 self._clip(uthres, dthres, clipoutside, unflag)
1188 self._add_history("clip", varlist)
1189
1190 @asaplog_post_dec
1191 def lag_flag(self, start, end, unit="MHz", insitu=None):
1192 """\
1193 Flag the data in 'lag' space by providing a frequency to remove.
1194 Flagged data in the scantable get interpolated over the region.
1195 No taper is applied.
1196
1197 Parameters:
1198
1199 start: the start frequency (really a period within the
1200 bandwidth) or period to remove
1201
1202 end: the end frequency or period to remove
1203
1204 unit: the frequency unit (default "MHz") or "" for
1205 explicit lag channels
1206
1207 *Notes*:
1208
1209 It is recommended to flag edges of the band or strong
1210 signals beforehand.
1211
1212 """
1213 if insitu is None: insitu = rcParams['insitu']
1214 self._math._setinsitu(insitu)
1215 varlist = vars()
1216 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1217 if not (unit == "" or base.has_key(unit)):
1218 raise ValueError("%s is not a valid unit." % unit)
1219 if unit == "":
1220 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1221 else:
1222 s = scantable(self._math._lag_flag(self, start*base[unit],
1223 end*base[unit], "frequency"))
1224 s._add_history("lag_flag", varlist)
1225 if insitu:
1226 self._assign(s)
1227 else:
1228 return s
1229
1230 @asaplog_post_dec
1231 def fft(self, rowno=None, mask=None, getrealimag=False):
1232 """\
1233 Apply FFT to the spectra.
1234 Flagged data in the scantable get interpolated over the region.
1235
1236 Parameters:
1237
1238 rowno: The row number(s) to be processed. int, list
1239 and tuple are accepted. By default (None), FFT
1240 is applied to the whole data.
1241
1242 mask: Auxiliary channel mask(s). Given as a boolean
1243 list, it is applied to all specified rows.
1244 A list of boolean lists can also be used to
1245 apply different masks. In the latter case, the
1246 length of 'mask' must be the same as that of
1247 'rowno'. The default is None.
1248
1249 getrealimag: If True, returns the real and imaginary part
1250 values of the complex results.
1251 If False (the default), returns the amplitude
1252 (absolute value) normalised with Ndata/2 and
1253 phase (argument, in unit of radian).
1254
1255 Returns:
1256
1257 A list of dictionaries containing the results for each spectrum.
1258 Each dictionary contains two values, the real and the imaginary
1259 parts when getrealimag = True, or the amplitude(absolute value)
1260 and the phase(argument) when getrealimag = False. The key for
1261 these values are 'real' and 'imag', or 'ampl' and 'phase',
1262 respectively.
1263 """
1264 if rowno is None:
1265 rowno = []
1266 if isinstance(rowno, int):
1267 rowno = [rowno]
1268 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1269 raise TypeError("The row number(s) must be int, list or tuple.")
1270 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1271
1272 usecommonmask = True
1273
1274 if mask is None:
1275 mask = []
1276 if isinstance(mask, list) or isinstance(mask, tuple):
1277 if len(mask) == 0:
1278 mask = [[]]
1279 else:
1280 if isinstance(mask[0], bool):
1281 if len(mask) != self.nchan(self.getif(rowno[0])):
1282 raise ValueError("The spectra and the mask have "
1283 "different length.")
1284 mask = [mask]
1285 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1286 usecommonmask = False
1287 if len(mask) != len(rowno):
1288 raise ValueError("When specifying masks for each "
1289 "spectrum, the numbers of them "
1290 "must be identical.")
1291 for i in xrange(mask):
1292 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1293 raise ValueError("The spectra and the mask have "
1294 "different length.")
1295 else:
1296 raise TypeError("The mask must be a boolean list or "
1297 "a list of boolean list.")
1298 else:
1299 raise TypeError("The mask must be a boolean list or a list of "
1300 "boolean list.")
1301
1302 res = []
1303
1304 imask = 0
1305 for whichrow in rowno:
1306 fspec = self._fft(whichrow, mask[imask], getrealimag)
1307 nspec = len(fspec)
1308
1309 i = 0
1310 v1 = []
1311 v2 = []
1312 reselem = {"real":[],"imag":[]} if getrealimag \
1313 else {"ampl":[],"phase":[]}
1314
1315 while (i < nspec):
1316 v1.append(fspec[i])
1317 v2.append(fspec[i+1])
1318 i += 2
1319
1320 if getrealimag:
1321 reselem["real"] += v1
1322 reselem["imag"] += v2
1323 else:
1324 reselem["ampl"] += v1
1325 reselem["phase"] += v2
1326
1327 res.append(reselem)
1328
1329 if not usecommonmask:
1330 imask += 1
1331
1332 return res
1333
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abcissa values of the reference row, in the current unit
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        # start with an all-False mask and switch selected channels on
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # If the last element of the last argument is a number, the windows
        # were passed as separate *args ([a,b], [c,d]); otherwise a single
        # list of windows ([[a,b],[c,d]]) was given and args[0] is used.
        # NOTE(review): an empty call (no args) would raise IndexError here.
        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value [x] selects just that one point
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            # swap bounds so window[0] <= window[1]
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        # the 'invert' keyword flips the mask, EXCLUDING the windows
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1406
1407 def get_masklist(self, mask=None, row=0, silent=False):
1408 """\
1409 Compute and return a list of mask windows, [min, max].
1410
1411 Parameters:
1412
1413 mask: channel mask, created with create_mask.
1414
1415 row: calcutate the masklist using the specified row
1416 for unit conversions, default is row=0
1417 only necessary if frequency varies over rows.
1418
1419 Returns:
1420
1421 [min, max], [min2, max2], ...
1422 Pairs of start/end points (inclusive)specifying
1423 the masked regions
1424
1425 """
1426 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1427 raise TypeError("The mask should be list or tuple.")
1428 if len(mask) < 2:
1429 raise TypeError("The mask elements should be > 1")
1430 data = self._getabcissa(row)
1431 if len(data) != len(mask):
1432 msg = "Number of channels in scantable != number of mask elements"
1433 raise TypeError(msg)
1434 u = self._getcoordinfo()[0]
1435 if u == "":
1436 u = "channel"
1437 msg = "The current mask window unit is %s" % u
1438 i = self._check_ifs()
1439 if not i:
1440 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1441 if not silent:
1442 asaplog.push(msg)
1443 masklist = []
1444 ist, ien = None, None
1445 ist, ien=self.get_mask_indices(mask)
1446 if ist is not None and ien is not None:
1447 for i in xrange(len(ist)):
1448 range=[data[ist[i]],data[ien[i]]]
1449 range.sort()
1450 masklist.append([range[0],range[1]])
1451 return masklist
1452
1453 def get_mask_indices(self, mask=None):
1454 """\
1455 Compute and Return lists of mask start indices and mask end indices.
1456
1457 Parameters:
1458
1459 mask: channel mask, created with create_mask.
1460
1461 Returns:
1462
1463 List of mask start indices and that of mask end indices,
1464 i.e., [istart1,istart2,....], [iend1,iend2,....].
1465
1466 """
1467 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1468 raise TypeError("The mask should be list or tuple.")
1469 if len(mask) < 2:
1470 raise TypeError("The mask elements should be > 1")
1471 istart = []
1472 iend = []
1473 if mask[0]:
1474 istart.append(0)
1475 for i in range(len(mask)-1):
1476 if not mask[i] and mask[i+1]:
1477 istart.append(i+1)
1478 elif mask[i] and not mask[i+1]:
1479 iend.append(i)
1480 if mask[len(mask)-1]:
1481 iend.append(len(mask)-1)
1482 if len(istart) != len(iend):
1483 raise RuntimeError("Numbers of mask start != mask end.")
1484 for i in range(len(istart)):
1485 if istart[i] > iend[i]:
1486 raise RuntimeError("Mask start index > mask end index")
1487 break
1488 return istart,iend
1489
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): this only logs an error and falls through; it does
        # not raise, so a non-string argument will fail later on split().
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                # NOTE(review): this branch is dead - the loop body never
                # runs when selspws is empty
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to read the
                        # frequency coordinate from
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width, in the current unit
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None (whole spectrum) becomes [minfreq, maxfreq]
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                    del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1623
1624 def _parse_selection(self, selstr, typestr='float', offset=0.,
1625 minval=None, maxval=None):
1626 """
1627 Parameters:
1628 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1629 typestr : The type of the values in returned list
1630 ('integer' or 'float')
1631 offset : The offset value to subtract from or add to
1632 the boundary value if the selection string
1633 includes '<' or '>'
1634 minval, maxval : The minimum/maximum values to set if the
1635 selection string includes '<' or '>'.
1636 The list element is filled with None by default.
1637 Returns:
1638 A list of min/max pair of selections.
1639 Example:
1640 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1641 returns [[0,2],[5,7],[9,9]]
1642 """
1643 selgroups = selstr.split(';')
1644 sellists = []
1645 if typestr.lower().startswith('int'):
1646 formatfunc = int
1647 else:
1648 formatfunc = float
1649
1650 for currsel in selgroups:
1651 if currsel.find('~') > 0:
1652 minsel = formatfunc(currsel.split('~')[0].strip())
1653 maxsel = formatfunc(currsel.split('~')[1].strip())
1654 elif currsel.strip().startswith('<'):
1655 minsel = minval
1656 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1657 - formatfunc(offset)
1658 elif currsel.strip().startswith('>'):
1659 minsel = formatfunc(currsel.split('>')[1].strip()) \
1660 + formatfunc(offset)
1661 maxsel = maxval
1662 else:
1663 minsel = formatfunc(currsel)
1664 maxsel = formatfunc(currsel)
1665 sellists.append([minsel,maxsel])
1666 return sellists
1667
1668# def get_restfreqs(self):
1669# """
1670# Get the restfrequency(s) stored in this scantable.
1671# The return value(s) are always of unit 'Hz'
1672# Parameters:
1673# none
1674# Returns:
1675# a list of doubles
1676# """
1677# return list(self._getrestfreqs())
1678
1679 def get_restfreqs(self, ids=None):
1680 """\
1681 Get the restfrequency(s) stored in this scantable.
1682 The return value(s) are always of unit 'Hz'
1683
1684 Parameters:
1685
1686 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1687 be retrieved
1688
1689 Returns:
1690
1691 dictionary containing ids and a list of doubles for each id
1692
1693 """
1694 if ids is None:
1695 rfreqs = {}
1696 idlist = self.getmolnos()
1697 for i in idlist:
1698 rfreqs[i] = list(self._getrestfreqs(i))
1699 return rfreqs
1700 else:
1701 if type(ids) == list or type(ids) == tuple:
1702 rfreqs = {}
1703 for i in ids:
1704 rfreqs[i] = list(self._getrestfreqs(i))
1705 return rfreqs
1706 else:
1707 return list(self._getrestfreqs(ids))
1708
1709 @asaplog_post_dec
1710 def set_restfreqs(self, freqs=None, unit='Hz'):
1711 """\
1712 Set or replace the restfrequency specified and
1713 if the 'freqs' argument holds a scalar,
1714 then that rest frequency will be applied to all the selected
1715 data. If the 'freqs' argument holds
1716 a vector, then it MUST be of equal or smaller length than
1717 the number of IFs (and the available restfrequencies will be
1718 replaced by this vector). In this case, *all* data have
1719 the restfrequency set per IF according
1720 to the corresponding value you give in the 'freqs' vector.
1721 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1722 IF 1 gets restfreq 2e9.
1723
1724 You can also specify the frequencies via a linecatalog.
1725
1726 Parameters:
1727
1728 freqs: list of rest frequency values or string idenitfiers
1729
1730 unit: unit for rest frequency (default 'Hz')
1731
1732
1733 Example::
1734
1735 # set the given restfrequency for the all currently selected IFs
1736 scan.set_restfreqs(freqs=1.4e9)
1737 # set restfrequencies for the n IFs (n > 1) in the order of the
1738 # list, i.e
1739 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1740 # len(list_of_restfreqs) == nIF
1741 # for nIF == 1 the following will set multiple restfrequency for
1742 # that IF
1743 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1744 # set multiple restfrequencies per IF. as a list of lists where
1745 # the outer list has nIF elements, the inner s arbitrary
1746 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1747
1748 *Note*:
1749
1750 To do more sophisticate Restfrequency setting, e.g. on a
1751 source and IF basis, use scantable.set_selection() before using
1752 this function::
1753
1754 # provided your scantable is called scan
1755 selection = selector()
1756 selection.set_name("ORION*")
1757 selection.set_ifs([1])
1758 scan.set_selection(selection)
1759 scan.set_restfreqs(freqs=86.6e9)
1760
1761 """
1762 varlist = vars()
1763 from asap import linecatalog
1764 # simple value
1765 if isinstance(freqs, int) or isinstance(freqs, float):
1766 self._setrestfreqs([freqs], [""], unit)
1767 # list of values
1768 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1769 # list values are scalars
1770 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1771 if len(freqs) == 1:
1772 self._setrestfreqs(freqs, [""], unit)
1773 else:
1774 # allow the 'old' mode of setting mulitple IFs
1775 sel = selector()
1776 savesel = self._getselection()
1777 iflist = self.getifnos()
1778 if len(freqs)>len(iflist):
1779 raise ValueError("number of elements in list of list "
1780 "exeeds the current IF selections")
1781 iflist = self.getifnos()
1782 for i, fval in enumerate(freqs):
1783 sel.set_ifs(iflist[i])
1784 self._setselection(sel)
1785 self._setrestfreqs([fval], [""], unit)
1786 self._setselection(savesel)
1787
1788 # list values are dict, {'value'=, 'name'=)
1789 elif isinstance(freqs[-1], dict):
1790 values = []
1791 names = []
1792 for d in freqs:
1793 values.append(d["value"])
1794 names.append(d["name"])
1795 self._setrestfreqs(values, names, unit)
1796 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1797 sel = selector()
1798 savesel = self._getselection()
1799 iflist = self.getifnos()
1800 if len(freqs)>len(iflist):
1801 raise ValueError("number of elements in list of list exeeds"
1802 " the current IF selections")
1803 for i, fval in enumerate(freqs):
1804 sel.set_ifs(iflist[i])
1805 self._setselection(sel)
1806 self._setrestfreqs(fval, [""], unit)
1807 self._setselection(savesel)
1808 # freqs are to be taken from a linecatalog
1809 elif isinstance(freqs, linecatalog):
1810 sel = selector()
1811 savesel = self._getselection()
1812 for i in xrange(freqs.nrow()):
1813 sel.set_ifs(iflist[i])
1814 self._setselection(sel)
1815 self._setrestfreqs([freqs.get_frequency(i)],
1816 [freqs.get_name(i)], "MHz")
1817 # ensure that we are not iterating past nIF
1818 if i == self.nif()-1: break
1819 self._setselection(savesel)
1820 else:
1821 return
1822 self._add_history("set_restfreqs", varlist)
1823
1824 @asaplog_post_dec
1825 def shift_refpix(self, delta):
1826 """\
1827 Shift the reference pixel of the Spectra Coordinate by an
1828 integer amount.
1829
1830 Parameters:
1831
1832 delta: the amount to shift by
1833
1834 *Note*:
1835
1836 Be careful using this with broadband data.
1837
1838 """
1839 varlist = vars()
1840 Scantable.shift_refpix(self, delta)
1841 s._add_history("shift_refpix", varlist)
1842
1843 @asaplog_post_dec
1844 def history(self, filename=None):
1845 """\
1846 Print the history. Optionally to a file.
1847
1848 Parameters:
1849
1850 filename: The name of the file to save the history to.
1851
1852 """
1853 hist = list(self._gethistory())
1854 out = "-"*80
1855 for h in hist:
1856 if h.startswith("---"):
1857 out = "\n".join([out, h])
1858 else:
1859 items = h.split("##")
1860 date = items[0]
1861 func = items[1]
1862 items = items[2:]
1863 out += "\n"+date+"\n"
1864 out += "Function: %s\n Parameters:" % (func)
1865 for i in items:
1866 if i == '':
1867 continue
1868 s = i.split("=")
1869 out += "\n %s = %s" % (s[0], s[1])
1870 out = "\n".join([out, "-"*80])
1871 if filename is not None:
1872 if filename is "":
1873 filename = 'scantable_history.txt'
1874 import os
1875 filename = os.path.expandvars(os.path.expanduser(filename))
1876 if not os.path.isdir(filename):
1877 data = open(filename, 'w')
1878 data.write(out)
1879 data.close()
1880 else:
1881 msg = "Illegal file name '%s'." % (filename)
1882 raise IOError(msg)
1883 return page(out)
1884
1885 #
1886 # Maths business
1887 #
1888 @asaplog_post_dec
1889 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1890 """\
1891 Return the (time) weighted average of a scan. Scans will be averaged
1892 only if the source direction (RA/DEC) is within 1' otherwise
1893
1894 *Note*:
1895
1896 in channels only - align if necessary
1897
1898 Parameters:
1899
1900 mask: an optional mask (only used for 'var' and 'tsys'
1901 weighting)
1902
1903 scanav: True averages each scan separately
1904 False (default) averages all scans together,
1905
1906 weight: Weighting scheme.
1907 'none' (mean no weight)
1908 'var' (1/var(spec) weighted)
1909 'tsys' (1/Tsys**2 weighted)
1910 'tint' (integration time weighted)
1911 'tintsys' (Tint/Tsys**2)
1912 'median' ( median averaging)
1913 The default is 'tint'
1914
1915 align: align the spectra in velocity before averaging. It takes
1916 the time of the first spectrum as reference time.
1917
1918 Example::
1919
1920 # time average the scantable without using a mask
1921 newscan = scan.average_time()
1922
1923 """
1924 varlist = vars()
1925 weight = weight or 'TINT'
1926 mask = mask or ()
1927 scanav = (scanav and 'SCAN') or 'NONE'
1928 scan = (self, )
1929
1930 if align:
1931 scan = (self.freq_align(insitu=False), )
1932 s = None
1933 if weight.upper() == 'MEDIAN':
1934 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1935 scanav))
1936 else:
1937 s = scantable(self._math._average(scan, mask, weight.upper(),
1938 scanav))
1939 s._add_history("average_time", varlist)
1940 return s
1941
1942 @asaplog_post_dec
1943 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1944 """\
1945 Return a scan where all spectra are converted to either
1946 Jansky or Kelvin depending upon the flux units of the scan table.
1947 By default the function tries to look the values up internally.
1948 If it can't find them (or if you want to over-ride), you must
1949 specify EITHER jyperk OR eta (and D which it will try to look up
1950 also if you don't set it). jyperk takes precedence if you set both.
1951
1952 Parameters:
1953
1954 jyperk: the Jy / K conversion factor
1955
1956 eta: the aperture efficiency
1957
1958 d: the geometric diameter (metres)
1959
1960 insitu: if False a new scantable is returned.
1961 Otherwise, the scaling is done in-situ
1962 The default is taken from .asaprc (False)
1963
1964 """
1965 if insitu is None: insitu = rcParams['insitu']
1966 self._math._setinsitu(insitu)
1967 varlist = vars()
1968 jyperk = jyperk or -1.0
1969 d = d or -1.0
1970 eta = eta or -1.0
1971 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1972 s._add_history("convert_flux", varlist)
1973 if insitu: self._assign(s)
1974 else: return s
1975
1976 @asaplog_post_dec
1977 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1978 """\
1979 Return a scan after applying a gain-elevation correction.
1980 The correction can be made via either a polynomial or a
1981 table-based interpolation (and extrapolation if necessary).
1982 You specify polynomial coefficients, an ascii table or neither.
1983 If you specify neither, then a polynomial correction will be made
1984 with built in coefficients known for certain telescopes (an error
1985 will occur if the instrument is not known).
1986 The data and Tsys are *divided* by the scaling factors.
1987
1988 Parameters:
1989
1990 poly: Polynomial coefficients (default None) to compute a
1991 gain-elevation correction as a function of
1992 elevation (in degrees).
1993
1994 filename: The name of an ascii file holding correction factors.
1995 The first row of the ascii file must give the column
1996 names and these MUST include columns
1997 "ELEVATION" (degrees) and "FACTOR" (multiply data
1998 by this) somewhere.
1999 The second row must give the data type of the
2000 column. Use 'R' for Real and 'I' for Integer.
2001 An example file would be
2002 (actual factors are arbitrary) :
2003
2004 TIME ELEVATION FACTOR
2005 R R R
2006 0.1 0 0.8
2007 0.2 20 0.85
2008 0.3 40 0.9
2009 0.4 60 0.85
2010 0.5 80 0.8
2011 0.6 90 0.75
2012
2013 method: Interpolation method when correcting from a table.
2014 Values are "nearest", "linear" (default), "cubic"
2015 and "spline"
2016
2017 insitu: if False a new scantable is returned.
2018 Otherwise, the scaling is done in-situ
2019 The default is taken from .asaprc (False)
2020
2021 """
2022
2023 if insitu is None: insitu = rcParams['insitu']
2024 self._math._setinsitu(insitu)
2025 varlist = vars()
2026 poly = poly or ()
2027 from os.path import expandvars
2028 filename = expandvars(filename)
2029 s = scantable(self._math._gainel(self, poly, filename, method))
2030 s._add_history("gain_el", varlist)
2031 if insitu:
2032 self._assign(s)
2033 else:
2034 return s
2035
2036 @asaplog_post_dec
2037 def freq_align(self, reftime=None, method='cubic', insitu=None):
2038 """\
2039 Return a scan where all rows have been aligned in frequency/velocity.
2040 The alignment frequency frame (e.g. LSRK) is that set by function
2041 set_freqframe.
2042
2043 Parameters:
2044
2045 reftime: reference time to align at. By default, the time of
2046 the first row of data is used.
2047
2048 method: Interpolation method for regridding the spectra.
2049 Choose from "nearest", "linear", "cubic" (default)
2050 and "spline"
2051
2052 insitu: if False a new scantable is returned.
2053 Otherwise, the scaling is done in-situ
2054 The default is taken from .asaprc (False)
2055
2056 """
2057 if insitu is None: insitu = rcParams["insitu"]
2058 self._math._setinsitu(insitu)
2059 varlist = vars()
2060 reftime = reftime or ""
2061 s = scantable(self._math._freq_align(self, reftime, method))
2062 s._add_history("freq_align", varlist)
2063 if insitu:
2064 self._assign(s)
2065 else:
2066 return s
2067
2068 @asaplog_post_dec
2069 def opacity(self, tau=None, insitu=None):
2070 """\
2071 Apply an opacity correction. The data
2072 and Tsys are multiplied by the correction factor.
2073
2074 Parameters:
2075
2076 tau: (list of) opacity from which the correction factor is
2077 exp(tau*ZD)
2078 where ZD is the zenith-distance.
2079 If a list is provided, it has to be of length nIF,
2080 nIF*nPol or 1 and in order of IF/POL, e.g.
2081 [opif0pol0, opif0pol1, opif1pol0 ...]
2082 if tau is `None` the opacities are determined from a
2083 model.
2084
2085 insitu: if False a new scantable is returned.
2086 Otherwise, the scaling is done in-situ
2087 The default is taken from .asaprc (False)
2088
2089 """
2090 if insitu is None:
2091 insitu = rcParams['insitu']
2092 self._math._setinsitu(insitu)
2093 varlist = vars()
2094 if not hasattr(tau, "__len__"):
2095 tau = [tau]
2096 s = scantable(self._math._opacity(self, tau))
2097 s._add_history("opacity", varlist)
2098 if insitu:
2099 self._assign(s)
2100 else:
2101 return s
2102
2103 @asaplog_post_dec
2104 def bin(self, width=5, insitu=None):
2105 """\
2106 Return a scan where all spectra have been binned up.
2107
2108 Parameters:
2109
2110 width: The bin width (default=5) in pixels
2111
2112 insitu: if False a new scantable is returned.
2113 Otherwise, the scaling is done in-situ
2114 The default is taken from .asaprc (False)
2115
2116 """
2117 if insitu is None:
2118 insitu = rcParams['insitu']
2119 self._math._setinsitu(insitu)
2120 varlist = vars()
2121 s = scantable(self._math._bin(self, width))
2122 s._add_history("bin", varlist)
2123 if insitu:
2124 self._assign(s)
2125 else:
2126 return s
2127
2128 @asaplog_post_dec
2129 def resample(self, width=5, method='cubic', insitu=None):
2130 """\
2131 Return a scan where all spectra have been binned up.
2132
2133 Parameters:
2134
2135 width: The bin width (default=5) in pixels
2136
2137 method: Interpolation method when correcting from a table.
2138 Values are "nearest", "linear", "cubic" (default)
2139 and "spline"
2140
2141 insitu: if False a new scantable is returned.
2142 Otherwise, the scaling is done in-situ
2143 The default is taken from .asaprc (False)
2144
2145 """
2146 if insitu is None:
2147 insitu = rcParams['insitu']
2148 self._math._setinsitu(insitu)
2149 varlist = vars()
2150 s = scantable(self._math._resample(self, method, width))
2151 s._add_history("resample", varlist)
2152 if insitu:
2153 self._assign(s)
2154 else:
2155 return s
2156
2157 @asaplog_post_dec
2158 def average_pol(self, mask=None, weight='none'):
2159 """\
2160 Average the Polarisations together.
2161
2162 Parameters:
2163
2164 mask: An optional mask defining the region, where the
2165 averaging will be applied. The output will have all
2166 specified points masked.
2167
2168 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2169 weighted), or 'tsys' (1/Tsys**2 weighted)
2170
2171 """
2172 varlist = vars()
2173 mask = mask or ()
2174 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2175 s._add_history("average_pol", varlist)
2176 return s
2177
2178 @asaplog_post_dec
2179 def average_beam(self, mask=None, weight='none'):
2180 """\
2181 Average the Beams together.
2182
2183 Parameters:
2184 mask: An optional mask defining the region, where the
2185 averaging will be applied. The output will have all
2186 specified points masked.
2187
2188 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2189 weighted), or 'tsys' (1/Tsys**2 weighted)
2190
2191 """
2192 varlist = vars()
2193 mask = mask or ()
2194 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2195 s._add_history("average_beam", varlist)
2196 return s
2197
2198 def parallactify(self, pflag):
2199 """\
2200 Set a flag to indicate whether this data should be treated as having
2201 been 'parallactified' (total phase == 0.0)
2202
2203 Parameters:
2204
2205 pflag: Bool indicating whether to turn this on (True) or
2206 off (False)
2207
2208 """
2209 varlist = vars()
2210 self._parallactify(pflag)
2211 self._add_history("parallactify", varlist)
2212
2213 @asaplog_post_dec
2214 def convert_pol(self, poltype=None):
2215 """\
2216 Convert the data to a different polarisation type.
2217 Note that you will need cross-polarisation terms for most conversions.
2218
2219 Parameters:
2220
2221 poltype: The new polarisation type. Valid types are:
2222 "linear", "circular", "stokes" and "linpol"
2223
2224 """
2225 varlist = vars()
2226 s = scantable(self._math._convertpol(self, poltype))
2227 s._add_history("convert_pol", varlist)
2228 return s
2229
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored, otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the smoothing is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        # capture the call arguments for the history record
        varlist = vars()

        # keep a copy of the unsmoothed data for interactive comparison
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive approval loop: show original vs smoothed spectrum
            # for every row and let the user reject individual rows
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                    )
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original (unsmoothed) spectrum
                    s._setspectrum(yorg, r)
            # tear down the plotter and the temporary copy
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2307
2308 @asaplog_post_dec
2309 def _parse_wn(self, wn):
2310 if isinstance(wn, list) or isinstance(wn, tuple):
2311 return wn
2312 elif isinstance(wn, int):
2313 return [ wn ]
2314 elif isinstance(wn, str):
2315 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2316 val = wn.split('-')
2317 val = [int(val[0]), int(val[1])]
2318 val.sort()
2319 res = [i for i in xrange(val[0], val[1]+1)]
2320 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2321 val = int(wn[2:])+1
2322 res = [i for i in xrange(val)]
2323 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2324 val = int(wn[:-2])+1
2325 res = [i for i in xrange(val)]
2326 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2327 val = int(wn[1:])
2328 res = [i for i in xrange(val)]
2329 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2330 val = int(wn[:-1])
2331 res = [i for i in xrange(val)]
2332 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2333 # then interpreted in C++
2334 # side as [a,a+1,...,a_nyq]
2335 # (CAS-3759)
2336 val = int(wn[2:])
2337 res = [val, -999]
2338 #res = [i for i in xrange(val, self.nchan()/2+1)]
2339 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2340 # then interpreted in C++
2341 # side as [a,a+1,...,a_nyq]
2342 # (CAS-3759)
2343 val = int(wn[:-2])
2344 res = [val, -999]
2345 #res = [i for i in xrange(val, self.nchan()/2+1)]
2346 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2347 # then interpreted in C++
2348 # side as [a+1,a+2,...,a_nyq]
2349 # (CAS-3759)
2350 val = int(wn[1:])+1
2351 res = [val, -999]
2352 #res = [i for i in xrange(val, self.nchan()/2+1)]
2353 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2354 # then interpreted in C++
2355 # side as [a+1,a+2,...,a_nyq]
2356 # (CAS-3759)
2357 val = int(wn[:-1])+1
2358 res = [val, -999]
2359 #res = [i for i in xrange(val, self.nchan()/2+1)]
2360
2361 return res
2362 else:
2363 msg = 'wrong value given for addwn/rejwn'
2364 raise RuntimeError(msg)
2365
2366
2367 @asaplog_post_dec
2368 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2369 fftmethod=None, fftthresh=None,
2370 addwn=None, rejwn=None, clipthresh=None,
2371 clipniter=None, plot=None,
2372 getresidual=None, showprogress=None,
2373 minnrow=None, outlog=None, blfile=None):
2374 """\
2375 Return a scan which has been baselined (all rows) with sinusoidal
2376 functions.
2377
2378 Parameters:
2379 insitu: if False a new scantable is returned.
2380 Otherwise, the scaling is done in-situ
2381 The default is taken from .asaprc (False)
2382 mask: an optional mask
2383 applyfft: if True use some method, such as FFT, to find
2384 strongest sinusoidal components in the wavenumber
2385 domain to be used for baseline fitting.
2386 default is True.
2387 fftmethod: method to find the strong sinusoidal components.
2388 now only 'fft' is available and it is the default.
2389 fftthresh: the threshold to select wave numbers to be used for
2390 fitting from the distribution of amplitudes in the
2391 wavenumber domain.
2392 both float and string values accepted.
2393 given a float value, the unit is set to sigma.
2394 for string values, allowed formats include:
2395 'xsigma' or 'x' (= x-sigma level. e.g.,
2396 '3sigma'), or
2397 'topx' (= the x strongest ones, e.g. 'top5').
2398 default is 3.0 (unit: sigma).
2399 addwn: the additional wave numbers to be used for fitting.
2400 list or integer value is accepted to specify every
2401 wave numbers. also string value can be used in case
2402 you need to specify wave numbers in a certain range,
2403 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2404 '<a' (= 0,1,...,a-2,a-1),
2405 '>=a' (= a, a+1, ... up to the maximum wave
2406 number corresponding to the Nyquist
2407 frequency for the case of FFT).
2408 default is [0].
2409 rejwn: the wave numbers NOT to be used for fitting.
2410 can be set just as addwn but has higher priority:
2411 wave numbers which are specified both in addwn
2412 and rejwn will NOT be used. default is [].
2413 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2414 clipniter: maximum number of iteration of 'clipthresh'-sigma
2415 clipping (default is 0)
2416 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2417 plot the fit and the residual. In this each
2418 indivual fit has to be approved, by typing 'y'
2419 or 'n'
2420 getresidual: if False, returns best-fit values instead of
2421 residual. (default is True)
2422 showprogress: show progress status for large data.
2423 default is True.
2424 minnrow: minimum number of input spectra to show.
2425 default is 1000.
2426 outlog: Output the coefficients of the best-fit
2427 function to logger (default is False)
2428 blfile: Name of a text file in which the best-fit
2429 parameter values to be written
2430 (default is '': no file/logger output)
2431
2432 Example:
2433 # return a scan baselined by a combination of sinusoidal curves
2434 # having wave numbers in spectral window up to 10,
2435 # also with 3-sigma clipping, iteration up to 4 times
2436 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2437
2438 Note:
2439 The best-fit parameter values output in logger and/or blfile are now
2440 based on specunit of 'channel'.
2441 """
2442
2443 try:
2444 varlist = vars()
2445
2446 if insitu is None: insitu = rcParams['insitu']
2447 if insitu:
2448 workscan = self
2449 else:
2450 workscan = self.copy()
2451
2452 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2453 if mask is None: mask = []
2454 if applyfft is None: applyfft = True
2455 if fftmethod is None: fftmethod = 'fft'
2456 if fftthresh is None: fftthresh = 3.0
2457 if addwn is None: addwn = [0]
2458 if rejwn is None: rejwn = []
2459 if clipthresh is None: clipthresh = 3.0
2460 if clipniter is None: clipniter = 0
2461 if plot is None: plot = False
2462 if getresidual is None: getresidual = True
2463 if showprogress is None: showprogress = True
2464 if minnrow is None: minnrow = 1000
2465 if outlog is None: outlog = False
2466 if blfile is None: blfile = ''
2467
2468 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2469 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2470 str(fftthresh).lower(),
2471 workscan._parse_wn(addwn),
2472 workscan._parse_wn(rejwn), clipthresh,
2473 clipniter, getresidual,
2474 pack_progress_params(showprogress,
2475 minnrow), outlog,
2476 blfile)
2477 workscan._add_history('sinusoid_baseline', varlist)
2478
2479 if insitu:
2480 self._assign(workscan)
2481 else:
2482 return workscan
2483
2484 except RuntimeError, e:
2485 raise_fitting_failure_exception(e)
2486
2487
2488 @asaplog_post_dec
2489 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2490 fftmethod=None, fftthresh=None,
2491 addwn=None, rejwn=None, clipthresh=None,
2492 clipniter=None, edge=None, threshold=None,
2493 chan_avg_limit=None, plot=None,
2494 getresidual=None, showprogress=None,
2495 minnrow=None, outlog=None, blfile=None):
2496 """\
2497 Return a scan which has been baselined (all rows) with sinusoidal
2498 functions.
2499 Spectral lines are detected first using linefinder and masked out
2500 to avoid them affecting the baseline solution.
2501
2502 Parameters:
2503 insitu: if False a new scantable is returned.
2504 Otherwise, the scaling is done in-situ
2505 The default is taken from .asaprc (False)
2506 mask: an optional mask retreived from scantable
2507 applyfft: if True use some method, such as FFT, to find
2508 strongest sinusoidal components in the wavenumber
2509 domain to be used for baseline fitting.
2510 default is True.
2511 fftmethod: method to find the strong sinusoidal components.
2512 now only 'fft' is available and it is the default.
2513 fftthresh: the threshold to select wave numbers to be used for
2514 fitting from the distribution of amplitudes in the
2515 wavenumber domain.
2516 both float and string values accepted.
2517 given a float value, the unit is set to sigma.
2518 for string values, allowed formats include:
2519 'xsigma' or 'x' (= x-sigma level. e.g.,
2520 '3sigma'), or
2521 'topx' (= the x strongest ones, e.g. 'top5').
2522 default is 3.0 (unit: sigma).
2523 addwn: the additional wave numbers to be used for fitting.
2524 list or integer value is accepted to specify every
2525 wave numbers. also string value can be used in case
2526 you need to specify wave numbers in a certain range,
2527 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2528 '<a' (= 0,1,...,a-2,a-1),
2529 '>=a' (= a, a+1, ... up to the maximum wave
2530 number corresponding to the Nyquist
2531 frequency for the case of FFT).
2532 default is [0].
2533 rejwn: the wave numbers NOT to be used for fitting.
2534 can be set just as addwn but has higher priority:
2535 wave numbers which are specified both in addwn
2536 and rejwn will NOT be used. default is [].
2537 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2538 clipniter: maximum number of iteration of 'clipthresh'-sigma
2539 clipping (default is 0)
2540 edge: an optional number of channel to drop at
2541 the edge of spectrum. If only one value is
2542 specified, the same number will be dropped
2543 from both sides of the spectrum. Default
2544 is to keep all channels. Nested tuples
2545 represent individual edge selection for
2546 different IFs (a number of spectral channels
2547 can be different)
2548 threshold: the threshold used by line finder. It is
2549 better to keep it large as only strong lines
2550 affect the baseline solution.
2551 chan_avg_limit: a maximum number of consequtive spectral
2552 channels to average during the search of
2553 weak and broad lines. The default is no
2554 averaging (and no search for weak lines).
2555 If such lines can affect the fitted baseline
2556 (e.g. a high order polynomial is fitted),
2557 increase this parameter (usually values up
2558 to 8 are reasonable). Most users of this
2559 method should find the default value sufficient.
2560 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2561 plot the fit and the residual. In this each
2562 indivual fit has to be approved, by typing 'y'
2563 or 'n'
2564 getresidual: if False, returns best-fit values instead of
2565 residual. (default is True)
2566 showprogress: show progress status for large data.
2567 default is True.
2568 minnrow: minimum number of input spectra to show.
2569 default is 1000.
2570 outlog: Output the coefficients of the best-fit
2571 function to logger (default is False)
2572 blfile: Name of a text file in which the best-fit
2573 parameter values to be written
2574 (default is "": no file/logger output)
2575
2576 Example:
2577 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2578
2579 Note:
2580 The best-fit parameter values output in logger and/or blfile are now
2581 based on specunit of 'channel'.
2582 """
2583
2584 try:
2585 varlist = vars()
2586
2587 if insitu is None: insitu = rcParams['insitu']
2588 if insitu:
2589 workscan = self
2590 else:
2591 workscan = self.copy()
2592
2593 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2594 if mask is None: mask = []
2595 if applyfft is None: applyfft = True
2596 if fftmethod is None: fftmethod = 'fft'
2597 if fftthresh is None: fftthresh = 3.0
2598 if addwn is None: addwn = [0]
2599 if rejwn is None: rejwn = []
2600 if clipthresh is None: clipthresh = 3.0
2601 if clipniter is None: clipniter = 0
2602 if edge is None: edge = (0,0)
2603 if threshold is None: threshold = 3
2604 if chan_avg_limit is None: chan_avg_limit = 1
2605 if plot is None: plot = False
2606 if getresidual is None: getresidual = True
2607 if showprogress is None: showprogress = True
2608 if minnrow is None: minnrow = 1000
2609 if outlog is None: outlog = False
2610 if blfile is None: blfile = ''
2611
2612 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2613 workscan._auto_sinusoid_baseline(mask, applyfft,
2614 fftmethod.lower(),
2615 str(fftthresh).lower(),
2616 workscan._parse_wn(addwn),
2617 workscan._parse_wn(rejwn),
2618 clipthresh, clipniter,
2619 normalise_edge_param(edge),
2620 threshold, chan_avg_limit,
2621 getresidual,
2622 pack_progress_params(showprogress,
2623 minnrow),
2624 outlog, blfile)
2625 workscan._add_history("auto_sinusoid_baseline", varlist)
2626
2627 if insitu:
2628 self._assign(workscan)
2629 else:
2630 return workscan
2631
2632 except RuntimeError, e:
2633 raise_fitting_failure_exception(e)
2634
2635 @asaplog_post_dec
2636 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2637 clipthresh=None, clipniter=None, plot=None,
2638 getresidual=None, showprogress=None, minnrow=None,
2639 outlog=None, blfile=None):
2640 """\
2641 Return a scan which has been baselined (all rows) by cubic spline
2642 function (piecewise cubic polynomial).
2643
2644 Parameters:
2645 insitu: If False a new scantable is returned.
2646 Otherwise, the scaling is done in-situ
2647 The default is taken from .asaprc (False)
2648 mask: An optional mask
2649 npiece: Number of pieces. (default is 2)
2650 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2651 clipniter: maximum number of iteration of 'clipthresh'-sigma
2652 clipping (default is 0)
2653 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2654 plot the fit and the residual. In this each
2655 indivual fit has to be approved, by typing 'y'
2656 or 'n'
2657 getresidual: if False, returns best-fit values instead of
2658 residual. (default is True)
2659 showprogress: show progress status for large data.
2660 default is True.
2661 minnrow: minimum number of input spectra to show.
2662 default is 1000.
2663 outlog: Output the coefficients of the best-fit
2664 function to logger (default is False)
2665 blfile: Name of a text file in which the best-fit
2666 parameter values to be written
2667 (default is "": no file/logger output)
2668
2669 Example:
2670 # return a scan baselined by a cubic spline consisting of 2 pieces
2671 # (i.e., 1 internal knot),
2672 # also with 3-sigma clipping, iteration up to 4 times
2673 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2674
2675 Note:
2676 The best-fit parameter values output in logger and/or blfile are now
2677 based on specunit of 'channel'.
2678 """
2679
2680 try:
2681 varlist = vars()
2682
2683 if insitu is None: insitu = rcParams['insitu']
2684 if insitu:
2685 workscan = self
2686 else:
2687 workscan = self.copy()
2688
2689 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2690 if mask is None: mask = []
2691 if npiece is None: npiece = 2
2692 if clipthresh is None: clipthresh = 3.0
2693 if clipniter is None: clipniter = 0
2694 if plot is None: plot = False
2695 if getresidual is None: getresidual = True
2696 if showprogress is None: showprogress = True
2697 if minnrow is None: minnrow = 1000
2698 if outlog is None: outlog = False
2699 if blfile is None: blfile = ''
2700
2701 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2702 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2703 getresidual,
2704 pack_progress_params(showprogress,
2705 minnrow), outlog,
2706 blfile)
2707 workscan._add_history("cspline_baseline", varlist)
2708
2709 if insitu:
2710 self._assign(workscan)
2711 else:
2712 return workscan
2713
2714 except RuntimeError, e:
2715 raise_fitting_failure_exception(e)
2716
2717 @asaplog_post_dec
2718 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2719 clipthresh=None, clipniter=None,
2720 edge=None, threshold=None, chan_avg_limit=None,
2721 getresidual=None, plot=None,
2722 showprogress=None, minnrow=None, outlog=None,
2723 blfile=None):
2724 """\
2725 Return a scan which has been baselined (all rows) by cubic spline
2726 function (piecewise cubic polynomial).
2727 Spectral lines are detected first using linefinder and masked out
2728 to avoid them affecting the baseline solution.
2729
2730 Parameters:
2731 insitu: if False a new scantable is returned.
2732 Otherwise, the scaling is done in-situ
2733 The default is taken from .asaprc (False)
2734 mask: an optional mask retreived from scantable
2735 npiece: Number of pieces. (default is 2)
2736 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2737 clipniter: maximum number of iteration of 'clipthresh'-sigma
2738 clipping (default is 0)
2739 edge: an optional number of channel to drop at
2740 the edge of spectrum. If only one value is
2741 specified, the same number will be dropped
2742 from both sides of the spectrum. Default
2743 is to keep all channels. Nested tuples
2744 represent individual edge selection for
2745 different IFs (a number of spectral channels
2746 can be different)
2747 threshold: the threshold used by line finder. It is
2748 better to keep it large as only strong lines
2749 affect the baseline solution.
2750 chan_avg_limit: a maximum number of consequtive spectral
2751 channels to average during the search of
2752 weak and broad lines. The default is no
2753 averaging (and no search for weak lines).
2754 If such lines can affect the fitted baseline
2755 (e.g. a high order polynomial is fitted),
2756 increase this parameter (usually values up
2757 to 8 are reasonable). Most users of this
2758 method should find the default value sufficient.
2759 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2760 plot the fit and the residual. In this each
2761 indivual fit has to be approved, by typing 'y'
2762 or 'n'
2763 getresidual: if False, returns best-fit values instead of
2764 residual. (default is True)
2765 showprogress: show progress status for large data.
2766 default is True.
2767 minnrow: minimum number of input spectra to show.
2768 default is 1000.
2769 outlog: Output the coefficients of the best-fit
2770 function to logger (default is False)
2771 blfile: Name of a text file in which the best-fit
2772 parameter values to be written
2773 (default is "": no file/logger output)
2774
2775 Example:
2776 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2777
2778 Note:
2779 The best-fit parameter values output in logger and/or blfile are now
2780 based on specunit of 'channel'.
2781 """
2782
2783 try:
2784 varlist = vars()
2785
2786 if insitu is None: insitu = rcParams['insitu']
2787 if insitu:
2788 workscan = self
2789 else:
2790 workscan = self.copy()
2791
2792 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2793 if mask is None: mask = []
2794 if npiece is None: npiece = 2
2795 if clipthresh is None: clipthresh = 3.0
2796 if clipniter is None: clipniter = 0
2797 if edge is None: edge = (0, 0)
2798 if threshold is None: threshold = 3
2799 if chan_avg_limit is None: chan_avg_limit = 1
2800 if plot is None: plot = False
2801 if getresidual is None: getresidual = True
2802 if showprogress is None: showprogress = True
2803 if minnrow is None: minnrow = 1000
2804 if outlog is None: outlog = False
2805 if blfile is None: blfile = ''
2806
2807 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2808 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2809 clipniter,
2810 normalise_edge_param(edge),
2811 threshold,
2812 chan_avg_limit, getresidual,
2813 pack_progress_params(showprogress,
2814 minnrow),
2815 outlog, blfile)
2816 workscan._add_history("auto_cspline_baseline", varlist)
2817
2818 if insitu:
2819 self._assign(workscan)
2820 else:
2821 return workscan
2822
2823 except RuntimeError, e:
2824 raise_fitting_failure_exception(e)
2825
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # capture the caller-supplied values before defaulting, so the
            # history entry records what was actually passed in
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive (Python-side) fitting, row by row
                # NOTE(review): output to blfile happens only if the file
                # already exists (append mode) — confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the per-row channel mask
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter attached to the fitter
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: fitting is delegated to C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2942
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # capture the caller-supplied values before defaulting, so the
            # history entry records what was actually passed in
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            edge = normalise_edge_param(edge)

            if plot:
                # interactive (Python-side) fitting, row by row
                # NOTE(review): output to blfile happens only if the file
                # already exists (append mode) — confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # edge holds per-IF (left, right) pairs flattened into
                    # one sequence; pick the pair for this row's IF
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit with the line channels masked out by the linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter attached to the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: fitting is delegated to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3094
3095 def _init_blinfo(self):
3096 """\
3097 Initialise the following three auxiliary members:
3098 blpars : parameters of the best-fit baseline,
3099 masklists : mask data (edge positions of masked channels) and
3100 actualmask : mask data (in boolean list),
3101 to keep for use later (including output to logger/text files).
3102 Used by poly_baseline() and auto_poly_baseline() in case of
3103 'plot=True'.
3104 """
3105 self.blpars = []
3106 self.masklists = []
3107 self.actualmask = []
3108 return
3109
3110 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3111 """\
3112 Append baseline-fitting related info to blpars, masklist and
3113 actualmask.
3114 """
3115 self.blpars.append(data_blpars)
3116 self.masklists.append(data_masklists)
3117 self.actualmask.append(data_actualmask)
3118 return
3119
3120 @asaplog_post_dec
3121 def rotate_linpolphase(self, angle):
3122 """\
3123 Rotate the phase of the complex polarization O=Q+iU correlation.
3124 This is always done in situ in the raw data. So if you call this
3125 function more than once then each call rotates the phase further.
3126
3127 Parameters:
3128
3129 angle: The angle (degrees) to rotate (add) by.
3130
3131 Example::
3132
3133 scan.rotate_linpolphase(2.3)
3134
3135 """
3136 varlist = vars()
3137 self._math._rotate_linpolphase(self, angle)
3138 self._add_history("rotate_linpolphase", varlist)
3139 return
3140
3141 @asaplog_post_dec
3142 def rotate_xyphase(self, angle):
3143 """\
3144 Rotate the phase of the XY correlation. This is always done in situ
3145 in the data. So if you call this function more than once
3146 then each call rotates the phase further.
3147
3148 Parameters:
3149
3150 angle: The angle (degrees) to rotate (add) by.
3151
3152 Example::
3153
3154 scan.rotate_xyphase(2.3)
3155
3156 """
3157 varlist = vars()
3158 self._math._rotate_xyphase(self, angle)
3159 self._add_history("rotate_xyphase", varlist)
3160 return
3161
3162 @asaplog_post_dec
3163 def swap_linears(self):
3164 """\
3165 Swap the linear polarisations XX and YY, or better the first two
3166 polarisations as this also works for ciculars.
3167 """
3168 varlist = vars()
3169 self._math._swap_linears(self)
3170 self._add_history("swap_linears", varlist)
3171 return
3172
3173 @asaplog_post_dec
3174 def invert_phase(self):
3175 """\
3176 Invert the phase of the complex polarisation
3177 """
3178 varlist = vars()
3179 self._math._invert_phase(self)
3180 self._add_history("invert_phase", varlist)
3181 return
3182
3183 @asaplog_post_dec
3184 def add(self, offset, insitu=None):
3185 """\
3186 Return a scan where all spectra have the offset added
3187
3188 Parameters:
3189
3190 offset: the offset
3191
3192 insitu: if False a new scantable is returned.
3193 Otherwise, the scaling is done in-situ
3194 The default is taken from .asaprc (False)
3195
3196 """
3197 if insitu is None: insitu = rcParams['insitu']
3198 self._math._setinsitu(insitu)
3199 varlist = vars()
3200 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3201 s._add_history("add", varlist)
3202 if insitu:
3203 self._assign(s)
3204 else:
3205 return s
3206
3207 @asaplog_post_dec
3208 def scale(self, factor, tsys=True, insitu=None):
3209 """\
3210
3211 Return a scan where all spectra are scaled by the given 'factor'
3212
3213 Parameters:
3214
3215 factor: the scaling factor (float or 1D float list)
3216
3217 insitu: if False a new scantable is returned.
3218 Otherwise, the scaling is done in-situ
3219 The default is taken from .asaprc (False)
3220
3221 tsys: if True (default) then apply the operation to Tsys
3222 as well as the data
3223
3224 """
3225 if insitu is None: insitu = rcParams['insitu']
3226 self._math._setinsitu(insitu)
3227 varlist = vars()
3228 s = None
3229 import numpy
3230 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3231 if isinstance(factor[0], list) or isinstance(factor[0],
3232 numpy.ndarray):
3233 from asapmath import _array2dOp
3234 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3235 else:
3236 s = scantable( self._math._arrayop( self, factor,
3237 "MUL", tsys ) )
3238 else:
3239 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3240 s._add_history("scale", varlist)
3241 if insitu:
3242 self._assign(s)
3243 else:
3244 return s
3245
3246 @preserve_selection
3247 def set_sourcetype(self, match, matchtype="pattern",
3248 sourcetype="reference"):
3249 """\
3250 Set the type of the source to be an source or reference scan
3251 using the provided pattern.
3252
3253 Parameters:
3254
3255 match: a Unix style pattern, regular expression or selector
3256
3257 matchtype: 'pattern' (default) UNIX style pattern or
3258 'regex' regular expression
3259
3260 sourcetype: the type of the source to use (source/reference)
3261
3262 """
3263 varlist = vars()
3264 basesel = self.get_selection()
3265 stype = -1
3266 if sourcetype.lower().startswith("r"):
3267 stype = 1
3268 elif sourcetype.lower().startswith("s"):
3269 stype = 0
3270 else:
3271 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3272 if matchtype.lower().startswith("p"):
3273 matchtype = "pattern"
3274 elif matchtype.lower().startswith("r"):
3275 matchtype = "regex"
3276 else:
3277 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3278 sel = selector()
3279 if isinstance(match, selector):
3280 sel = match
3281 else:
3282 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3283 self._setsourcetype(stype)
3284 self._add_history("set_sourcetype", varlist)
3285
3286 @asaplog_post_dec
3287 @preserve_selection
3288 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3289 """\
3290 This function allows to build quotients automatically.
3291 It assumes the observation to have the same number of
3292 "ons" and "offs"
3293
3294 Parameters:
3295
3296 preserve: you can preserve (default) the continuum or
3297 remove it. The equations used are
3298
3299 preserve: Output = Toff * (on/off) - Toff
3300
3301 remove: Output = Toff * (on/off) - Ton
3302
3303 mode: the on/off detection mode
3304 'paired' (default)
3305 identifies 'off' scans by the
3306 trailing '_R' (Mopra/Parkes) or
3307 '_e'/'_w' (Tid) and matches
3308 on/off pairs from the observing pattern
3309 'time'
3310 finds the closest off in time
3311
3312 .. todo:: verify argument is not implemented
3313
3314 """
3315 varlist = vars()
3316 modes = ["time", "paired"]
3317 if not mode in modes:
3318 msg = "please provide valid mode. Valid modes are %s" % (modes)
3319 raise ValueError(msg)
3320 s = None
3321 if mode.lower() == "paired":
3322 sel = self.get_selection()
3323 sel.set_query("SRCTYPE==psoff")
3324 self.set_selection(sel)
3325 offs = self.copy()
3326 sel.set_query("SRCTYPE==pson")
3327 self.set_selection(sel)
3328 ons = self.copy()
3329 s = scantable(self._math._quotient(ons, offs, preserve))
3330 elif mode.lower() == "time":
3331 s = scantable(self._math._auto_quotient(self, mode, preserve))
3332 s._add_history("auto_quotient", varlist)
3333 return s
3334
3335 @asaplog_post_dec
3336 def mx_quotient(self, mask = None, weight='median', preserve=True):
3337 """\
3338 Form a quotient using "off" beams when observing in "MX" mode.
3339
3340 Parameters:
3341
3342 mask: an optional mask to be used when weight == 'stddev'
3343
3344 weight: How to average the off beams. Default is 'median'.
3345
3346 preserve: you can preserve (default) the continuum or
3347 remove it. The equations used are:
3348
3349 preserve: Output = Toff * (on/off) - Toff
3350
3351 remove: Output = Toff * (on/off) - Ton
3352
3353 """
3354 mask = mask or ()
3355 varlist = vars()
3356 on = scantable(self._math._mx_extract(self, 'on'))
3357 preoff = scantable(self._math._mx_extract(self, 'off'))
3358 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3359 from asapmath import quotient
3360 q = quotient(on, off, preserve)
3361 q._add_history("mx_quotient", varlist)
3362 return q
3363
3364 @asaplog_post_dec
3365 def freq_switch(self, insitu=None):
3366 """\
3367 Apply frequency switching to the data.
3368
3369 Parameters:
3370
3371 insitu: if False a new scantable is returned.
3372 Otherwise, the swictching is done in-situ
3373 The default is taken from .asaprc (False)
3374
3375 """
3376 if insitu is None: insitu = rcParams['insitu']
3377 self._math._setinsitu(insitu)
3378 varlist = vars()
3379 s = scantable(self._math._freqswitch(self))
3380 s._add_history("freq_switch", varlist)
3381 if insitu:
3382 self._assign(s)
3383 else:
3384 return s
3385
3386 @asaplog_post_dec
3387 def recalc_azel(self):
3388 """Recalculate the azimuth and elevation for each position."""
3389 varlist = vars()
3390 self._recalcazel()
3391 self._add_history("recalc_azel", varlist)
3392 return
3393
3394 @asaplog_post_dec
3395 def __add__(self, other):
3396 varlist = vars()
3397 s = None
3398 if isinstance(other, scantable):
3399 s = scantable(self._math._binaryop(self, other, "ADD"))
3400 elif isinstance(other, float):
3401 s = scantable(self._math._unaryop(self, other, "ADD", False))
3402 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3403 if isinstance(other[0], list) \
3404 or isinstance(other[0], numpy.ndarray):
3405 from asapmath import _array2dOp
3406 s = _array2dOp( self.copy(), other, "ADD", False )
3407 else:
3408 s = scantable( self._math._arrayop( self.copy(), other,
3409 "ADD", False ) )
3410 else:
3411 raise TypeError("Other input is not a scantable or float value")
3412 s._add_history("operator +", varlist)
3413 return s
3414
3415 @asaplog_post_dec
3416 def __sub__(self, other):
3417 """
3418 implicit on all axes and on Tsys
3419 """
3420 varlist = vars()
3421 s = None
3422 if isinstance(other, scantable):
3423 s = scantable(self._math._binaryop(self, other, "SUB"))
3424 elif isinstance(other, float):
3425 s = scantable(self._math._unaryop(self, other, "SUB", False))
3426 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3427 if isinstance(other[0], list) \
3428 or isinstance(other[0], numpy.ndarray):
3429 from asapmath import _array2dOp
3430 s = _array2dOp( self.copy(), other, "SUB", False )
3431 else:
3432 s = scantable( self._math._arrayop( self.copy(), other,
3433 "SUB", False ) )
3434 else:
3435 raise TypeError("Other input is not a scantable or float value")
3436 s._add_history("operator -", varlist)
3437 return s
3438
3439 @asaplog_post_dec
3440 def __mul__(self, other):
3441 """
3442 implicit on all axes and on Tsys
3443 """
3444 varlist = vars()
3445 s = None
3446 if isinstance(other, scantable):
3447 s = scantable(self._math._binaryop(self, other, "MUL"))
3448 elif isinstance(other, float):
3449 s = scantable(self._math._unaryop(self, other, "MUL", False))
3450 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3451 if isinstance(other[0], list) \
3452 or isinstance(other[0], numpy.ndarray):
3453 from asapmath import _array2dOp
3454 s = _array2dOp( self.copy(), other, "MUL", False )
3455 else:
3456 s = scantable( self._math._arrayop( self.copy(), other,
3457 "MUL", False ) )
3458 else:
3459 raise TypeError("Other input is not a scantable or float value")
3460 s._add_history("operator *", varlist)
3461 return s
3462
3463
3464 @asaplog_post_dec
3465 def __div__(self, other):
3466 """
3467 implicit on all axes and on Tsys
3468 """
3469 varlist = vars()
3470 s = None
3471 if isinstance(other, scantable):
3472 s = scantable(self._math._binaryop(self, other, "DIV"))
3473 elif isinstance(other, float):
3474 if other == 0.0:
3475 raise ZeroDivisionError("Dividing by zero is not recommended")
3476 s = scantable(self._math._unaryop(self, other, "DIV", False))
3477 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3478 if isinstance(other[0], list) \
3479 or isinstance(other[0], numpy.ndarray):
3480 from asapmath import _array2dOp
3481 s = _array2dOp( self.copy(), other, "DIV", False )
3482 else:
3483 s = scantable( self._math._arrayop( self.copy(), other,
3484 "DIV", False ) )
3485 else:
3486 raise TypeError("Other input is not a scantable or float value")
3487 s._add_history("operator /", varlist)
3488 return s
3489
3490 @asaplog_post_dec
3491 def get_fit(self, row=0):
3492 """\
3493 Print or return the stored fits for a row in the scantable
3494
3495 Parameters:
3496
3497 row: the row which the fit has been applied to.
3498
3499 """
3500 if row > self.nrow():
3501 return
3502 from asap.asapfit import asapfit
3503 fit = asapfit(self._getfit(row))
3504 asaplog.push( '%s' %(fit) )
3505 return fit.as_dict()
3506
3507 @preserve_selection
3508 def flag_nans(self):
3509 """\
3510 Utility function to flag NaN values in the scantable.
3511 """
3512 import numpy
3513 basesel = self.get_selection()
3514 for i in range(self.nrow()):
3515 sel = self.get_row_selector(i)
3516 self.set_selection(basesel+sel)
3517 nans = numpy.isnan(self._getspectrum(0))
3518 if numpy.any(nans):
3519 bnans = [ bool(v) for v in nans]
3520 self.flag(bnans)
3521
3522 def get_row_selector(self, rowno):
3523 return selector(rows=[rowno])
3524
3525 def _add_history(self, funcname, parameters):
3526 if not rcParams['scantable.history']:
3527 return
3528 # create date
3529 sep = "##"
3530 from datetime import datetime
3531 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3532 hist = dstr+sep
3533 hist += funcname+sep#cdate+sep
3534 if parameters.has_key('self'):
3535 del parameters['self']
3536 for k, v in parameters.iteritems():
3537 if type(v) is dict:
3538 for k2, v2 in v.iteritems():
3539 hist += k2
3540 hist += "="
3541 if isinstance(v2, scantable):
3542 hist += 'scantable'
3543 elif k2 == 'mask':
3544 if isinstance(v2, list) or isinstance(v2, tuple):
3545 hist += str(self._zip_mask(v2))
3546 else:
3547 hist += str(v2)
3548 else:
3549 hist += str(v2)
3550 else:
3551 hist += k
3552 hist += "="
3553 if isinstance(v, scantable):
3554 hist += 'scantable'
3555 elif k == 'mask':
3556 if isinstance(v, list) or isinstance(v, tuple):
3557 hist += str(self._zip_mask(v))
3558 else:
3559 hist += str(v)
3560 else:
3561 hist += str(v)
3562 hist += sep
3563 hist = hist[:-2] # remove trailing '##'
3564 self._addhistory(hist)
3565
3566
3567 def _zip_mask(self, mask):
3568 mask = list(mask)
3569 i = 0
3570 segments = []
3571 while mask[i:].count(1):
3572 i += mask[i:].index(1)
3573 if mask[i:].count(0):
3574 j = i + mask[i:].index(0)
3575 else:
3576 j = len(mask)
3577 segments.append([i, j])
3578 i = j
3579 return segments
3580
3581 def _get_ordinate_label(self):
3582 fu = "("+self.get_fluxunit()+")"
3583 import re
3584 lbl = "Intensity"
3585 if re.match(".K.", fu):
3586 lbl = "Brightness Temperature "+ fu
3587 elif re.match(".Jy.", fu):
3588 lbl = "Flux density "+ fu
3589 return lbl
3590
3591 def _check_ifs(self):
3592# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
3593 nchans = [self.nchan(i) for i in self.getifnos()]
3594 nchans = filter(lambda t: t > 0, nchans)
3595 return (sum(nchans)/len(nchans) == nchans[0])
3596
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable instance.

        Parameters:

            names:      list of file/directory names to import

            unit:       brightness unit to set after filling, or None
                        to leave it unchanged

            average:    if True, time-average the integrations of each
                        file (scan-wise) while reading

            opts:       extra options passed to the filler's open()

        Raises:

            IOError if any of the given paths does not exist.
        """
        first = True
        fullnames = []
        # expand env vars / '~' and verify every input exists up front,
        # before any (potentially slow) filling starts
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 1 = disk-based storage, 0 = memory-based (per .asaprc)
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # choose the reader for the on-disk format: MeasurementSet
            # vs. anything handled by the generic filler
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # after the first file, merge each new table into the data
            # already accumulated in self
            if not first:
                tbl = self._math._merge([self, tbl])
            # rebind this instance to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
3637
3638
3639 def __getitem__(self, key):
3640 if key < 0:
3641 key += self.nrow()
3642 if key >= self.nrow():
3643 raise IndexError("Row index out of range.")
3644 return self._getspectrum(key)
3645
3646 def __setitem__(self, key, value):
3647 if key < 0:
3648 key += self.nrow()
3649 if key >= self.nrow():
3650 raise IndexError("Row index out of range.")
3651 if not hasattr(value, "__len__") or \
3652 len(value) > self.nchan(self.getif(key)):
3653 raise ValueError("Spectrum length doesn't match.")
3654 return self._setspectrum(value, key)
3655
3656 def __len__(self):
3657 return self.nrow()
3658
3659 def __iter__(self):
3660 for i in range(len(self)):
3661 yield self[i]
Note: See TracBrowser for help on using the repository browser.