source: trunk/python/scantable.py@ 2296

Last change on this file since 2296 was 2290, checked in by Kana Sugimoto, 13 years ago

New Development: Yes

JIRA Issue: Yes (CAS-3219/ASAP-247)

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs: sdlist unit test (reference data to be updated)

Put in Release Notes: Yes

Module(s): scantable.summary, sdlist

Description:

Output format of scantable summary changed.
Reduced use of TableIterator for speed-up: scantable.summary/sdlist now lists a scantable
with 348,000 records (NRO 45m with 25 beams x 13,920 scans) in ~30 sec (previously ~7 min).


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.2 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler, msfiller
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even if *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved_selection = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always restore the caller's selection
            obj.set_selection(saved_selection)
        return result
    return wrap
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    info_path = filename + '/table.info'
    is_casa_table = ( os.path.isdir(filename)
                      and os.path.exists(info_path)
                      and os.path.exists(filename + '/table.dat') )
    if not is_casa_table:
        return False
    f = open(info_path)
    first_line = f.readline()
    f.close()
    # Any casa table that is not a Measurement Set is treated as a
    # scantable (see the original, stricter 'Scantable' check).
    return first_line.find('Measurement Set') == -1
54## return (os.path.isdir(filename)
55## and not os.path.exists(filename+'/table.f1')
56## and os.path.exists(filename+'/table.info'))
57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not os.path.isdir(filename):
        return False
    if not (os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    header = f.readline()
    f.close()
    # casa tables record their type in the first line of table.info
    return header.find('Measurement Set') != -1
78
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                          or [[5,5],10], but can NOT be simplified to [5,10].
                          when [5,10] given, it is interpreted as
                          [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # NOTE: the 'raise X, msg' statements were rewritten as 'raise X(msg)'
    # (valid in both Python 2 and 3) and the backslash-continued message
    # strings, which embedded long runs of indentation spaces into the
    # message text, were replaced by clean adjacent-literal concatenation.
    pair_msg = ("Each element of the 'edge' tuple has to be a pair of "
                "integers or an integer.")
    if isinstance(edge, (list, tuple)):
        # validate each per-IF entry before any conversion
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError(pair_msg)
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError(pair_msg)
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # canonicalise scalars and trivial containers
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [ edge[0], edge[0] ]       # e.g. [1] => [1,1]

    # a "common" edge is a flat [left, right] applied to every IF;
    # anything containing a nested pair, or longer than 2, is per-IF
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # flatten: expand single integers to symmetric pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
163
def raise_fitting_failure_exception(e):
    """Report a failed fit: push it to the log in verbose mode,
    otherwise re-raise as a RuntimeError carrying both messages."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
171
def pack_progress_params(showprogress, minnrow):
    """Return '<showprogress>,<minnrow>' with the boolean flag lower-cased,
    the comma-separated form expected by the C++ progress reporter."""
    return '%s,%s' % (str(showprogress).lower(), minnrow)
174
class scantable(Scantable):
    """\
    The ASAP container for scans (single-dish data).

    Wraps the C++ ``Scantable`` and provides the user-level python API:
    construction from asap tables, rpfits/sdfits files or MeasurementSets,
    selection handling, per-row accessors, statistics and flagging.
    """
179
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # legacy getpt/antenna handling, now routed through **args (below)
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()  # snapshot of arguments for the history record
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy construction from an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native asap table: open directly (disk or memory)
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #    and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: forward only the recognised filler
                    # options ('getpt', 'antenna') from **args
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the filler decide
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list of input files, filled as one scantable
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
284
285 @asaplog_post_dec
286 def save(self, name=None, format=None, overwrite=False):
287 """\
288 Store the scantable on disk. This can be an asap (aips++) Table,
289 SDFITS or MS2 format.
290
291 Parameters:
292
293 name: the name of the outputfile. For format "ASCII"
294 this is the root file name (data in 'name'.txt
295 and header in 'name'_header.txt)
296
297 format: an optional file format. Default is ASAP.
298 Allowed are:
299
300 * 'ASAP' (save as ASAP [aips++] Table),
301 * 'SDFITS' (save as SDFITS file)
302 * 'ASCII' (saves as ascii text file)
303 * 'MS2' (saves as an casacore MeasurementSet V2)
304 * 'FITS' (save as image FITS - not readable by class)
305 * 'CLASS' (save as FITS readable by CLASS)
306
307 overwrite: If the file should be overwritten if it exists.
308 The default False is to return with warning
309 without writing the output. USE WITH CARE.
310
311 Example::
312
313 scan.save('myscan.asap')
314 scan.save('myscan.sdfits', 'SDFITS')
315
316 """
317 from os import path
318 format = format or rcParams['scantable.save']
319 suffix = '.'+format.lower()
320 if name is None or name == "":
321 name = 'scantable'+suffix
322 msg = "No filename given. Using default name %s..." % name
323 asaplog.push(msg)
324 name = path.expandvars(name)
325 if path.isfile(name) or path.isdir(name):
326 if not overwrite:
327 msg = "File %s exists." % name
328 raise IOError(msg)
329 format2 = format.upper()
330 if format2 == 'ASAP':
331 self._save(name)
332 elif format2 == 'MS2':
333 msopt = {'ms': {'overwrite': overwrite } }
334 from asap._asap import mswriter
335 writer = mswriter( self )
336 writer.write( name, msopt )
337 else:
338 from asap._asap import stwriter as stw
339 writer = stw(format2)
340 writer.write(self, name)
341 return
342
343 def copy(self):
344 """Return a copy of this scantable.
345
346 *Note*:
347
348 This makes a full (deep) copy. scan2 = scan1 makes a reference.
349
350 Example::
351
352 copiedscan = scan.copy()
353
354 """
355 sd = scantable(Scantable._copy(self))
356 return sd
357
358 def drop_scan(self, scanid=None):
359 """\
360 Return a new scantable where the specified scan number(s) has(have)
361 been dropped.
362
363 Parameters:
364
365 scanid: a (list of) scan number(s)
366
367 """
368 from asap import _is_sequence_or_number as _is_valid
369 from asap import _to_list
370 from asap import unique
371 if not _is_valid(scanid):
372 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
373 scanid = _to_list(scanid)
374 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
375 for sid in scanid: allscans.remove(sid)
376 if len(allscans) == 0:
377 raise ValueError("Can't remove all scans")
378 sel = selector(scans=allscans)
379 return self._select_copy(sel)
380
381 def _select_copy(self, selection):
382 orig = self.get_selection()
383 self.set_selection(orig+selection)
384 cp = self.copy()
385 self.set_selection(orig)
386 return cp
387
388 def get_scan(self, scanid=None):
389 """\
390 Return a specific scan (by scanno) or collection of scans (by
391 source name) in a new scantable.
392
393 *Note*:
394
395 See scantable.drop_scan() for the inverse operation.
396
397 Parameters:
398
399 scanid: a (list of) scanno or a source name, unix-style
400 patterns are accepted for source name matching, e.g.
401 '*_R' gets all 'ref scans
402
403 Example::
404
405 # get all scans containing the source '323p459'
406 newscan = scan.get_scan('323p459')
407 # get all 'off' scans
408 refscans = scan.get_scan('*_R')
409 # get a susbset of scans by scanno (as listed in scan.summary())
410 newscan = scan.get_scan([0, 2, 7, 10])
411
412 """
413 if scanid is None:
414 raise RuntimeError( 'Please specify a scan no or name to '
415 'retrieve from the scantable' )
416 try:
417 bsel = self.get_selection()
418 sel = selector()
419 if type(scanid) is str:
420 sel.set_name(scanid)
421 return self._select_copy(sel)
422 elif type(scanid) is int:
423 sel.set_scans([scanid])
424 return self._select_copy(sel)
425 elif type(scanid) is list:
426 sel.set_scans(scanid)
427 return self._select_copy(sel)
428 else:
429 msg = "Illegal scanid type, use 'int' or 'list' if ints."
430 raise TypeError(msg)
431 except RuntimeError:
432 raise
433
434 def __str__(self):
435 return Scantable._summary(self)
436
437 def summary(self, filename=None):
438 """\
439 Print a summary of the contents of this scantable.
440
441 Parameters:
442
443 filename: the name of a file to write the putput to
444 Default - no file output
445
446 """
447# info = Scantable._summary(self)
448 if filename is not None:
449 if filename is "":
450 filename = 'scantable_summary.txt'
451 from os.path import expandvars, isdir
452 filename = expandvars(filename)
453# if not isdir(filename):
454# data = open(filename, 'w')
455# data.write(info)
456# data.close()
457# else:
458 if isdir(filename):
459 msg = "Illegal file name '%s'." % (filename)
460 raise IOError(msg)
461 else:
462 filename = ""
463 Scantable._summary(self, filename)
464# info = Scantable._summary(self, filename)
465# return page(info)
466
467 def get_spectrum(self, rowno):
468 """Return the spectrum for the current row in the scantable as a list.
469
470 Parameters:
471
472 rowno: the row number to retrieve the spectrum from
473
474 """
475 return self._getspectrum(rowno)
476
477 def get_mask(self, rowno):
478 """Return the mask for the current row in the scantable as a list.
479
480 Parameters:
481
482 rowno: the row number to retrieve the mask from
483
484 """
485 return self._getmask(rowno)
486
487 def set_spectrum(self, spec, rowno):
488 """Set the spectrum for the current row in the scantable.
489
490 Parameters:
491
492 spec: the new spectrum
493
494 rowno: the row number to set the spectrum for
495
496 """
497 assert(len(spec) == self.nchan())
498 return self._setspectrum(spec, rowno)
499
500 def get_coordinate(self, rowno):
501 """Return the (spectral) coordinate for a a given 'rowno'.
502
503 *Note*:
504
505 * This coordinate is only valid until a scantable method modifies
506 the frequency axis.
507 * This coordinate does contain the original frequency set-up
508 NOT the new frame. The conversions however are done using the user
509 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
510 use scantable.freq_align first. Without it there is no closure,
511 i.e.::
512
513 c = myscan.get_coordinate(0)
514 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
515
516 Parameters:
517
518 rowno: the row number for the spectral coordinate
519
520 """
521 return coordinate(Scantable.get_coordinate(self, rowno))
522
523 def get_selection(self):
524 """\
525 Get the selection object currently set on this scantable.
526
527 Example::
528
529 sel = scan.get_selection()
530 sel.set_ifs(0) # select IF 0
531 scan.set_selection(sel) # apply modified selection
532
533 """
534 return selector(self._getselection())
535
536 def set_selection(self, selection=None, **kw):
537 """\
538 Select a subset of the data. All following operations on this scantable
539 are only applied to thi selection.
540
541 Parameters:
542
543 selection: a selector object (default unset the selection), or
544 any combination of "pols", "ifs", "beams", "scans",
545 "cycles", "name", "query"
546
547 Examples::
548
549 sel = selector() # create a selection object
550 self.set_scans([0, 3]) # select SCANNO 0 and 3
551 scan.set_selection(sel) # set the selection
552 scan.summary() # will only print summary of scanno 0 an 3
553 scan.set_selection() # unset the selection
554 # or the equivalent
555 scan.set_selection(scans=[0,3])
556 scan.summary() # will only print summary of scanno 0 an 3
557 scan.set_selection() # unset the selection
558
559 """
560 if selection is None:
561 # reset
562 if len(kw) == 0:
563 selection = selector()
564 else:
565 # try keywords
566 for k in kw:
567 if k not in selector.fields:
568 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
569 selection = selector(**kw)
570 self._setselection(selection)
571
572 def get_row(self, row=0, insitu=None):
573 """\
574 Select a row in the scantable.
575 Return a scantable with single row.
576
577 Parameters:
578
579 row: row no of integration, default is 0.
580 insitu: if False a new scantable is returned. Otherwise, the
581 scaling is done in-situ. The default is taken from .asaprc
582 (False)
583
584 """
585 if insitu is None: insitu = rcParams['insitu']
586 if not insitu:
587 workscan = self.copy()
588 else:
589 workscan = self
590 # Select a row
591 sel=selector()
592 sel.set_rows([row])
593 #sel.set_scans([workscan.getscan(row)])
594 #sel.set_cycles([workscan.getcycle(row)])
595 #sel.set_beams([workscan.getbeam(row)])
596 #sel.set_ifs([workscan.getif(row)])
597 #sel.set_polarisations([workscan.getpol(row)])
598 #sel.set_name(workscan._getsourcename(row))
599 workscan.set_selection(sel)
600 if not workscan.nrow() == 1:
601 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
602 raise RuntimeError(msg)
603 del sel
604 if insitu:
605 self._assign(workscan)
606 else:
607 return workscan
608
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/vel) of the
        # extremum instead of the data value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also report the channel of the extremum, so precompute
        # the per-row channel positions
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            # data-value statistics come from the C++ math layer, either
            # for all rows at once or for the single requested row
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable per-row report while (for '*_abc')
        # collecting the abscissa values into statvals
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        import os
        # route the report through a per-user temp file so it can be
        # pushed to the asap logger in one block
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
716
717 def chan2data(self, rowno=0, chan=0):
718 """\
719 Returns channel/frequency/velocity and spectral value
720 at an arbitrary row and channel in the scantable.
721
722 Parameters:
723
724 rowno: a row number in the scantable. Default is the
725 first row, i.e. rowno=0
726
727 chan: a channel in the scantable. Default is the first
728 channel, i.e. pos=0
729
730 """
731 if isinstance(rowno, int) and isinstance(chan, int):
732 qx = {'unit': self.get_unit(),
733 'value': self._getabcissa(rowno)[chan]}
734 qy = {'unit': self.get_fluxunit(),
735 'value': self._getspectrum(rowno)[chan]}
736 return qx, qy
737
738 def stddev(self, mask=None):
739 """\
740 Determine the standard deviation of the current beam/if/pol
741 Takes a 'mask' as an optional parameter to specify which
742 channels should be excluded.
743
744 Parameters:
745
746 mask: an optional mask specifying where the standard
747 deviation should be determined.
748
749 Example::
750
751 scan.set_unit('channel')
752 msk = scan.create_mask([100, 200], [500, 600])
753 scan.stddev(mask=m)
754
755 """
756 return self.stats(stat='stddev', mask=mask);
757
758
759 def get_column_names(self):
760 """\
761 Return a list of column names, which can be used for selection.
762 """
763 return list(Scantable.get_column_names(self))
764
765 def get_tsys(self, row=-1):
766 """\
767 Return the System temperatures.
768
769 Parameters:
770
771 row: the rowno to get the information for. (default all rows)
772
773 Returns:
774
775 a list of Tsys values for the current selection
776
777 """
778 if row > -1:
779 return self._get_column(self._gettsys, row)
780 return self._row_callback(self._gettsys, "Tsys")
781
782
783 def get_weather(self, row=-1):
784 """\
785 Return the weather informations.
786
787 Parameters:
788
789 row: the rowno to get the information for. (default all rows)
790
791 Returns:
792
793 a dict or list of of dicts of values for the current selection
794
795 """
796
797 values = self._get_column(self._get_weather, row)
798 if row > -1:
799 return {'temperature': values[0],
800 'pressure': values[1], 'humidity' : values[2],
801 'windspeed' : values[3], 'windaz' : values[4]
802 }
803 else:
804 out = []
805 for r in values:
806
807 out.append({'temperature': r[0],
808 'pressure': r[1], 'humidity' : r[2],
809 'windspeed' : r[3], 'windaz' : r[4]
810 })
811 return out
812
813 def _row_callback(self, callback, label):
814 out = ""
815 outvec = []
816 sep = '-'*50
817 for i in range(self.nrow()):
818 tm = self._gettime(i)
819 src = self._getsourcename(i)
820 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
821 out += 'Time[%s]:\n' % (tm)
822 if self.nbeam(-1) > 1:
823 out += ' Beam[%d] ' % (self.getbeam(i))
824 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
825 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
826 outvec.append(callback(i))
827 out += '= %3.3f\n' % (outvec[i])
828 out += sep+'\n'
829
830 asaplog.push(sep)
831 asaplog.push(" %s" % (label))
832 asaplog.push(sep)
833 asaplog.push(out)
834 asaplog.post()
835 return outvec
836
837 def _get_column(self, callback, row=-1, *args):
838 """
839 """
840 if row == -1:
841 return [callback(i, *args) for i in range(self.nrow())]
842 else:
843 if 0 <= row < self.nrow():
844 return callback(row, *args)
845
846
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1: one more digit
            # than needed to resolve the shortest integration time among
            # the selected rows, but at least seconds precision (6)
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            # precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # strip the fields that _gettime did not emit at this precision
        # from the strptime format: first ".%f", then "%S", then "%M"
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
890
891
892 def get_inttime(self, row=-1):
893 """\
894 Get a list of integration times for the observations.
895 Return a time in seconds for each integration in the scantable.
896
897 Parameters:
898
899 row: row no of integration. Default -1 return all rows.
900
901 """
902 return self._get_column(self._getinttime, row)
903
904
905 def get_sourcename(self, row=-1):
906 """\
907 Get a list source names for the observations.
908 Return a string for each integration in the scantable.
909 Parameters:
910
911 row: row no of integration. Default -1 return all rows.
912
913 """
914 return self._get_column(self._getsourcename, row)
915
916 def get_elevation(self, row=-1):
917 """\
918 Get a list of elevations for the observations.
919 Return a float for each integration in the scantable.
920
921 Parameters:
922
923 row: row no of integration. Default -1 return all rows.
924
925 """
926 return self._get_column(self._getelevation, row)
927
928 def get_azimuth(self, row=-1):
929 """\
930 Get a list of azimuths for the observations.
931 Return a float for each integration in the scantable.
932
933 Parameters:
934 row: row no of integration. Default -1 return all rows.
935
936 """
937 return self._get_column(self._getazimuth, row)
938
939 def get_parangle(self, row=-1):
940 """\
941 Get a list of parallactic angles for the observations.
942 Return a float for each integration in the scantable.
943
944 Parameters:
945
946 row: row no of integration. Default -1 return all rows.
947
948 """
949 return self._get_column(self._getparangle, row)
950
951 def get_direction(self, row=-1):
952 """
953 Get a list of Positions on the sky (direction) for the observations.
954 Return a string for each integration in the scantable.
955
956 Parameters:
957
958 row: row no of integration. Default -1 return all rows
959
960 """
961 return self._get_column(self._getdirection, row)
962
963 def get_directionval(self, row=-1):
964 """\
965 Get a list of Positions on the sky (direction) for the observations.
966 Return a float for each integration in the scantable.
967
968 Parameters:
969
970 row: row no of integration. Default -1 return all rows
971
972 """
973 return self._get_column(self._getdirectionvec, row)
974
975 @asaplog_post_dec
976 def set_unit(self, unit='channel'):
977 """\
978 Set the unit for all following operations on this scantable
979
980 Parameters:
981
982 unit: optional unit, default is 'channel'. Use one of '*Hz',
983 'km/s', 'channel' or equivalent ''
984
985 """
986 varlist = vars()
987 if unit in ['', 'pixel', 'channel']:
988 unit = ''
989 inf = list(self._getcoordinfo())
990 inf[0] = unit
991 self._setcoordinfo(inf)
992 self._add_history("set_unit", varlist)
993
994 @asaplog_post_dec
995 def set_instrument(self, instr):
996 """\
997 Set the instrument for subsequent processing.
998
999 Parameters:
1000
1001 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1002 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1003
1004 """
1005 self._setInstrument(instr)
1006 self._add_history("set_instument", vars())
1007
1008 @asaplog_post_dec
1009 def set_feedtype(self, feedtype):
1010 """\
1011 Overwrite the feed type, which might not be set correctly.
1012
1013 Parameters:
1014
1015 feedtype: 'linear' or 'circular'
1016
1017 """
1018 self._setfeedtype(feedtype)
1019 self._add_history("set_feedtype", vars())
1020
1021 @asaplog_post_dec
1022 def set_doppler(self, doppler='RADIO'):
1023 """\
1024 Set the doppler for all following operations on this scantable.
1025
1026 Parameters:
1027
1028 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1029
1030 """
1031 varlist = vars()
1032 inf = list(self._getcoordinfo())
1033 inf[2] = doppler
1034 self._setcoordinfo(inf)
1035 self._add_history("set_doppler", vars())
1036
1037 @asaplog_post_dec
1038 def set_freqframe(self, frame=None):
1039 """\
1040 Set the frame type of the Spectral Axis.
1041
1042 Parameters:
1043
1044 frame: an optional frame type, default 'LSRK'. Valid frames are:
1045 'TOPO', 'LSRD', 'LSRK', 'BARY',
1046 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1047
1048 Example::
1049
1050 scan.set_freqframe('BARY')
1051
1052 """
1053 frame = frame or rcParams['scantable.freqframe']
1054 varlist = vars()
1055 # "REST" is not implemented in casacore
1056 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1057 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1058 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1059 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1060
1061 if frame in valid:
1062 inf = list(self._getcoordinfo())
1063 inf[1] = frame
1064 self._setcoordinfo(inf)
1065 self._add_history("set_freqframe", varlist)
1066 else:
1067 msg = "Please specify a valid freq type. Valid types are:\n", valid
1068 raise TypeError(msg)
1069
1070 @asaplog_post_dec
1071 def set_dirframe(self, frame=""):
1072 """\
1073 Set the frame type of the Direction on the sky.
1074
1075 Parameters:
1076
1077 frame: an optional frame type, default ''. Valid frames are:
1078 'J2000', 'B1950', 'GALACTIC'
1079
1080 Example:
1081
1082 scan.set_dirframe('GALACTIC')
1083
1084 """
1085 varlist = vars()
1086 Scantable.set_dirframe(self, frame)
1087 self._add_history("set_dirframe", varlist)
1088
1089 def get_unit(self):
1090 """\
1091 Get the default unit set in this scantable
1092
1093 Returns:
1094
1095 A unit string
1096
1097 """
1098 inf = self._getcoordinfo()
1099 unit = inf[0]
1100 if unit == '': unit = 'channel'
1101 return unit
1102
1103 @asaplog_post_dec
1104 def get_abcissa(self, rowno=0):
1105 """\
1106 Get the abcissa in the current coordinate setup for the currently
1107 selected Beam/IF/Pol
1108
1109 Parameters:
1110
1111 rowno: an optional row number in the scantable. Default is the
1112 first row, i.e. rowno=0
1113
1114 Returns:
1115
1116 The abcissa values and the format string (as a dictionary)
1117
1118 """
1119 abc = self._getabcissa(rowno)
1120 lbl = self._getabcissalabel(rowno)
1121 return abc, lbl
1122
1123 @asaplog_post_dec
1124 def flag(self, row=-1, mask=None, unflag=False):
1125 """\
1126 Flag the selected data using an optional channel mask.
1127
1128 Parameters:
1129
1130 row: an optional row number in the scantable.
1131 Default -1 flags all rows
1132
1133 mask: an optional channel mask, created with create_mask. Default
1134 (no mask) is all channels.
1135
1136 unflag: if True, unflag the data
1137
1138 """
1139 varlist = vars()
1140 mask = mask or []
1141 self._flag(row, mask, unflag)
1142 self._add_history("flag", varlist)
1143
1144 @asaplog_post_dec
1145 def flag_row(self, rows=[], unflag=False):
1146 """\
1147 Flag the selected data in row-based manner.
1148
1149 Parameters:
1150
1151 rows: list of row numbers to be flagged. Default is no row
1152 (must be explicitly specified to execute row-based flagging).
1153
1154 unflag: if True, unflag the data.
1155
1156 """
1157 varlist = vars()
1158 self._flag_row(rows, unflag)
1159 self._add_history("flag_row", varlist)
1160
1161 @asaplog_post_dec
1162 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1163 """\
1164 Flag the selected data outside a specified range (in channel-base)
1165
1166 Parameters:
1167
1168 uthres: upper threshold.
1169
1170 dthres: lower threshold
1171
1172 clipoutside: True for flagging data outside the range [dthres:uthres].
1173 False for flagging data inside the range.
1174
1175 unflag: if True, unflag the data.
1176
1177 """
1178 varlist = vars()
1179 self._clip(uthres, dthres, clipoutside, unflag)
1180 self._add_history("clip", varlist)
1181
1182 @asaplog_post_dec
1183 def lag_flag(self, start, end, unit="MHz", insitu=None):
1184 """\
1185 Flag the data in 'lag' space by providing a frequency to remove.
1186 Flagged data in the scantable get interpolated over the region.
1187 No taper is applied.
1188
1189 Parameters:
1190
1191 start: the start frequency (really a period within the
1192 bandwidth) or period to remove
1193
1194 end: the end frequency or period to remove
1195
1196 unit: the frequency unit (default "MHz") or "" for
1197 explicit lag channels
1198
1199 *Notes*:
1200
1201 It is recommended to flag edges of the band or strong
1202 signals beforehand.
1203
1204 """
1205 if insitu is None: insitu = rcParams['insitu']
1206 self._math._setinsitu(insitu)
1207 varlist = vars()
1208 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1209 if not (unit == "" or base.has_key(unit)):
1210 raise ValueError("%s is not a valid unit." % unit)
1211 if unit == "":
1212 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1213 else:
1214 s = scantable(self._math._lag_flag(self, start*base[unit],
1215 end*base[unit], "frequency"))
1216 s._add_history("lag_flag", varlist)
1217 if insitu:
1218 self._assign(s)
1219 else:
1220 return s
1221
1222 @asaplog_post_dec
1223 def fft(self, rowno=[], mask=[], getrealimag=False):
1224 """\
1225 Apply FFT to the spectra.
1226 Flagged data in the scantable get interpolated over the region.
1227
1228 Parameters:
1229
1230 rowno: The row number(s) to be processed. int, list
1231 and tuple are accepted. By default ([]), FFT
1232 is applied to the whole data.
1233
1234 mask: Auxiliary channel mask(s). Given as a boolean
1235 list, it is applied to all specified rows.
1236 A list of boolean lists can also be used to
1237 apply different masks. In the latter case, the
1238 length of 'mask' must be the same as that of
1239 'rowno'. The default is [].
1240
1241 getrealimag: If True, returns the real and imaginary part
1242 values of the complex results.
1243 If False (the default), returns the amplitude
1244 (absolute value) normalised with Ndata/2 and
1245 phase (argument, in unit of radian).
1246
1247 Returns:
1248
1249 A list of dictionaries containing the results for each spectrum.
1250 Each dictionary contains two values, the real and the imaginary
1251 parts when getrealimag = True, or the amplitude(absolute value)
1252 and the phase(argument) when getrealimag = False. The key for
1253 these values are 'real' and 'imag', or 'ampl' and 'phase',
1254 respectively.
1255 """
1256 if isinstance(rowno, int):
1257 rowno = [rowno]
1258 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1259 raise TypeError("The row number(s) must be int, list or tuple.")
1260
1261 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1262
1263 if not (isinstance(mask, list) or isinstance(mask, tuple)):
1264 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1265 if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1266 if isinstance(mask[0], bool): mask = [mask]
1267 elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1268 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1269
1270 usecommonmask = (len(mask) == 1)
1271 if not usecommonmask:
1272 if len(mask) != len(rowno):
1273 raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1274 for amask in mask:
1275 if len(amask) != self.nchan():
1276 raise ValueError("The spectra and the mask have different length.")
1277
1278 res = []
1279
1280 imask = 0
1281 for whichrow in rowno:
1282 fspec = self._fft(whichrow, mask[imask], getrealimag)
1283 nspec = len(fspec)
1284
1285 i=0
1286 v1=[]
1287 v2=[]
1288 reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
1289
1290 while (i < nspec):
1291 v1.append(fspec[i])
1292 v2.append(fspec[i+1])
1293 i+=2
1294
1295 if getrealimag:
1296 reselem["real"] += v1
1297 reselem["imag"] += v2
1298 else:
1299 reselem["ampl"] += v1
1300 reselem["phase"] += v2
1301
1302 res.append(reselem)
1303
1304 if not usecommonmask: imask += 1
1305
1306 return res
1307
1308 @asaplog_post_dec
1309 def create_mask(self, *args, **kwargs):
1310 """\
1311 Compute and return a mask based on [min, max] windows.
1312 The specified windows are to be INCLUDED, when the mask is
1313 applied.
1314
1315 Parameters:
1316
1317 [min, max], [min2, max2], ...
1318 Pairs of start/end points (inclusive)specifying the regions
1319 to be masked
1320
1321 invert: optional argument. If specified as True,
1322 return an inverted mask, i.e. the regions
1323 specified are EXCLUDED
1324
1325 row: create the mask using the specified row for
1326 unit conversions, default is row=0
1327 only necessary if frequency varies over rows.
1328
1329 Examples::
1330
1331 scan.set_unit('channel')
1332 # a)
1333 msk = scan.create_mask([400, 500], [800, 900])
1334 # masks everything outside 400 and 500
1335 # and 800 and 900 in the unit 'channel'
1336
1337 # b)
1338 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1339 # masks the regions between 400 and 500
1340 # and 800 and 900 in the unit 'channel'
1341
1342 # c)
1343 #mask only channel 400
1344 msk = scan.create_mask([400])
1345
1346 """
1347 row = kwargs.get("row", 0)
1348 data = self._getabcissa(row)
1349 u = self._getcoordinfo()[0]
1350 if u == "":
1351 u = "channel"
1352 msg = "The current mask window unit is %s" % u
1353 i = self._check_ifs()
1354 if not i:
1355 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1356 asaplog.push(msg)
1357 n = self.nchan()
1358 msk = _n_bools(n, False)
1359 # test if args is a 'list' or a 'normal *args - UGLY!!!
1360
1361 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1362 and args or args[0]
1363 for window in ws:
1364 if len(window) == 1:
1365 window = [window[0], window[0]]
1366 if len(window) == 0 or len(window) > 2:
1367 raise ValueError("A window needs to be defined as [start(, end)]")
1368 if window[0] > window[1]:
1369 tmp = window[0]
1370 window[0] = window[1]
1371 window[1] = tmp
1372 for i in range(n):
1373 if data[i] >= window[0] and data[i] <= window[1]:
1374 msk[i] = True
1375 if kwargs.has_key('invert'):
1376 if kwargs.get('invert'):
1377 msk = mask_not(msk)
1378 return msk
1379
1380 def get_masklist(self, mask=None, row=0, silent=False):
1381 """\
1382 Compute and return a list of mask windows, [min, max].
1383
1384 Parameters:
1385
1386 mask: channel mask, created with create_mask.
1387
1388 row: calcutate the masklist using the specified row
1389 for unit conversions, default is row=0
1390 only necessary if frequency varies over rows.
1391
1392 Returns:
1393
1394 [min, max], [min2, max2], ...
1395 Pairs of start/end points (inclusive)specifying
1396 the masked regions
1397
1398 """
1399 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1400 raise TypeError("The mask should be list or tuple.")
1401 if len(mask) < 2:
1402 raise TypeError("The mask elements should be > 1")
1403 if self.nchan() != len(mask):
1404 msg = "Number of channels in scantable != number of mask elements"
1405 raise TypeError(msg)
1406 data = self._getabcissa(row)
1407 u = self._getcoordinfo()[0]
1408 if u == "":
1409 u = "channel"
1410 msg = "The current mask window unit is %s" % u
1411 i = self._check_ifs()
1412 if not i:
1413 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1414 if not silent:
1415 asaplog.push(msg)
1416 masklist=[]
1417 ist, ien = None, None
1418 ist, ien=self.get_mask_indices(mask)
1419 if ist is not None and ien is not None:
1420 for i in xrange(len(ist)):
1421 range=[data[ist[i]],data[ien[i]]]
1422 range.sort()
1423 masklist.append([range[0],range[1]])
1424 return masklist
1425
1426 def get_mask_indices(self, mask=None):
1427 """\
1428 Compute and Return lists of mask start indices and mask end indices.
1429
1430 Parameters:
1431
1432 mask: channel mask, created with create_mask.
1433
1434 Returns:
1435
1436 List of mask start indices and that of mask end indices,
1437 i.e., [istart1,istart2,....], [iend1,iend2,....].
1438
1439 """
1440 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1441 raise TypeError("The mask should be list or tuple.")
1442 if len(mask) < 2:
1443 raise TypeError("The mask elements should be > 1")
1444 istart=[]
1445 iend=[]
1446 if mask[0]: istart.append(0)
1447 for i in range(len(mask)-1):
1448 if not mask[i] and mask[i+1]:
1449 istart.append(i+1)
1450 elif mask[i] and not mask[i+1]:
1451 iend.append(i)
1452 if mask[len(mask)-1]: iend.append(len(mask)-1)
1453 if len(istart) != len(iend):
1454 raise RuntimeError("Numbers of mask start != mask end.")
1455 for i in range(len(istart)):
1456 if istart[i] > iend[i]:
1457 raise RuntimeError("Mask start index > mask end index")
1458 break
1459 return istart,iend
1460
    @asaplog_post_dec
    def parse_maskexpr(self,maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                               offset=1,minval=min(valid_ifs),
                                               maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row belonging to this IF to read
                        # its frequency axis
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width, in frequency units
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width, in velocity units
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "   IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1587
1588 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1589 """
1590 Parameters:
1591 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1592 typestr : The type of the values in returned list
1593 ('integer' or 'float')
1594 offset : The offset value to subtract from or add to
1595 the boundary value if the selection string
1596 includes '<' or '>'
1597 minval, maxval : The minimum/maximum values to set if the
1598 selection string includes '<' or '>'.
1599 The list element is filled with None by default.
1600 Returns:
1601 A list of min/max pair of selections.
1602 Example:
1603 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1604 returns [[0,2],[5,7],[9,9]]
1605 """
1606 selgroups = selstr.split(';')
1607 sellists = []
1608 if typestr.lower().startswith('int'):
1609 formatfunc = int
1610 else:
1611 formatfunc = float
1612
1613 for currsel in selgroups:
1614 if currsel.find('~') > 0:
1615 minsel = formatfunc(currsel.split('~')[0].strip())
1616 maxsel = formatfunc(currsel.split('~')[1].strip())
1617 elif currsel.strip().startswith('<'):
1618 minsel = minval
1619 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1620 - formatfunc(offset)
1621 elif currsel.strip().startswith('>'):
1622 minsel = formatfunc(currsel.split('>')[1].strip()) \
1623 + formatfunc(offset)
1624 maxsel = maxval
1625 else:
1626 minsel = formatfunc(currsel)
1627 maxsel = formatfunc(currsel)
1628 sellists.append([minsel,maxsel])
1629 return sellists
1630
1631# def get_restfreqs(self):
1632# """
1633# Get the restfrequency(s) stored in this scantable.
1634# The return value(s) are always of unit 'Hz'
1635# Parameters:
1636# none
1637# Returns:
1638# a list of doubles
1639# """
1640# return list(self._getrestfreqs())
1641
1642 def get_restfreqs(self, ids=None):
1643 """\
1644 Get the restfrequency(s) stored in this scantable.
1645 The return value(s) are always of unit 'Hz'
1646
1647 Parameters:
1648
1649 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1650 be retrieved
1651
1652 Returns:
1653
1654 dictionary containing ids and a list of doubles for each id
1655
1656 """
1657 if ids is None:
1658 rfreqs={}
1659 idlist = self.getmolnos()
1660 for i in idlist:
1661 rfreqs[i]=list(self._getrestfreqs(i))
1662 return rfreqs
1663 else:
1664 if type(ids)==list or type(ids)==tuple:
1665 rfreqs={}
1666 for i in ids:
1667 rfreqs[i]=list(self._getrestfreqs(i))
1668 return rfreqs
1669 else:
1670 return list(self._getrestfreqs(ids))
1671 #return list(self._getrestfreqs(ids))
1672
1673 def set_restfreqs(self, freqs=None, unit='Hz'):
1674 """\
1675 Set or replace the restfrequency specified and
1676 if the 'freqs' argument holds a scalar,
1677 then that rest frequency will be applied to all the selected
1678 data. If the 'freqs' argument holds
1679 a vector, then it MUST be of equal or smaller length than
1680 the number of IFs (and the available restfrequencies will be
1681 replaced by this vector). In this case, *all* data have
1682 the restfrequency set per IF according
1683 to the corresponding value you give in the 'freqs' vector.
1684 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1685 IF 1 gets restfreq 2e9.
1686
1687 You can also specify the frequencies via a linecatalog.
1688
1689 Parameters:
1690
1691 freqs: list of rest frequency values or string idenitfiers
1692
1693 unit: unit for rest frequency (default 'Hz')
1694
1695
1696 Example::
1697
1698 # set the given restfrequency for the all currently selected IFs
1699 scan.set_restfreqs(freqs=1.4e9)
1700 # set restfrequencies for the n IFs (n > 1) in the order of the
1701 # list, i.e
1702 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1703 # len(list_of_restfreqs) == nIF
1704 # for nIF == 1 the following will set multiple restfrequency for
1705 # that IF
1706 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1707 # set multiple restfrequencies per IF. as a list of lists where
1708 # the outer list has nIF elements, the inner s arbitrary
1709 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1710
1711 *Note*:
1712
1713 To do more sophisticate Restfrequency setting, e.g. on a
1714 source and IF basis, use scantable.set_selection() before using
1715 this function::
1716
1717 # provided your scantable is called scan
1718 selection = selector()
1719 selection.set_name("ORION*")
1720 selection.set_ifs([1])
1721 scan.set_selection(selection)
1722 scan.set_restfreqs(freqs=86.6e9)
1723
1724 """
1725 varlist = vars()
1726 from asap import linecatalog
1727 # simple value
1728 if isinstance(freqs, int) or isinstance(freqs, float):
1729 self._setrestfreqs([freqs], [""], unit)
1730 # list of values
1731 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1732 # list values are scalars
1733 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1734 if len(freqs) == 1:
1735 self._setrestfreqs(freqs, [""], unit)
1736 else:
1737 # allow the 'old' mode of setting mulitple IFs
1738 sel = selector()
1739 savesel = self._getselection()
1740 iflist = self.getifnos()
1741 if len(freqs)>len(iflist):
1742 raise ValueError("number of elements in list of list "
1743 "exeeds the current IF selections")
1744 iflist = self.getifnos()
1745 for i, fval in enumerate(freqs):
1746 sel.set_ifs(iflist[i])
1747 self._setselection(sel)
1748 self._setrestfreqs([fval], [""], unit)
1749 self._setselection(savesel)
1750
1751 # list values are dict, {'value'=, 'name'=)
1752 elif isinstance(freqs[-1], dict):
1753 values = []
1754 names = []
1755 for d in freqs:
1756 values.append(d["value"])
1757 names.append(d["name"])
1758 self._setrestfreqs(values, names, unit)
1759 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1760 sel = selector()
1761 savesel = self._getselection()
1762 iflist = self.getifnos()
1763 if len(freqs)>len(iflist):
1764 raise ValueError("number of elements in list of list exeeds"
1765 " the current IF selections")
1766 for i, fval in enumerate(freqs):
1767 sel.set_ifs(iflist[i])
1768 self._setselection(sel)
1769 self._setrestfreqs(fval, [""], unit)
1770 self._setselection(savesel)
1771 # freqs are to be taken from a linecatalog
1772 elif isinstance(freqs, linecatalog):
1773 sel = selector()
1774 savesel = self._getselection()
1775 for i in xrange(freqs.nrow()):
1776 sel.set_ifs(iflist[i])
1777 self._setselection(sel)
1778 self._setrestfreqs([freqs.get_frequency(i)],
1779 [freqs.get_name(i)], "MHz")
1780 # ensure that we are not iterating past nIF
1781 if i == self.nif()-1: break
1782 self._setselection(savesel)
1783 else:
1784 return
1785 self._add_history("set_restfreqs", varlist)
1786
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate straight to the C++ base class implementation
        Scantable.shift_refpix(self, delta)
1802
1803 @asaplog_post_dec
1804 def history(self, filename=None):
1805 """\
1806 Print the history. Optionally to a file.
1807
1808 Parameters:
1809
1810 filename: The name of the file to save the history to.
1811
1812 """
1813 hist = list(self._gethistory())
1814 out = "-"*80
1815 for h in hist:
1816 if h.startswith("---"):
1817 out = "\n".join([out, h])
1818 else:
1819 items = h.split("##")
1820 date = items[0]
1821 func = items[1]
1822 items = items[2:]
1823 out += "\n"+date+"\n"
1824 out += "Function: %s\n Parameters:" % (func)
1825 for i in items:
1826 if i == '':
1827 continue
1828 s = i.split("=")
1829 out += "\n %s = %s" % (s[0], s[1])
1830 out = "\n".join([out, "-"*80])
1831 if filename is not None:
1832 if filename is "":
1833 filename = 'scantable_history.txt'
1834 import os
1835 filename = os.path.expandvars(os.path.expanduser(filename))
1836 if not os.path.isdir(filename):
1837 data = open(filename, 'w')
1838 data.write(out)
1839 data.close()
1840 else:
1841 msg = "Illegal file name '%s'." % (filename)
1842 raise IOError(msg)
1843 return page(out)
1844 #
1845 # Maths business
1846 #
1847 @asaplog_post_dec
1848 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1849 """\
1850 Return the (time) weighted average of a scan.
1851
1852 *Note*:
1853
1854 in channels only - align if necessary
1855
1856 Parameters:
1857
1858 mask: an optional mask (only used for 'var' and 'tsys'
1859 weighting)
1860
1861 scanav: True averages each scan separately
1862 False (default) averages all scans together,
1863
1864 weight: Weighting scheme.
1865 'none' (mean no weight)
1866 'var' (1/var(spec) weighted)
1867 'tsys' (1/Tsys**2 weighted)
1868 'tint' (integration time weighted)
1869 'tintsys' (Tint/Tsys**2)
1870 'median' ( median averaging)
1871 The default is 'tint'
1872
1873 align: align the spectra in velocity before averaging. It takes
1874 the time of the first spectrum as reference time.
1875
1876 Example::
1877
1878 # time average the scantable without using a mask
1879 newscan = scan.average_time()
1880
1881 """
1882 varlist = vars()
1883 weight = weight or 'TINT'
1884 mask = mask or ()
1885 scanav = (scanav and 'SCAN') or 'NONE'
1886 scan = (self, )
1887
1888 if align:
1889 scan = (self.freq_align(insitu=False), )
1890 s = None
1891 if weight.upper() == 'MEDIAN':
1892 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1893 scanav))
1894 else:
1895 s = scantable(self._math._average(scan, mask, weight.upper(),
1896 scanav))
1897 s._add_history("average_time", varlist)
1898 return s
1899
1900 @asaplog_post_dec
1901 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1902 """\
1903 Return a scan where all spectra are converted to either
1904 Jansky or Kelvin depending upon the flux units of the scan table.
1905 By default the function tries to look the values up internally.
1906 If it can't find them (or if you want to over-ride), you must
1907 specify EITHER jyperk OR eta (and D which it will try to look up
1908 also if you don't set it). jyperk takes precedence if you set both.
1909
1910 Parameters:
1911
1912 jyperk: the Jy / K conversion factor
1913
1914 eta: the aperture efficiency
1915
1916 d: the geometric diameter (metres)
1917
1918 insitu: if False a new scantable is returned.
1919 Otherwise, the scaling is done in-situ
1920 The default is taken from .asaprc (False)
1921
1922 """
1923 if insitu is None: insitu = rcParams['insitu']
1924 self._math._setinsitu(insitu)
1925 varlist = vars()
1926 jyperk = jyperk or -1.0
1927 d = d or -1.0
1928 eta = eta or -1.0
1929 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1930 s._add_history("convert_flux", varlist)
1931 if insitu: self._assign(s)
1932 else: return s
1933
1934 @asaplog_post_dec
1935 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1936 """\
1937 Return a scan after applying a gain-elevation correction.
1938 The correction can be made via either a polynomial or a
1939 table-based interpolation (and extrapolation if necessary).
1940 You specify polynomial coefficients, an ascii table or neither.
1941 If you specify neither, then a polynomial correction will be made
1942 with built in coefficients known for certain telescopes (an error
1943 will occur if the instrument is not known).
1944 The data and Tsys are *divided* by the scaling factors.
1945
1946 Parameters:
1947
1948 poly: Polynomial coefficients (default None) to compute a
1949 gain-elevation correction as a function of
1950 elevation (in degrees).
1951
1952 filename: The name of an ascii file holding correction factors.
1953 The first row of the ascii file must give the column
1954 names and these MUST include columns
1955 "ELEVATION" (degrees) and "FACTOR" (multiply data
1956 by this) somewhere.
1957 The second row must give the data type of the
1958 column. Use 'R' for Real and 'I' for Integer.
1959 An example file would be
1960 (actual factors are arbitrary) :
1961
1962 TIME ELEVATION FACTOR
1963 R R R
1964 0.1 0 0.8
1965 0.2 20 0.85
1966 0.3 40 0.9
1967 0.4 60 0.85
1968 0.5 80 0.8
1969 0.6 90 0.75
1970
1971 method: Interpolation method when correcting from a table.
1972 Values are "nearest", "linear" (default), "cubic"
1973 and "spline"
1974
1975 insitu: if False a new scantable is returned.
1976 Otherwise, the scaling is done in-situ
1977 The default is taken from .asaprc (False)
1978
1979 """
1980
1981 if insitu is None: insitu = rcParams['insitu']
1982 self._math._setinsitu(insitu)
1983 varlist = vars()
1984 poly = poly or ()
1985 from os.path import expandvars
1986 filename = expandvars(filename)
1987 s = scantable(self._math._gainel(self, poly, filename, method))
1988 s._add_history("gain_el", varlist)
1989 if insitu:
1990 self._assign(s)
1991 else:
1992 return s
1993
1994 @asaplog_post_dec
1995 def freq_align(self, reftime=None, method='cubic', insitu=None):
1996 """\
1997 Return a scan where all rows have been aligned in frequency/velocity.
1998 The alignment frequency frame (e.g. LSRK) is that set by function
1999 set_freqframe.
2000
2001 Parameters:
2002
2003 reftime: reference time to align at. By default, the time of
2004 the first row of data is used.
2005
2006 method: Interpolation method for regridding the spectra.
2007 Choose from "nearest", "linear", "cubic" (default)
2008 and "spline"
2009
2010 insitu: if False a new scantable is returned.
2011 Otherwise, the scaling is done in-situ
2012 The default is taken from .asaprc (False)
2013
2014 """
2015 if insitu is None: insitu = rcParams["insitu"]
2016 self._math._setinsitu(insitu)
2017 varlist = vars()
2018 reftime = reftime or ""
2019 s = scantable(self._math._freq_align(self, reftime, method))
2020 s._add_history("freq_align", varlist)
2021 if insitu: self._assign(s)
2022 else: return s
2023
2024 @asaplog_post_dec
2025 def opacity(self, tau=None, insitu=None):
2026 """\
2027 Apply an opacity correction. The data
2028 and Tsys are multiplied by the correction factor.
2029
2030 Parameters:
2031
2032 tau: (list of) opacity from which the correction factor is
2033 exp(tau*ZD)
2034 where ZD is the zenith-distance.
2035 If a list is provided, it has to be of length nIF,
2036 nIF*nPol or 1 and in order of IF/POL, e.g.
2037 [opif0pol0, opif0pol1, opif1pol0 ...]
2038 if tau is `None` the opacities are determined from a
2039 model.
2040
2041 insitu: if False a new scantable is returned.
2042 Otherwise, the scaling is done in-situ
2043 The default is taken from .asaprc (False)
2044
2045 """
2046 if insitu is None: insitu = rcParams['insitu']
2047 self._math._setinsitu(insitu)
2048 varlist = vars()
2049 if not hasattr(tau, "__len__"):
2050 tau = [tau]
2051 s = scantable(self._math._opacity(self, tau))
2052 s._add_history("opacity", varlist)
2053 if insitu: self._assign(s)
2054 else: return s
2055
2056 @asaplog_post_dec
2057 def bin(self, width=5, insitu=None):
2058 """\
2059 Return a scan where all spectra have been binned up.
2060
2061 Parameters:
2062
2063 width: The bin width (default=5) in pixels
2064
2065 insitu: if False a new scantable is returned.
2066 Otherwise, the scaling is done in-situ
2067 The default is taken from .asaprc (False)
2068
2069 """
2070 if insitu is None: insitu = rcParams['insitu']
2071 self._math._setinsitu(insitu)
2072 varlist = vars()
2073 s = scantable(self._math._bin(self, width))
2074 s._add_history("bin", varlist)
2075 if insitu:
2076 self._assign(s)
2077 else:
2078 return s
2079
    @asaplog_post_dec
    def resample(self, width=5, method='cubic', insitu=None):
        """\
        Return a scan where all spectra have been resampled
        (the docstring previously said "binned up", copied from bin()).

        Parameters:

            width:       The resampling width (default=5) in pixels

            method:      Interpolation method when correcting from a table.
                         Values are "nearest", "linear", "cubic" (default)
                         and "spline"

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._resample(self, method, width))
        s._add_history("resample", varlist)
        if insitu: self._assign(s)
        else: return s
2105
2106 @asaplog_post_dec
2107 def average_pol(self, mask=None, weight='none'):
2108 """\
2109 Average the Polarisations together.
2110
2111 Parameters:
2112
2113 mask: An optional mask defining the region, where the
2114 averaging will be applied. The output will have all
2115 specified points masked.
2116
2117 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2118 weighted), or 'tsys' (1/Tsys**2 weighted)
2119
2120 """
2121 varlist = vars()
2122 mask = mask or ()
2123 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2124 s._add_history("average_pol", varlist)
2125 return s
2126
2127 @asaplog_post_dec
2128 def average_beam(self, mask=None, weight='none'):
2129 """\
2130 Average the Beams together.
2131
2132 Parameters:
2133 mask: An optional mask defining the region, where the
2134 averaging will be applied. The output will have all
2135 specified points masked.
2136
2137 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2138 weighted), or 'tsys' (1/Tsys**2 weighted)
2139
2140 """
2141 varlist = vars()
2142 mask = mask or ()
2143 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2144 s._add_history("average_beam", varlist)
2145 return s
2146
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag:      Bool indicating whether to turn this on (True) or
                        off (False)

        """
        # capture the argument values before delegating, for the history record
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
2161
2162 @asaplog_post_dec
2163 def convert_pol(self, poltype=None):
2164 """\
2165 Convert the data to a different polarisation type.
2166 Note that you will need cross-polarisation terms for most conversions.
2167
2168 Parameters:
2169
2170 poltype: The new polarisation type. Valid types are:
2171 "linear", "circular", "stokes" and "linpol"
2172
2173 """
2174 varlist = vars()
2175 s = scantable(self._math._convertpol(self, poltype))
2176 s._add_history("convert_pol", varlist)
2177 return s
2178
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive mode: overlay original and smoothed spectra row by
            # row and let the user accept or reject each smoothing result
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put this row's original spectrum back
                    s._setspectrum(yorg, r)
            # tear down the interactive plotter and the backup copy
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2254
2255 @asaplog_post_dec
2256 def _parse_wn(self, wn):
2257 if isinstance(wn, list) or isinstance(wn, tuple):
2258 return wn
2259 elif isinstance(wn, int):
2260 return [ wn ]
2261 elif isinstance(wn, str):
2262 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2263 val = wn.split('-')
2264 val = [int(val[0]), int(val[1])]
2265 val.sort()
2266 res = [i for i in xrange(val[0], val[1]+1)]
2267 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2268 val = int(wn[2:])+1
2269 res = [i for i in xrange(val)]
2270 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2271 val = int(wn[:-2])+1
2272 res = [i for i in xrange(val)]
2273 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2274 val = int(wn[1:])
2275 res = [i for i in xrange(val)]
2276 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2277 val = int(wn[:-1])
2278 res = [i for i in xrange(val)]
2279 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2280 val = int(wn[2:])
2281 res = [i for i in xrange(val, self.nchan()/2+1)]
2282 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2283 val = int(wn[:-2])
2284 res = [i for i in xrange(val, self.nchan()/2+1)]
2285 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2286 val = int(wn[1:])+1
2287 res = [i for i in xrange(val, self.nchan()/2+1)]
2288 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2289 val = int(wn[:-1])+1
2290 res = [i for i in xrange(val, self.nchan()/2+1)]
2291
2292 return res
2293 else:
2294 msg = 'wrong value given for addwn/rejwn'
2295 raise RuntimeError(msg)
2296
2297
2298 @asaplog_post_dec
2299 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2300 fftmethod=None, fftthresh=None,
2301 addwn=None, rejwn=None, clipthresh=None,
2302 clipniter=None, plot=None,
2303 getresidual=None, showprogress=None,
2304 minnrow=None, outlog=None, blfile=None):
2305 """\
2306 Return a scan which has been baselined (all rows) with sinusoidal functions.
2307 Parameters:
2308 insitu: if False a new scantable is returned.
2309 Otherwise, the scaling is done in-situ
2310 The default is taken from .asaprc (False)
2311 mask: an optional mask
2312 applyfft: if True use some method, such as FFT, to find
2313 strongest sinusoidal components in the wavenumber
2314 domain to be used for baseline fitting.
2315 default is True.
2316 fftmethod: method to find the strong sinusoidal components.
2317 now only 'fft' is available and it is the default.
2318 fftthresh: the threshold to select wave numbers to be used for
2319 fitting from the distribution of amplitudes in the
2320 wavenumber domain.
2321 both float and string values accepted.
2322 given a float value, the unit is set to sigma.
2323 for string values, allowed formats include:
2324 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2325 'topx' (= the x strongest ones, e.g. 'top5').
2326 default is 3.0 (unit: sigma).
2327 addwn: the additional wave numbers to be used for fitting.
2328 list or integer value is accepted to specify every
2329 wave numbers. also string value can be used in case
2330 you need to specify wave numbers in a certain range,
2331 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2332 '<a' (= 0,1,...,a-2,a-1),
2333 '>=a' (= a, a+1, ... up to the maximum wave
2334 number corresponding to the Nyquist
2335 frequency for the case of FFT).
2336 default is [].
2337 rejwn: the wave numbers NOT to be used for fitting.
2338 can be set just as addwn but has higher priority:
2339 wave numbers which are specified both in addwn
2340 and rejwn will NOT be used. default is [].
2341 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2342 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2343 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2344 plot the fit and the residual. In this each
2345 indivual fit has to be approved, by typing 'y'
2346 or 'n'
2347 getresidual: if False, returns best-fit values instead of
2348 residual. (default is True)
2349 showprogress: show progress status for large data.
2350 default is True.
2351 minnrow: minimum number of input spectra to show.
2352 default is 1000.
2353 outlog: Output the coefficients of the best-fit
2354 function to logger (default is False)
2355 blfile: Name of a text file in which the best-fit
2356 parameter values to be written
2357 (default is '': no file/logger output)
2358
2359 Example:
2360 # return a scan baselined by a combination of sinusoidal curves having
2361 # wave numbers in spectral window up to 10,
2362 # also with 3-sigma clipping, iteration up to 4 times
2363 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2364
2365 Note:
2366 The best-fit parameter values output in logger and/or blfile are now
2367 based on specunit of 'channel'.
2368 """
2369
2370 try:
2371 varlist = vars()
2372
2373 if insitu is None: insitu = rcParams['insitu']
2374 if insitu:
2375 workscan = self
2376 else:
2377 workscan = self.copy()
2378
2379 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2380 if applyfft is None: applyfft = True
2381 if fftmethod is None: fftmethod = 'fft'
2382 if fftthresh is None: fftthresh = 3.0
2383 if addwn is None: addwn = []
2384 if rejwn is None: rejwn = []
2385 if clipthresh is None: clipthresh = 3.0
2386 if clipniter is None: clipniter = 0
2387 if plot is None: plot = False
2388 if getresidual is None: getresidual = True
2389 if showprogress is None: showprogress = True
2390 if minnrow is None: minnrow = 1000
2391 if outlog is None: outlog = False
2392 if blfile is None: blfile = ''
2393
2394 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2395 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2396 workscan._add_history('sinusoid_baseline', varlist)
2397
2398 if insitu:
2399 self._assign(workscan)
2400 else:
2401 return workscan
2402
2403 except RuntimeError, e:
2404 raise_fitting_failure_exception(e)
2405
2406
2407 @asaplog_post_dec
2408 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2409 addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2410 chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
2411 outlog=None, blfile=None):
2412 """\
2413 Return a scan which has been baselined (all rows) with sinusoidal functions.
2414 Spectral lines are detected first using linefinder and masked out
2415 to avoid them affecting the baseline solution.
2416
2417 Parameters:
2418 insitu: if False a new scantable is returned.
2419 Otherwise, the scaling is done in-situ
2420 The default is taken from .asaprc (False)
2421 mask: an optional mask retreived from scantable
2422 applyfft: if True use some method, such as FFT, to find
2423 strongest sinusoidal components in the wavenumber
2424 domain to be used for baseline fitting.
2425 default is True.
2426 fftmethod: method to find the strong sinusoidal components.
2427 now only 'fft' is available and it is the default.
2428 fftthresh: the threshold to select wave numbers to be used for
2429 fitting from the distribution of amplitudes in the
2430 wavenumber domain.
2431 both float and string values accepted.
2432 given a float value, the unit is set to sigma.
2433 for string values, allowed formats include:
2434 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2435 'topx' (= the x strongest ones, e.g. 'top5').
2436 default is 3.0 (unit: sigma).
2437 addwn: the additional wave numbers to be used for fitting.
2438 list or integer value is accepted to specify every
2439 wave numbers. also string value can be used in case
2440 you need to specify wave numbers in a certain range,
2441 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2442 '<a' (= 0,1,...,a-2,a-1),
2443 '>=a' (= a, a+1, ... up to the maximum wave
2444 number corresponding to the Nyquist
2445 frequency for the case of FFT).
2446 default is [].
2447 rejwn: the wave numbers NOT to be used for fitting.
2448 can be set just as addwn but has higher priority:
2449 wave numbers which are specified both in addwn
2450 and rejwn will NOT be used. default is [].
2451 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2452 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2453 edge: an optional number of channel to drop at
2454 the edge of spectrum. If only one value is
2455 specified, the same number will be dropped
2456 from both sides of the spectrum. Default
2457 is to keep all channels. Nested tuples
2458 represent individual edge selection for
2459 different IFs (a number of spectral channels
2460 can be different)
2461 threshold: the threshold used by line finder. It is
2462 better to keep it large as only strong lines
2463 affect the baseline solution.
2464 chan_avg_limit: a maximum number of consequtive spectral
2465 channels to average during the search of
2466 weak and broad lines. The default is no
2467 averaging (and no search for weak lines).
2468 If such lines can affect the fitted baseline
2469 (e.g. a high order polynomial is fitted),
2470 increase this parameter (usually values up
2471 to 8 are reasonable). Most users of this
2472 method should find the default value sufficient.
2473 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2474 plot the fit and the residual. In this each
2475 indivual fit has to be approved, by typing 'y'
2476 or 'n'
2477 getresidual: if False, returns best-fit values instead of
2478 residual. (default is True)
2479 showprogress: show progress status for large data.
2480 default is True.
2481 minnrow: minimum number of input spectra to show.
2482 default is 1000.
2483 outlog: Output the coefficients of the best-fit
2484 function to logger (default is False)
2485 blfile: Name of a text file in which the best-fit
2486 parameter values to be written
2487 (default is "": no file/logger output)
2488
2489 Example:
2490 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2491
2492 Note:
2493 The best-fit parameter values output in logger and/or blfile are now
2494 based on specunit of 'channel'.
2495 """
2496
2497 try:
2498 varlist = vars()
2499
2500 if insitu is None: insitu = rcParams['insitu']
2501 if insitu:
2502 workscan = self
2503 else:
2504 workscan = self.copy()
2505
2506 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2507 if applyfft is None: applyfft = True
2508 if fftmethod is None: fftmethod = 'fft'
2509 if fftthresh is None: fftthresh = 3.0
2510 if addwn is None: addwn = []
2511 if rejwn is None: rejwn = []
2512 if clipthresh is None: clipthresh = 3.0
2513 if clipniter is None: clipniter = 0
2514 if edge is None: edge = (0,0)
2515 if threshold is None: threshold = 3
2516 if chan_avg_limit is None: chan_avg_limit = 1
2517 if plot is None: plot = False
2518 if getresidual is None: getresidual = True
2519 if showprogress is None: showprogress = True
2520 if minnrow is None: minnrow = 1000
2521 if outlog is None: outlog = False
2522 if blfile is None: blfile = ''
2523
2524 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2525 workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2526 workscan._add_history("auto_sinusoid_baseline", varlist)
2527
2528 if insitu:
2529 self._assign(workscan)
2530 else:
2531 return workscan
2532
2533 except RuntimeError, e:
2534 raise_fitting_failure_exception(e)
2535
2536 @asaplog_post_dec
2537 def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2538 plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
2539 """\
2540 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
2541 Parameters:
2542 insitu: If False a new scantable is returned.
2543 Otherwise, the scaling is done in-situ
2544 The default is taken from .asaprc (False)
2545 mask: An optional mask
2546 npiece: Number of pieces. (default is 2)
2547 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2548 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2549 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2550 plot the fit and the residual. In this each
2551 indivual fit has to be approved, by typing 'y'
2552 or 'n'
2553 getresidual: if False, returns best-fit values instead of
2554 residual. (default is True)
2555 showprogress: show progress status for large data.
2556 default is True.
2557 minnrow: minimum number of input spectra to show.
2558 default is 1000.
2559 outlog: Output the coefficients of the best-fit
2560 function to logger (default is False)
2561 blfile: Name of a text file in which the best-fit
2562 parameter values to be written
2563 (default is "": no file/logger output)
2564
2565 Example:
2566 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2567 # also with 3-sigma clipping, iteration up to 4 times
2568 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2569
2570 Note:
2571 The best-fit parameter values output in logger and/or blfile are now
2572 based on specunit of 'channel'.
2573 """
2574
2575 try:
2576 varlist = vars()
2577
2578 if insitu is None: insitu = rcParams['insitu']
2579 if insitu:
2580 workscan = self
2581 else:
2582 workscan = self.copy()
2583
2584 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2585 if npiece is None: npiece = 2
2586 if clipthresh is None: clipthresh = 3.0
2587 if clipniter is None: clipniter = 0
2588 if plot is None: plot = False
2589 if getresidual is None: getresidual = True
2590 if showprogress is None: showprogress = True
2591 if minnrow is None: minnrow = 1000
2592 if outlog is None: outlog = False
2593 if blfile is None: blfile = ''
2594
2595 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2596 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2597 workscan._add_history("cspline_baseline", varlist)
2598
2599 if insitu:
2600 self._assign(workscan)
2601 else:
2602 return workscan
2603
2604 except RuntimeError, e:
2605 raise_fitting_failure_exception(e)
2606
2607 @asaplog_post_dec
2608 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2609 edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2610 showprogress=None, minnrow=None, outlog=None, blfile=None):
2611 """\
2612 Return a scan which has been baselined (all rows) by cubic spline
2613 function (piecewise cubic polynomial).
2614 Spectral lines are detected first using linefinder and masked out
2615 to avoid them affecting the baseline solution.
2616
2617 Parameters:
2618 insitu: if False a new scantable is returned.
2619 Otherwise, the scaling is done in-situ
2620 The default is taken from .asaprc (False)
2621 mask: an optional mask retreived from scantable
2622 npiece: Number of pieces. (default is 2)
2623 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2624 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2625 edge: an optional number of channel to drop at
2626 the edge of spectrum. If only one value is
2627 specified, the same number will be dropped
2628 from both sides of the spectrum. Default
2629 is to keep all channels. Nested tuples
2630 represent individual edge selection for
2631 different IFs (a number of spectral channels
2632 can be different)
2633 threshold: the threshold used by line finder. It is
2634 better to keep it large as only strong lines
2635 affect the baseline solution.
2636 chan_avg_limit: a maximum number of consequtive spectral
2637 channels to average during the search of
2638 weak and broad lines. The default is no
2639 averaging (and no search for weak lines).
2640 If such lines can affect the fitted baseline
2641 (e.g. a high order polynomial is fitted),
2642 increase this parameter (usually values up
2643 to 8 are reasonable). Most users of this
2644 method should find the default value sufficient.
2645 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2646 plot the fit and the residual. In this each
2647 indivual fit has to be approved, by typing 'y'
2648 or 'n'
2649 getresidual: if False, returns best-fit values instead of
2650 residual. (default is True)
2651 showprogress: show progress status for large data.
2652 default is True.
2653 minnrow: minimum number of input spectra to show.
2654 default is 1000.
2655 outlog: Output the coefficients of the best-fit
2656 function to logger (default is False)
2657 blfile: Name of a text file in which the best-fit
2658 parameter values to be written
2659 (default is "": no file/logger output)
2660
2661 Example:
2662 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2663
2664 Note:
2665 The best-fit parameter values output in logger and/or blfile are now
2666 based on specunit of 'channel'.
2667 """
2668
2669 try:
2670 varlist = vars()
2671
2672 if insitu is None: insitu = rcParams['insitu']
2673 if insitu:
2674 workscan = self
2675 else:
2676 workscan = self.copy()
2677
2678 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2679 if npiece is None: npiece = 2
2680 if clipthresh is None: clipthresh = 3.0
2681 if clipniter is None: clipniter = 0
2682 if edge is None: edge = (0, 0)
2683 if threshold is None: threshold = 3
2684 if chan_avg_limit is None: chan_avg_limit = 1
2685 if plot is None: plot = False
2686 if getresidual is None: getresidual = True
2687 if showprogress is None: showprogress = True
2688 if minnrow is None: minnrow = 1000
2689 if outlog is None: outlog = False
2690 if blfile is None: blfile = ''
2691
2692 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2693 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2694 clipniter,
2695 normalise_edge_param(edge),
2696 threshold,
2697 chan_avg_limit, getresidual,
2698 pack_progress_params(showprogress,
2699 minnrow),
2700 outlog, blfile)
2701 workscan._add_history("auto_cspline_baseline", varlist)
2702
2703 if insitu:
2704 self._assign(workscan)
2705 else:
2706 return workscan
2707
2708 except RuntimeError, e:
2709 raise_fitting_failure_exception(e)
2710
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # snapshot of the call arguments for the history record
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for unset parameters
            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive mode: fit row by row with the Python fitter so
                # each fit can be approved or rejected by the user
                # NOTE(review): text output is only produced when blfile names
                # an already-existing file (os.path.exists); a nonexistent
                # blfile is silently skipped -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own flag mask
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the best-fit model
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter used by the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: delegate the whole fit to the C++ layer
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2823
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # snapshot of the call arguments for the history record
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for unset parameters
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: run the line finder and the Python fitter
                # row by row so each fit can be approved or rejected
                # NOTE(review): text output is only produced when blfile names
                # an already-existing file (os.path.exists); a nonexistent
                # blfile is silently skipped -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # edge appears to hold flattened (start, end) pairs per IF
                    # after normalise_edge_param; pick this row's pair
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # use the line-free mask produced by the line finder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the best-fit model
                    workscan._setspectrum((f.fitter.getresidual() \
                                               if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter used by the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: delegate the whole fit to the C++ layer
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2972
2973 def _init_blinfo(self):
2974 """\
2975 Initialise the following three auxiliary members:
2976 blpars : parameters of the best-fit baseline,
2977 masklists : mask data (edge positions of masked channels) and
2978 actualmask : mask data (in boolean list),
2979 to keep for use later (including output to logger/text files).
2980 Used by poly_baseline() and auto_poly_baseline() in case of
2981 'plot=True'.
2982 """
2983 self.blpars = []
2984 self.masklists = []
2985 self.actualmask = []
2986 return
2987
2988 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2989 """\
2990 Append baseline-fitting related info to blpars, masklist and
2991 actualmask.
2992 """
2993 self.blpars.append(data_blpars)
2994 self.masklists.append(data_masklists)
2995 self.actualmask.append(data_actualmask)
2996 return
2997
2998 @asaplog_post_dec
2999 def rotate_linpolphase(self, angle):
3000 """\
3001 Rotate the phase of the complex polarization O=Q+iU correlation.
3002 This is always done in situ in the raw data. So if you call this
3003 function more than once then each call rotates the phase further.
3004
3005 Parameters:
3006
3007 angle: The angle (degrees) to rotate (add) by.
3008
3009 Example::
3010
3011 scan.rotate_linpolphase(2.3)
3012
3013 """
3014 varlist = vars()
3015 self._math._rotate_linpolphase(self, angle)
3016 self._add_history("rotate_linpolphase", varlist)
3017 return
3018
3019 @asaplog_post_dec
3020 def rotate_xyphase(self, angle):
3021 """\
3022 Rotate the phase of the XY correlation. This is always done in situ
3023 in the data. So if you call this function more than once
3024 then each call rotates the phase further.
3025
3026 Parameters:
3027
3028 angle: The angle (degrees) to rotate (add) by.
3029
3030 Example::
3031
3032 scan.rotate_xyphase(2.3)
3033
3034 """
3035 varlist = vars()
3036 self._math._rotate_xyphase(self, angle)
3037 self._add_history("rotate_xyphase", varlist)
3038 return
3039
3040 @asaplog_post_dec
3041 def swap_linears(self):
3042 """\
3043 Swap the linear polarisations XX and YY, or better the first two
3044 polarisations as this also works for ciculars.
3045 """
3046 varlist = vars()
3047 self._math._swap_linears(self)
3048 self._add_history("swap_linears", varlist)
3049 return
3050
3051 @asaplog_post_dec
3052 def invert_phase(self):
3053 """\
3054 Invert the phase of the complex polarisation
3055 """
3056 varlist = vars()
3057 self._math._invert_phase(self)
3058 self._add_history("invert_phase", varlist)
3059 return
3060
3061 @asaplog_post_dec
3062 def add(self, offset, insitu=None):
3063 """\
3064 Return a scan where all spectra have the offset added
3065
3066 Parameters:
3067
3068 offset: the offset
3069
3070 insitu: if False a new scantable is returned.
3071 Otherwise, the scaling is done in-situ
3072 The default is taken from .asaprc (False)
3073
3074 """
3075 if insitu is None: insitu = rcParams['insitu']
3076 self._math._setinsitu(insitu)
3077 varlist = vars()
3078 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3079 s._add_history("add", varlist)
3080 if insitu:
3081 self._assign(s)
3082 else:
3083 return s
3084
3085 @asaplog_post_dec
3086 def scale(self, factor, tsys=True, insitu=None):
3087 """\
3088
3089 Return a scan where all spectra are scaled by the given 'factor'
3090
3091 Parameters:
3092
3093 factor: the scaling factor (float or 1D float list)
3094
3095 insitu: if False a new scantable is returned.
3096 Otherwise, the scaling is done in-situ
3097 The default is taken from .asaprc (False)
3098
3099 tsys: if True (default) then apply the operation to Tsys
3100 as well as the data
3101
3102 """
3103 if insitu is None: insitu = rcParams['insitu']
3104 self._math._setinsitu(insitu)
3105 varlist = vars()
3106 s = None
3107 import numpy
3108 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3109 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3110 from asapmath import _array2dOp
3111 s = _array2dOp( self.copy(), factor, "MUL", tsys )
3112 else:
3113 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3114 else:
3115 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
3116 s._add_history("scale", varlist)
3117 if insitu:
3118 self._assign(s)
3119 else:
3120 return s
3121
3122 def set_sourcetype(self, match, matchtype="pattern",
3123 sourcetype="reference"):
3124 """\
3125 Set the type of the source to be an source or reference scan
3126 using the provided pattern.
3127
3128 Parameters:
3129
3130 match: a Unix style pattern, regular expression or selector
3131
3132 matchtype: 'pattern' (default) UNIX style pattern or
3133 'regex' regular expression
3134
3135 sourcetype: the type of the source to use (source/reference)
3136
3137 """
3138 varlist = vars()
3139 basesel = self.get_selection()
3140 stype = -1
3141 if sourcetype.lower().startswith("r"):
3142 stype = 1
3143 elif sourcetype.lower().startswith("s"):
3144 stype = 0
3145 else:
3146 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3147 if matchtype.lower().startswith("p"):
3148 matchtype = "pattern"
3149 elif matchtype.lower().startswith("r"):
3150 matchtype = "regex"
3151 else:
3152 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3153 sel = selector()
3154 if isinstance(match, selector):
3155 sel = match
3156 else:
3157 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3158 self.set_selection(basesel+sel)
3159 self._setsourcetype(stype)
3160 self.set_selection(basesel)
3161 self._add_history("set_sourcetype", varlist)
3162
3163 @asaplog_post_dec
3164 @preserve_selection
3165 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3166 """\
3167 This function allows to build quotients automatically.
3168 It assumes the observation to have the same number of
3169 "ons" and "offs"
3170
3171 Parameters:
3172
3173 preserve: you can preserve (default) the continuum or
3174 remove it. The equations used are
3175
3176 preserve: Output = Toff * (on/off) - Toff
3177
3178 remove: Output = Toff * (on/off) - Ton
3179
3180 mode: the on/off detection mode
3181 'paired' (default)
3182 identifies 'off' scans by the
3183 trailing '_R' (Mopra/Parkes) or
3184 '_e'/'_w' (Tid) and matches
3185 on/off pairs from the observing pattern
3186 'time'
3187 finds the closest off in time
3188
3189 .. todo:: verify argument is not implemented
3190
3191 """
3192 varlist = vars()
3193 modes = ["time", "paired"]
3194 if not mode in modes:
3195 msg = "please provide valid mode. Valid modes are %s" % (modes)
3196 raise ValueError(msg)
3197 s = None
3198 if mode.lower() == "paired":
3199 sel = self.get_selection()
3200 sel.set_query("SRCTYPE==psoff")
3201 self.set_selection(sel)
3202 offs = self.copy()
3203 sel.set_query("SRCTYPE==pson")
3204 self.set_selection(sel)
3205 ons = self.copy()
3206 s = scantable(self._math._quotient(ons, offs, preserve))
3207 elif mode.lower() == "time":
3208 s = scantable(self._math._auto_quotient(self, mode, preserve))
3209 s._add_history("auto_quotient", varlist)
3210 return s
3211
3212 @asaplog_post_dec
3213 def mx_quotient(self, mask = None, weight='median', preserve=True):
3214 """\
3215 Form a quotient using "off" beams when observing in "MX" mode.
3216
3217 Parameters:
3218
3219 mask: an optional mask to be used when weight == 'stddev'
3220
3221 weight: How to average the off beams. Default is 'median'.
3222
3223 preserve: you can preserve (default) the continuum or
3224 remove it. The equations used are:
3225
3226 preserve: Output = Toff * (on/off) - Toff
3227
3228 remove: Output = Toff * (on/off) - Ton
3229
3230 """
3231 mask = mask or ()
3232 varlist = vars()
3233 on = scantable(self._math._mx_extract(self, 'on'))
3234 preoff = scantable(self._math._mx_extract(self, 'off'))
3235 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3236 from asapmath import quotient
3237 q = quotient(on, off, preserve)
3238 q._add_history("mx_quotient", varlist)
3239 return q
3240
3241 @asaplog_post_dec
3242 def freq_switch(self, insitu=None):
3243 """\
3244 Apply frequency switching to the data.
3245
3246 Parameters:
3247
3248 insitu: if False a new scantable is returned.
3249 Otherwise, the swictching is done in-situ
3250 The default is taken from .asaprc (False)
3251
3252 """
3253 if insitu is None: insitu = rcParams['insitu']
3254 self._math._setinsitu(insitu)
3255 varlist = vars()
3256 s = scantable(self._math._freqswitch(self))
3257 s._add_history("freq_switch", varlist)
3258 if insitu:
3259 self._assign(s)
3260 else:
3261 return s
3262
3263 @asaplog_post_dec
3264 def recalc_azel(self):
3265 """Recalculate the azimuth and elevation for each position."""
3266 varlist = vars()
3267 self._recalcazel()
3268 self._add_history("recalc_azel", varlist)
3269 return
3270
3271 @asaplog_post_dec
3272 def __add__(self, other):
3273 varlist = vars()
3274 s = None
3275 if isinstance(other, scantable):
3276 s = scantable(self._math._binaryop(self, other, "ADD"))
3277 elif isinstance(other, float):
3278 s = scantable(self._math._unaryop(self, other, "ADD", False))
3279 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3280 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3281 from asapmath import _array2dOp
3282 s = _array2dOp( self.copy(), other, "ADD", False )
3283 else:
3284 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
3285 else:
3286 raise TypeError("Other input is not a scantable or float value")
3287 s._add_history("operator +", varlist)
3288 return s
3289
3290 @asaplog_post_dec
3291 def __sub__(self, other):
3292 """
3293 implicit on all axes and on Tsys
3294 """
3295 varlist = vars()
3296 s = None
3297 if isinstance(other, scantable):
3298 s = scantable(self._math._binaryop(self, other, "SUB"))
3299 elif isinstance(other, float):
3300 s = scantable(self._math._unaryop(self, other, "SUB", False))
3301 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3302 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3303 from asapmath import _array2dOp
3304 s = _array2dOp( self.copy(), other, "SUB", False )
3305 else:
3306 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
3307 else:
3308 raise TypeError("Other input is not a scantable or float value")
3309 s._add_history("operator -", varlist)
3310 return s
3311
3312 @asaplog_post_dec
3313 def __mul__(self, other):
3314 """
3315 implicit on all axes and on Tsys
3316 """
3317 varlist = vars()
3318 s = None
3319 if isinstance(other, scantable):
3320 s = scantable(self._math._binaryop(self, other, "MUL"))
3321 elif isinstance(other, float):
3322 s = scantable(self._math._unaryop(self, other, "MUL", False))
3323 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3324 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3325 from asapmath import _array2dOp
3326 s = _array2dOp( self.copy(), other, "MUL", False )
3327 else:
3328 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
3329 else:
3330 raise TypeError("Other input is not a scantable or float value")
3331 s._add_history("operator *", varlist)
3332 return s
3333
3334
3335 @asaplog_post_dec
3336 def __div__(self, other):
3337 """
3338 implicit on all axes and on Tsys
3339 """
3340 varlist = vars()
3341 s = None
3342 if isinstance(other, scantable):
3343 s = scantable(self._math._binaryop(self, other, "DIV"))
3344 elif isinstance(other, float):
3345 if other == 0.0:
3346 raise ZeroDivisionError("Dividing by zero is not recommended")
3347 s = scantable(self._math._unaryop(self, other, "DIV", False))
3348 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3349 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3350 from asapmath import _array2dOp
3351 s = _array2dOp( self.copy(), other, "DIV", False )
3352 else:
3353 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
3354 else:
3355 raise TypeError("Other input is not a scantable or float value")
3356 s._add_history("operator /", varlist)
3357 return s
3358
3359 @asaplog_post_dec
3360 def get_fit(self, row=0):
3361 """\
3362 Print or return the stored fits for a row in the scantable
3363
3364 Parameters:
3365
3366 row: the row which the fit has been applied to.
3367
3368 """
3369 if row > self.nrow():
3370 return
3371 from asap.asapfit import asapfit
3372 fit = asapfit(self._getfit(row))
3373 asaplog.push( '%s' %(fit) )
3374 return fit.as_dict()
3375
3376 def flag_nans(self):
3377 """\
3378 Utility function to flag NaN values in the scantable.
3379 """
3380 import numpy
3381 basesel = self.get_selection()
3382 for i in range(self.nrow()):
3383 sel = self.get_row_selector(i)
3384 self.set_selection(basesel+sel)
3385 nans = numpy.isnan(self._getspectrum(0))
3386 if numpy.any(nans):
3387 bnans = [ bool(v) for v in nans]
3388 self.flag(bnans)
3389 self.set_selection(basesel)
3390
3391 def get_row_selector(self, rowno):
3392 #return selector(beams=self.getbeam(rowno),
3393 # ifs=self.getif(rowno),
3394 # pols=self.getpol(rowno),
3395 # scans=self.getscan(rowno),
3396 # cycles=self.getcycle(rowno))
3397 return selector(rows=[rowno])
3398
    def _add_history(self, funcname, parameters):
        # Record a function call (name plus '##'-separated key=value pairs)
        # in the scantable's history, unless disabled via .asaprc.
        # 'parameters' is typically a vars() snapshot taken by the caller.
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # NOTE(review): removes 'self' from the caller-supplied dict in
        # place; harmless for vars() snapshots, but it does mutate the
        # argument.
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # flatten one level of dict-valued parameters into the entry
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid serialising a whole table into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            # store masks compactly as [start, end) segments
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        # store masks compactly as [start, end) segments
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3438
3439
3440 def _zip_mask(self, mask):
3441 mask = list(mask)
3442 i = 0
3443 segments = []
3444 while mask[i:].count(1):
3445 i += mask[i:].index(1)
3446 if mask[i:].count(0):
3447 j = i + mask[i:].index(0)
3448 else:
3449 j = len(mask)
3450 segments.append([i, j])
3451 i = j
3452 return segments
3453
3454 def _get_ordinate_label(self):
3455 fu = "("+self.get_fluxunit()+")"
3456 import re
3457 lbl = "Intensity"
3458 if re.match(".K.", fu):
3459 lbl = "Brightness Temperature "+ fu
3460 elif re.match(".Jy.", fu):
3461 lbl = "Flux density "+ fu
3462 return lbl
3463
3464 def _check_ifs(self):
3465 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3466 nchans = [self.nchan(i) for i in self.getifnos()]
3467 nchans = filter(lambda t: t > 0, nchans)
3468 return (sum(nchans)/len(nchans) == nchans[0])
3469
3470 @asaplog_post_dec
3471 #def _fill(self, names, unit, average, getpt, antenna):
3472 def _fill(self, names, unit, average, opts={}):
3473 first = True
3474 fullnames = []
3475 for name in names:
3476 name = os.path.expandvars(name)
3477 name = os.path.expanduser(name)
3478 if not os.path.exists(name):
3479 msg = "File '%s' does not exists" % (name)
3480 raise IOError(msg)
3481 fullnames.append(name)
3482 if average:
3483 asaplog.push('Auto averaging integrations')
3484 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3485 for name in fullnames:
3486 tbl = Scantable(stype)
3487 if is_ms( name ):
3488 r = msfiller( tbl )
3489 else:
3490 r = filler( tbl )
3491 rx = rcParams['scantable.reference']
3492 r.setreferenceexpr(rx)
3493 #r = filler(tbl)
3494 #rx = rcParams['scantable.reference']
3495 #r.setreferenceexpr(rx)
3496 msg = "Importing %s..." % (name)
3497 asaplog.push(msg, False)
3498 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3499 r.open(name, opts)# antenna, -1, -1, getpt)
3500 r.fill()
3501 if average:
3502 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3503 if not first:
3504 tbl = self._math._merge([self, tbl])
3505 Scantable.__init__(self, tbl)
3506 r.close()
3507 del r, tbl
3508 first = False
3509 #flush log
3510 asaplog.post()
3511 if unit is not None:
3512 self.set_fluxunit(unit)
3513 if not is_casapy():
3514 self.set_freqframe(rcParams['scantable.freqframe'])
3515
3516
3517 def __getitem__(self, key):
3518 if key < 0:
3519 key += self.nrow()
3520 if key >= self.nrow():
3521 raise IndexError("Row index out of range.")
3522 return self._getspectrum(key)
3523
3524 def __setitem__(self, key, value):
3525 if key < 0:
3526 key += self.nrow()
3527 if key >= self.nrow():
3528 raise IndexError("Row index out of range.")
3529 if not hasattr(value, "__len__") or \
3530 len(value) > self.nchan(self.getif(key)):
3531 raise ValueError("Spectrum length doesn't match.")
3532 return self._setspectrum(value, key)
3533
3534 def __len__(self):
3535 return self.nrow()
3536
3537 def __iter__(self):
3538 for i in range(len(self)):
3539 yield self[i]
Note: See TracBrowser for help on using the repository browser.