source: trunk/python/scantable.py@ 2342

Last change on this file since 2342 was 2322, checked in by Malte Marquarding, 13 years ago

Fixed another instance of inserting a new method argument. THIS NEEDS TO GO TO THE END!

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.7 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped callable may modify the selection freely; whatever
    selection was active on entry is re-applied on exit, even when the
    call raises an exception.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # The first line of table.info names the casacore table flavour.
    # Any table that is not a MeasurementSet is treated as a scantable.
    f = open(filename + '/table.info')
    try:
        # try/finally so the handle is closed even if readline() raises
        l = f.readline()
    finally:
        f.close()
    return l.find('Measurement Set') == -1
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # The first line of table.info names the casacore table flavour.
    f = open(filename + '/table.info')
    try:
        # try/finally so the handle is closed even if readline() raises
        l = f.readline()
    finally:
        f.close()
    return l.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

              If an element contains the same integer values, the input 'edge'
              parameter can be given in a simpler shape in the following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be replaced
                      to single integers.
                      e.g. [[15,15]] can be simplified to [15]
                      (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                      or [[5,5],10], but can NOT be simplified to [5,10].
                      when [5,10] given, it is interpreted as
                      [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -----------------------------------------------------
    # NOTE: the raise statements below use the call form raise E(msg),
    # consistent with the rest of this module (the old 'raise E, msg'
    # statement form is Python-2-only syntax).
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise trivial shapes ------------------------------------
    if isinstance(edge, int):
        edge = [edge, edge]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]    # e.g. [1] => [1,1]

    # commonedge: True when one [left,right] pair applies to all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten: single ints expand to a symmetric [n,n] pair
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a failed fit: log it in verbose mode, otherwise re-raise."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Encode progress-reporting options as a 'true|false,<minnrow>' string."""
    flag = str(showprogress).lower()
    return '%s,%s' % (flag, minnrow)
175
176class scantable(Scantable):
177 """\
178 The ASAP container for scans (single-dish data).
179 """
180
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # Historical getpt/antenna handling, kept for reference; these
        # options are now passed through **args to the filler (see the
        # MS branch below).
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the argument values for the history entry below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference to an existing scantable: share the underlying table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: forward only the recognised filler
                    # options ('getpt', 'antenna') from **args
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # anything else on disk: assume rpfits/sdfits
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # list/tuple of file names: fill them all into one scantable
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
286
287 @asaplog_post_dec
288 def save(self, name=None, format=None, overwrite=False):
289 """\
290 Store the scantable on disk. This can be an asap (aips++) Table,
291 SDFITS or MS2 format.
292
293 Parameters:
294
295 name: the name of the outputfile. For format "ASCII"
296 this is the root file name (data in 'name'.txt
297 and header in 'name'_header.txt)
298
299 format: an optional file format. Default is ASAP.
300 Allowed are:
301
302 * 'ASAP' (save as ASAP [aips++] Table),
303 * 'SDFITS' (save as SDFITS file)
304 * 'ASCII' (saves as ascii text file)
305 * 'MS2' (saves as an casacore MeasurementSet V2)
306 * 'FITS' (save as image FITS - not readable by
307 class)
308 * 'CLASS' (save as FITS readable by CLASS)
309
310 overwrite: If the file should be overwritten if it exists.
311 The default False is to return with warning
312 without writing the output. USE WITH CARE.
313
314 Example::
315
316 scan.save('myscan.asap')
317 scan.save('myscan.sdfits', 'SDFITS')
318
319 """
320 from os import path
321 format = format or rcParams['scantable.save']
322 suffix = '.'+format.lower()
323 if name is None or name == "":
324 name = 'scantable'+suffix
325 msg = "No filename given. Using default name %s..." % name
326 asaplog.push(msg)
327 name = path.expandvars(name)
328 if path.isfile(name) or path.isdir(name):
329 if not overwrite:
330 msg = "File %s exists." % name
331 raise IOError(msg)
332 format2 = format.upper()
333 if format2 == 'ASAP':
334 self._save(name)
335 elif format2 == 'MS2':
336 msopt = {'ms': {'overwrite': overwrite } }
337 from asap._asap import mswriter
338 writer = mswriter( self )
339 writer.write( name, msopt )
340 else:
341 from asap._asap import stwriter as stw
342 writer = stw(format2)
343 writer.write(self, name)
344 return
345
346 def copy(self):
347 """Return a copy of this scantable.
348
349 *Note*:
350
351 This makes a full (deep) copy. scan2 = scan1 makes a reference.
352
353 Example::
354
355 copiedscan = scan.copy()
356
357 """
358 sd = scantable(Scantable._copy(self))
359 return sd
360
361 def drop_scan(self, scanid=None):
362 """\
363 Return a new scantable where the specified scan number(s) has(have)
364 been dropped.
365
366 Parameters:
367
368 scanid: a (list of) scan number(s)
369
370 """
371 from asap import _is_sequence_or_number as _is_valid
372 from asap import _to_list
373 from asap import unique
374 if not _is_valid(scanid):
375 raise RuntimeError( 'Please specify a scanno to drop from the'
376 ' scantable' )
377 scanid = _to_list(scanid)
378 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
379 for sid in scanid: allscans.remove(sid)
380 if len(allscans) == 0:
381 raise ValueError("Can't remove all scans")
382 sel = selector(scans=allscans)
383 return self._select_copy(sel)
384
385 def _select_copy(self, selection):
386 orig = self.get_selection()
387 self.set_selection(orig+selection)
388 cp = self.copy()
389 self.set_selection(orig)
390 return cp
391
392 def get_scan(self, scanid=None):
393 """\
394 Return a specific scan (by scanno) or collection of scans (by
395 source name) in a new scantable.
396
397 *Note*:
398
399 See scantable.drop_scan() for the inverse operation.
400
401 Parameters:
402
403 scanid: a (list of) scanno or a source name, unix-style
404 patterns are accepted for source name matching, e.g.
405 '*_R' gets all 'ref scans
406
407 Example::
408
409 # get all scans containing the source '323p459'
410 newscan = scan.get_scan('323p459')
411 # get all 'off' scans
412 refscans = scan.get_scan('*_R')
413 # get a susbset of scans by scanno (as listed in scan.summary())
414 newscan = scan.get_scan([0, 2, 7, 10])
415
416 """
417 if scanid is None:
418 raise RuntimeError( 'Please specify a scan no or name to '
419 'retrieve from the scantable' )
420 try:
421 bsel = self.get_selection()
422 sel = selector()
423 if type(scanid) is str:
424 sel.set_name(scanid)
425 return self._select_copy(sel)
426 elif type(scanid) is int:
427 sel.set_scans([scanid])
428 return self._select_copy(sel)
429 elif type(scanid) is list:
430 sel.set_scans(scanid)
431 return self._select_copy(sel)
432 else:
433 msg = "Illegal scanid type, use 'int' or 'list' if ints."
434 raise TypeError(msg)
435 except RuntimeError:
436 raise
437
438 def __str__(self):
439 tempFile = tempfile.NamedTemporaryFile()
440 Scantable._summary(self, tempFile.name)
441 tempFile.seek(0)
442 asaplog.clear()
443 return tempFile.file.read()
444
445 @asaplog_post_dec
446 def summary(self, filename=None):
447 """\
448 Print a summary of the contents of this scantable.
449
450 Parameters:
451
452 filename: the name of a file to write the putput to
453 Default - no file output
454
455 """
456# info = Scantable._summary(self)
457 if filename is not None:
458 if filename is "":
459 filename = 'scantable_summary.txt'
460 from os.path import expandvars, isdir
461 filename = expandvars(filename)
462# if not isdir(filename):
463# data = open(filename, 'w')
464# data.write(info)
465# data.close()
466# else:
467 if isdir(filename):
468 msg = "Illegal file name '%s'." % (filename)
469 raise IOError(msg)
470 else:
471 filename = ""
472 Scantable._summary(self, filename)
473# info = Scantable._summary(self, filename)
474# return page(info)
475
476 def get_spectrum(self, rowno):
477 """Return the spectrum for the current row in the scantable as a list.
478
479 Parameters:
480
481 rowno: the row number to retrieve the spectrum from
482
483 """
484 return self._getspectrum(rowno)
485
486 def get_mask(self, rowno):
487 """Return the mask for the current row in the scantable as a list.
488
489 Parameters:
490
491 rowno: the row number to retrieve the mask from
492
493 """
494 return self._getmask(rowno)
495
496 def set_spectrum(self, spec, rowno):
497 """Set the spectrum for the current row in the scantable.
498
499 Parameters:
500
501 spec: the new spectrum
502
503 rowno: the row number to set the spectrum for
504
505 """
506 assert(len(spec) == self.nchan())
507 return self._setspectrum(spec, rowno)
508
509 def get_coordinate(self, rowno):
510 """Return the (spectral) coordinate for a a given 'rowno'.
511
512 *Note*:
513
514 * This coordinate is only valid until a scantable method modifies
515 the frequency axis.
516 * This coordinate does contain the original frequency set-up
517 NOT the new frame. The conversions however are done using the user
518 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
519 use scantable.freq_align first. Without it there is no closure,
520 i.e.::
521
522 c = myscan.get_coordinate(0)
523 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
524
525 Parameters:
526
527 rowno: the row number for the spectral coordinate
528
529 """
530 return coordinate(Scantable.get_coordinate(self, rowno))
531
532 def get_selection(self):
533 """\
534 Get the selection object currently set on this scantable.
535
536 Example::
537
538 sel = scan.get_selection()
539 sel.set_ifs(0) # select IF 0
540 scan.set_selection(sel) # apply modified selection
541
542 """
543 return selector(self._getselection())
544
545 def set_selection(self, selection=None, **kw):
546 """\
547 Select a subset of the data. All following operations on this scantable
548 are only applied to thi selection.
549
550 Parameters:
551
552 selection: a selector object (default unset the selection), or
553 any combination of "pols", "ifs", "beams", "scans",
554 "cycles", "name", "query"
555
556 Examples::
557
558 sel = selector() # create a selection object
559 self.set_scans([0, 3]) # select SCANNO 0 and 3
560 scan.set_selection(sel) # set the selection
561 scan.summary() # will only print summary of scanno 0 an 3
562 scan.set_selection() # unset the selection
563 # or the equivalent
564 scan.set_selection(scans=[0,3])
565 scan.summary() # will only print summary of scanno 0 an 3
566 scan.set_selection() # unset the selection
567
568 """
569 if selection is None:
570 # reset
571 if len(kw) == 0:
572 selection = selector()
573 else:
574 # try keywords
575 for k in kw:
576 if k not in selector.fields:
577 raise KeyError("Invalid selection key '%s', "
578 "valid keys are %s" % (k,
579 selector.fields))
580 selection = selector(**kw)
581 self._setselection(selection)
582
583 def get_row(self, row=0, insitu=None):
584 """\
585 Select a row in the scantable.
586 Return a scantable with single row.
587
588 Parameters:
589
590 row: row no of integration, default is 0.
591 insitu: if False a new scantable is returned. Otherwise, the
592 scaling is done in-situ. The default is taken from .asaprc
593 (False)
594
595 """
596 if insitu is None: insitu = rcParams['insitu']
597 if not insitu:
598 workscan = self.copy()
599 else:
600 workscan = self
601 # Select a row
602 sel=selector()
603 sel.set_rows([row])
604 #sel.set_scans([workscan.getscan(row)])
605 #sel.set_cycles([workscan.getcycle(row)])
606 #sel.set_beams([workscan.getbeam(row)])
607 #sel.set_ifs([workscan.getif(row)])
608 #sel.set_polarisations([workscan.getpol(row)])
609 #sel.set_name(workscan._getsourcename(row))
610 workscan.set_selection(sel)
611 if not workscan.nrow() == 1:
612 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
613 raise RuntimeError(msg)
614 del sel
615 if insitu:
616 self._assign(workscan)
617 else:
618 return workscan
619
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq) of the
        # extremum rather than the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # remember the channel of the extremum for the report below
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable, per-row report of the statistic
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa mode: the returned value is qx, the
                    # spectral value at the extremum goes into the report
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # NOTE(review): the report is round-tripped through a fixed,
        # per-user file in /tmp before being pushed to the log; uses
        # Python-2-only constructs (has_key, print >>, commands module)
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
727
728 def chan2data(self, rowno=0, chan=0):
729 """\
730 Returns channel/frequency/velocity and spectral value
731 at an arbitrary row and channel in the scantable.
732
733 Parameters:
734
735 rowno: a row number in the scantable. Default is the
736 first row, i.e. rowno=0
737
738 chan: a channel in the scantable. Default is the first
739 channel, i.e. pos=0
740
741 """
742 if isinstance(rowno, int) and isinstance(chan, int):
743 qx = {'unit': self.get_unit(),
744 'value': self._getabcissa(rowno)[chan]}
745 qy = {'unit': self.get_fluxunit(),
746 'value': self._getspectrum(rowno)[chan]}
747 return qx, qy
748
749 def stddev(self, mask=None):
750 """\
751 Determine the standard deviation of the current beam/if/pol
752 Takes a 'mask' as an optional parameter to specify which
753 channels should be excluded.
754
755 Parameters:
756
757 mask: an optional mask specifying where the standard
758 deviation should be determined.
759
760 Example::
761
762 scan.set_unit('channel')
763 msk = scan.create_mask([100, 200], [500, 600])
764 scan.stddev(mask=m)
765
766 """
767 return self.stats(stat='stddev', mask=mask);
768
769
770 def get_column_names(self):
771 """\
772 Return a list of column names, which can be used for selection.
773 """
774 return list(Scantable.get_column_names(self))
775
776 def get_tsys(self, row=-1):
777 """\
778 Return the System temperatures.
779
780 Parameters:
781
782 row: the rowno to get the information for. (default all rows)
783
784 Returns:
785
786 a list of Tsys values for the current selection
787
788 """
789 if row > -1:
790 return self._get_column(self._gettsys, row)
791 return self._row_callback(self._gettsys, "Tsys")
792
793
794 def get_weather(self, row=-1):
795 """\
796 Return the weather informations.
797
798 Parameters:
799
800 row: the rowno to get the information for. (default all rows)
801
802 Returns:
803
804 a dict or list of of dicts of values for the current selection
805
806 """
807
808 values = self._get_column(self._get_weather, row)
809 if row > -1:
810 return {'temperature': values[0],
811 'pressure': values[1], 'humidity' : values[2],
812 'windspeed' : values[3], 'windaz' : values[4]
813 }
814 else:
815 out = []
816 for r in values:
817
818 out.append({'temperature': r[0],
819 'pressure': r[1], 'humidity' : r[2],
820 'windspeed' : r[3], 'windaz' : r[4]
821 })
822 return out
823
    def _row_callback(self, callback, label):
        """Apply *callback* to every row, log a formatted per-row report
        under *label*, and return the list of callback results."""
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
847
848 def _get_column(self, callback, row=-1, *args):
849 """
850 """
851 if row == -1:
852 return [callback(i, *args) for i in range(self.nrow())]
853 else:
854 if 0 <= row < self.nrow():
855 return callback(row, *args)
856
857
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the sub-second/second/minute fields the requested
            # precision does not cover
            # NOTE(review): (6-prec)/2 relies on Python 2 integer
            # division — would need // under Python 3
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
901
902
903 def get_inttime(self, row=-1):
904 """\
905 Get a list of integration times for the observations.
906 Return a time in seconds for each integration in the scantable.
907
908 Parameters:
909
910 row: row no of integration. Default -1 return all rows.
911
912 """
913 return self._get_column(self._getinttime, row)
914
915
916 def get_sourcename(self, row=-1):
917 """\
918 Get a list source names for the observations.
919 Return a string for each integration in the scantable.
920 Parameters:
921
922 row: row no of integration. Default -1 return all rows.
923
924 """
925 return self._get_column(self._getsourcename, row)
926
927 def get_elevation(self, row=-1):
928 """\
929 Get a list of elevations for the observations.
930 Return a float for each integration in the scantable.
931
932 Parameters:
933
934 row: row no of integration. Default -1 return all rows.
935
936 """
937 return self._get_column(self._getelevation, row)
938
939 def get_azimuth(self, row=-1):
940 """\
941 Get a list of azimuths for the observations.
942 Return a float for each integration in the scantable.
943
944 Parameters:
945 row: row no of integration. Default -1 return all rows.
946
947 """
948 return self._get_column(self._getazimuth, row)
949
950 def get_parangle(self, row=-1):
951 """\
952 Get a list of parallactic angles for the observations.
953 Return a float for each integration in the scantable.
954
955 Parameters:
956
957 row: row no of integration. Default -1 return all rows.
958
959 """
960 return self._get_column(self._getparangle, row)
961
962 def get_direction(self, row=-1):
963 """
964 Get a list of Positions on the sky (direction) for the observations.
965 Return a string for each integration in the scantable.
966
967 Parameters:
968
969 row: row no of integration. Default -1 return all rows
970
971 """
972 return self._get_column(self._getdirection, row)
973
974 def get_directionval(self, row=-1):
975 """\
976 Get a list of Positions on the sky (direction) for the observations.
977 Return a float for each integration in the scantable.
978
979 Parameters:
980
981 row: row no of integration. Default -1 return all rows
982
983 """
984 return self._get_column(self._getdirectionvec, row)
985
986 @asaplog_post_dec
987 def set_unit(self, unit='channel'):
988 """\
989 Set the unit for all following operations on this scantable
990
991 Parameters:
992
993 unit: optional unit, default is 'channel'. Use one of '*Hz',
994 'km/s', 'channel' or equivalent ''
995
996 """
997 varlist = vars()
998 if unit in ['', 'pixel', 'channel']:
999 unit = ''
1000 inf = list(self._getcoordinfo())
1001 inf[0] = unit
1002 self._setcoordinfo(inf)
1003 self._add_history("set_unit", varlist)
1004
1005 @asaplog_post_dec
1006 def set_instrument(self, instr):
1007 """\
1008 Set the instrument for subsequent processing.
1009
1010 Parameters:
1011
1012 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1013 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1014
1015 """
1016 self._setInstrument(instr)
1017 self._add_history("set_instument", vars())
1018
1019 @asaplog_post_dec
1020 def set_feedtype(self, feedtype):
1021 """\
1022 Overwrite the feed type, which might not be set correctly.
1023
1024 Parameters:
1025
1026 feedtype: 'linear' or 'circular'
1027
1028 """
1029 self._setfeedtype(feedtype)
1030 self._add_history("set_feedtype", vars())
1031
1032 @asaplog_post_dec
1033 def set_doppler(self, doppler='RADIO'):
1034 """\
1035 Set the doppler for all following operations on this scantable.
1036
1037 Parameters:
1038
1039 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1040
1041 """
1042 varlist = vars()
1043 inf = list(self._getcoordinfo())
1044 inf[2] = doppler
1045 self._setcoordinfo(inf)
1046 self._add_history("set_doppler", vars())
1047
1048 @asaplog_post_dec
1049 def set_freqframe(self, frame=None):
1050 """\
1051 Set the frame type of the Spectral Axis.
1052
1053 Parameters:
1054
1055 frame: an optional frame type, default 'LSRK'. Valid frames are:
1056 'TOPO', 'LSRD', 'LSRK', 'BARY',
1057 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1058
1059 Example::
1060
1061 scan.set_freqframe('BARY')
1062
1063 """
1064 frame = frame or rcParams['scantable.freqframe']
1065 varlist = vars()
1066 # "REST" is not implemented in casacore
1067 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1068 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1069 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1070 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1071
1072 if frame in valid:
1073 inf = list(self._getcoordinfo())
1074 inf[1] = frame
1075 self._setcoordinfo(inf)
1076 self._add_history("set_freqframe", varlist)
1077 else:
1078 msg = "Please specify a valid freq type. Valid types are:\n", valid
1079 raise TypeError(msg)
1080
1081 @asaplog_post_dec
1082 def set_dirframe(self, frame=""):
1083 """\
1084 Set the frame type of the Direction on the sky.
1085
1086 Parameters:
1087
1088 frame: an optional frame type, default ''. Valid frames are:
1089 'J2000', 'B1950', 'GALACTIC'
1090
1091 Example:
1092
1093 scan.set_dirframe('GALACTIC')
1094
1095 """
1096 varlist = vars()
1097 Scantable.set_dirframe(self, frame)
1098 self._add_history("set_dirframe", varlist)
1099
1100 def get_unit(self):
1101 """\
1102 Get the default unit set in this scantable
1103
1104 Returns:
1105
1106 A unit string
1107
1108 """
1109 inf = self._getcoordinfo()
1110 unit = inf[0]
1111 if unit == '': unit = 'channel'
1112 return unit
1113
1114 @asaplog_post_dec
1115 def get_abcissa(self, rowno=0):
1116 """\
1117 Get the abcissa in the current coordinate setup for the currently
1118 selected Beam/IF/Pol
1119
1120 Parameters:
1121
1122 rowno: an optional row number in the scantable. Default is the
1123 first row, i.e. rowno=0
1124
1125 Returns:
1126
1127 The abcissa values and the format string (as a dictionary)
1128
1129 """
1130 abc = self._getabcissa(rowno)
1131 lbl = self._getabcissalabel(rowno)
1132 return abc, lbl
1133
1134 @asaplog_post_dec
1135 def flag(self, mask=None, unflag=False, row=-1):
1136 """\
1137 Flag the selected data using an optional channel mask.
1138
1139 Parameters:
1140
1141 mask: an optional channel mask, created with create_mask. Default
1142 (no mask) is all channels.
1143
1144 unflag: if True, unflag the data
1145
1146 row: an optional row number in the scantable.
1147 Default -1 flags all rows
1148
1149 """
1150 varlist = vars()
1151 mask = mask or []
1152 self._flag(row, mask, unflag)
1153 self._add_history("flag", varlist)
1154
1155 @asaplog_post_dec
1156 def flag_row(self, rows=None, unflag=False):
1157 """\
1158 Flag the selected data in row-based manner.
1159
1160 Parameters:
1161
1162 rows: list of row numbers to be flagged. Default is no row
1163 (must be explicitly specified to execute row-based
1164 flagging).
1165
1166 unflag: if True, unflag the data.
1167
1168 """
1169 varlist = vars()
1170 if rows is None:
1171 rows = []
1172 self._flag_row(rows, unflag)
1173 self._add_history("flag_row", varlist)
1174
1175 @asaplog_post_dec
1176 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1177 """\
1178 Flag the selected data outside a specified range (in channel-base)
1179
1180 Parameters:
1181
1182 uthres: upper threshold.
1183
1184 dthres: lower threshold
1185
1186 clipoutside: True for flagging data outside the range
1187 [dthres:uthres].
1188 False for flagging data inside the range.
1189
1190 unflag: if True, unflag the data.
1191
1192 """
1193 varlist = vars()
1194 self._clip(uthres, dthres, clipoutside, unflag)
1195 self._add_history("clip", varlist)
1196
1197 @asaplog_post_dec
1198 def lag_flag(self, start, end, unit="MHz", insitu=None):
1199 """\
1200 Flag the data in 'lag' space by providing a frequency to remove.
1201 Flagged data in the scantable get interpolated over the region.
1202 No taper is applied.
1203
1204 Parameters:
1205
1206 start: the start frequency (really a period within the
1207 bandwidth) or period to remove
1208
1209 end: the end frequency or period to remove
1210
1211 unit: the frequency unit (default "MHz") or "" for
1212 explicit lag channels
1213
1214 *Notes*:
1215
1216 It is recommended to flag edges of the band or strong
1217 signals beforehand.
1218
1219 """
1220 if insitu is None: insitu = rcParams['insitu']
1221 self._math._setinsitu(insitu)
1222 varlist = vars()
1223 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1224 if not (unit == "" or base.has_key(unit)):
1225 raise ValueError("%s is not a valid unit." % unit)
1226 if unit == "":
1227 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1228 else:
1229 s = scantable(self._math._lag_flag(self, start*base[unit],
1230 end*base[unit], "frequency"))
1231 s._add_history("lag_flag", varlist)
1232 if insitu:
1233 self._assign(s)
1234 else:
1235 return s
1236
1237 @asaplog_post_dec
1238 def fft(self, rowno=[], mask=[], getrealimag=False):
1239 """\
1240 Apply FFT to the spectra.
1241 Flagged data in the scantable get interpolated over the region.
1242
1243 Parameters:
1244
1245 rowno: The row number(s) to be processed. int, list
1246 and tuple are accepted. By default ([]), FFT
1247 is applied to the whole data.
1248
1249 mask: Auxiliary channel mask(s). Given as a boolean
1250 list, it is applied to all specified rows.
1251 A list of boolean lists can also be used to
1252 apply different masks. In the latter case, the
1253 length of 'mask' must be the same as that of
1254 'rowno'. The default is [].
1255
1256 getrealimag: If True, returns the real and imaginary part
1257 values of the complex results.
1258 If False (the default), returns the amplitude
1259 (absolute value) normalised with Ndata/2 and
1260 phase (argument, in unit of radian).
1261
1262 Returns:
1263
1264 A list of dictionaries containing the results for each spectrum.
1265 Each dictionary contains two values, the real and the imaginary
1266 parts when getrealimag = True, or the amplitude(absolute value)
1267 and the phase(argument) when getrealimag = False. The key for
1268 these values are 'real' and 'imag', or 'ampl' and 'phase',
1269 respectively.
1270 """
1271 if isinstance(rowno, int):
1272 rowno = [rowno]
1273 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1274 raise TypeError("The row number(s) must be int, list or tuple.")
1275
1276 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1277
1278 if not (isinstance(mask, list) or isinstance(mask, tuple)):
1279 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1280 if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1281 if isinstance(mask[0], bool): mask = [mask]
1282 elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1283 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1284
1285 usecommonmask = (len(mask) == 1)
1286 if not usecommonmask:
1287 if len(mask) != len(rowno):
1288 raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1289 for amask in mask:
1290 if len(amask) != self.nchan():
1291 raise ValueError("The spectra and the mask have different length.")
1292
1293 res = []
1294
1295 imask = 0
1296 for whichrow in rowno:
1297 fspec = self._fft(whichrow, mask[imask], getrealimag)
1298 nspec = len(fspec)
1299
1300 i=0
1301 v1=[]
1302 v2=[]
1303 reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
1304
1305 while (i < nspec):
1306 v1.append(fspec[i])
1307 v2.append(fspec[i+1])
1308 i+=2
1309
1310 if getrealimag:
1311 reselem["real"] += v1
1312 reselem["imag"] += v2
1313 else:
1314 reselem["ampl"] += v1
1315 reselem["phase"] += v2
1316
1317 res.append(reselem)
1318
1319 if not usecommonmask: imask += 1
1320
1321 return res
1322
1323 @asaplog_post_dec
1324 def create_mask(self, *args, **kwargs):
1325 """\
1326 Compute and return a mask based on [min, max] windows.
1327 The specified windows are to be INCLUDED, when the mask is
1328 applied.
1329
1330 Parameters:
1331
1332 [min, max], [min2, max2], ...
1333 Pairs of start/end points (inclusive)specifying the regions
1334 to be masked
1335
1336 invert: optional argument. If specified as True,
1337 return an inverted mask, i.e. the regions
1338 specified are EXCLUDED
1339
1340 row: create the mask using the specified row for
1341 unit conversions, default is row=0
1342 only necessary if frequency varies over rows.
1343
1344 Examples::
1345
1346 scan.set_unit('channel')
1347 # a)
1348 msk = scan.create_mask([400, 500], [800, 900])
1349 # masks everything outside 400 and 500
1350 # and 800 and 900 in the unit 'channel'
1351
1352 # b)
1353 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1354 # masks the regions between 400 and 500
1355 # and 800 and 900 in the unit 'channel'
1356
1357 # c)
1358 #mask only channel 400
1359 msk = scan.create_mask([400])
1360
1361 """
1362 row = kwargs.get("row", 0)
1363 data = self._getabcissa(row)
1364 u = self._getcoordinfo()[0]
1365 if u == "":
1366 u = "channel"
1367 msg = "The current mask window unit is %s" % u
1368 i = self._check_ifs()
1369 if not i:
1370 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1371 asaplog.push(msg)
1372 n = self.nchan()
1373 msk = _n_bools(n, False)
1374 # test if args is a 'list' or a 'normal *args - UGLY!!!
1375
1376 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1377 and args or args[0]
1378 for window in ws:
1379 if len(window) == 1:
1380 window = [window[0], window[0]]
1381 if len(window) == 0 or len(window) > 2:
1382 raise ValueError("A window needs to be defined as [start(, end)]")
1383 if window[0] > window[1]:
1384 tmp = window[0]
1385 window[0] = window[1]
1386 window[1] = tmp
1387 for i in range(n):
1388 if data[i] >= window[0] and data[i] <= window[1]:
1389 msk[i] = True
1390 if kwargs.has_key('invert'):
1391 if kwargs.get('invert'):
1392 msk = mask_not(msk)
1393 return msk
1394
1395 def get_masklist(self, mask=None, row=0, silent=False):
1396 """\
1397 Compute and return a list of mask windows, [min, max].
1398
1399 Parameters:
1400
1401 mask: channel mask, created with create_mask.
1402
1403 row: calcutate the masklist using the specified row
1404 for unit conversions, default is row=0
1405 only necessary if frequency varies over rows.
1406
1407 Returns:
1408
1409 [min, max], [min2, max2], ...
1410 Pairs of start/end points (inclusive)specifying
1411 the masked regions
1412
1413 """
1414 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1415 raise TypeError("The mask should be list or tuple.")
1416 if len(mask) < 2:
1417 raise TypeError("The mask elements should be > 1")
1418 if self.nchan() != len(mask):
1419 msg = "Number of channels in scantable != number of mask elements"
1420 raise TypeError(msg)
1421 data = self._getabcissa(row)
1422 u = self._getcoordinfo()[0]
1423 if u == "":
1424 u = "channel"
1425 msg = "The current mask window unit is %s" % u
1426 i = self._check_ifs()
1427 if not i:
1428 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1429 if not silent:
1430 asaplog.push(msg)
1431 masklist=[]
1432 ist, ien = None, None
1433 ist, ien=self.get_mask_indices(mask)
1434 if ist is not None and ien is not None:
1435 for i in xrange(len(ist)):
1436 range=[data[ist[i]],data[ien[i]]]
1437 range.sort()
1438 masklist.append([range[0],range[1]])
1439 return masklist
1440
1441 def get_mask_indices(self, mask=None):
1442 """\
1443 Compute and Return lists of mask start indices and mask end indices.
1444
1445 Parameters:
1446
1447 mask: channel mask, created with create_mask.
1448
1449 Returns:
1450
1451 List of mask start indices and that of mask end indices,
1452 i.e., [istart1,istart2,....], [iend1,iend2,....].
1453
1454 """
1455 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1456 raise TypeError("The mask should be list or tuple.")
1457 if len(mask) < 2:
1458 raise TypeError("The mask elements should be > 1")
1459 istart=[]
1460 iend=[]
1461 if mask[0]: istart.append(0)
1462 for i in range(len(mask)-1):
1463 if not mask[i] and mask[i+1]:
1464 istart.append(i+1)
1465 elif mask[i] and not mask[i+1]:
1466 iend.append(i)
1467 if mask[len(mask)-1]: iend.append(len(mask)-1)
1468 if len(istart) != len(iend):
1469 raise RuntimeError("Numbers of mask start != mask end.")
1470 for i in range(len(istart)):
1471 if istart[i] > iend[i]:
1472 raise RuntimeError("Mask start index > mask end index")
1473 break
1474 return istart,iend
1475
    @asaplog_post_dec
    def parse_maskexpr(self,maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): a non-string input only logs an error and then falls
        # through - execution continues with the bad value; confirm intended
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels);
        # str(list)[1:-1] yields e.g. "0, 1, 2" which re-enters the parser
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                               offset=1,minval=min(valid_ifs),
                                               maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            # NOTE(review): if spwmasklist were empty, 'spwlist' would be
            # unbound here and this del would raise NameError - verify
            # _parse_selection always returns at least one pair
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                # NOTE(review): this branch is unreachable - an empty
                # selspws list means this loop body never runs at all
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to obtain its
                        # frequency coordinate and abcissa values
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset is half a channel width in the current unit
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None for whole-spectrum selections
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "   IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1602
1603 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1604 """
1605 Parameters:
1606 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1607 typestr : The type of the values in returned list
1608 ('integer' or 'float')
1609 offset : The offset value to subtract from or add to
1610 the boundary value if the selection string
1611 includes '<' or '>'
1612 minval, maxval : The minimum/maximum values to set if the
1613 selection string includes '<' or '>'.
1614 The list element is filled with None by default.
1615 Returns:
1616 A list of min/max pair of selections.
1617 Example:
1618 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1619 returns [[0,2],[5,7],[9,9]]
1620 """
1621 selgroups = selstr.split(';')
1622 sellists = []
1623 if typestr.lower().startswith('int'):
1624 formatfunc = int
1625 else:
1626 formatfunc = float
1627
1628 for currsel in selgroups:
1629 if currsel.find('~') > 0:
1630 minsel = formatfunc(currsel.split('~')[0].strip())
1631 maxsel = formatfunc(currsel.split('~')[1].strip())
1632 elif currsel.strip().startswith('<'):
1633 minsel = minval
1634 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1635 - formatfunc(offset)
1636 elif currsel.strip().startswith('>'):
1637 minsel = formatfunc(currsel.split('>')[1].strip()) \
1638 + formatfunc(offset)
1639 maxsel = maxval
1640 else:
1641 minsel = formatfunc(currsel)
1642 maxsel = formatfunc(currsel)
1643 sellists.append([minsel,maxsel])
1644 return sellists
1645
1646# def get_restfreqs(self):
1647# """
1648# Get the restfrequency(s) stored in this scantable.
1649# The return value(s) are always of unit 'Hz'
1650# Parameters:
1651# none
1652# Returns:
1653# a list of doubles
1654# """
1655# return list(self._getrestfreqs())
1656
1657 def get_restfreqs(self, ids=None):
1658 """\
1659 Get the restfrequency(s) stored in this scantable.
1660 The return value(s) are always of unit 'Hz'
1661
1662 Parameters:
1663
1664 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1665 be retrieved
1666
1667 Returns:
1668
1669 dictionary containing ids and a list of doubles for each id
1670
1671 """
1672 if ids is None:
1673 rfreqs={}
1674 idlist = self.getmolnos()
1675 for i in idlist:
1676 rfreqs[i]=list(self._getrestfreqs(i))
1677 return rfreqs
1678 else:
1679 if type(ids)==list or type(ids)==tuple:
1680 rfreqs={}
1681 for i in ids:
1682 rfreqs[i]=list(self._getrestfreqs(i))
1683 return rfreqs
1684 else:
1685 return list(self._getrestfreqs(ids))
1686 #return list(self._getrestfreqs(ids))
1687
1688 def set_restfreqs(self, freqs=None, unit='Hz'):
1689 """\
1690 Set or replace the restfrequency specified and
1691 if the 'freqs' argument holds a scalar,
1692 then that rest frequency will be applied to all the selected
1693 data. If the 'freqs' argument holds
1694 a vector, then it MUST be of equal or smaller length than
1695 the number of IFs (and the available restfrequencies will be
1696 replaced by this vector). In this case, *all* data have
1697 the restfrequency set per IF according
1698 to the corresponding value you give in the 'freqs' vector.
1699 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1700 IF 1 gets restfreq 2e9.
1701
1702 You can also specify the frequencies via a linecatalog.
1703
1704 Parameters:
1705
1706 freqs: list of rest frequency values or string idenitfiers
1707
1708 unit: unit for rest frequency (default 'Hz')
1709
1710
1711 Example::
1712
1713 # set the given restfrequency for the all currently selected IFs
1714 scan.set_restfreqs(freqs=1.4e9)
1715 # set restfrequencies for the n IFs (n > 1) in the order of the
1716 # list, i.e
1717 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1718 # len(list_of_restfreqs) == nIF
1719 # for nIF == 1 the following will set multiple restfrequency for
1720 # that IF
1721 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1722 # set multiple restfrequencies per IF. as a list of lists where
1723 # the outer list has nIF elements, the inner s arbitrary
1724 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1725
1726 *Note*:
1727
1728 To do more sophisticate Restfrequency setting, e.g. on a
1729 source and IF basis, use scantable.set_selection() before using
1730 this function::
1731
1732 # provided your scantable is called scan
1733 selection = selector()
1734 selection.set_name("ORION*")
1735 selection.set_ifs([1])
1736 scan.set_selection(selection)
1737 scan.set_restfreqs(freqs=86.6e9)
1738
1739 """
1740 varlist = vars()
1741 from asap import linecatalog
1742 # simple value
1743 if isinstance(freqs, int) or isinstance(freqs, float):
1744 self._setrestfreqs([freqs], [""], unit)
1745 # list of values
1746 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1747 # list values are scalars
1748 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1749 if len(freqs) == 1:
1750 self._setrestfreqs(freqs, [""], unit)
1751 else:
1752 # allow the 'old' mode of setting mulitple IFs
1753 sel = selector()
1754 savesel = self._getselection()
1755 iflist = self.getifnos()
1756 if len(freqs)>len(iflist):
1757 raise ValueError("number of elements in list of list "
1758 "exeeds the current IF selections")
1759 iflist = self.getifnos()
1760 for i, fval in enumerate(freqs):
1761 sel.set_ifs(iflist[i])
1762 self._setselection(sel)
1763 self._setrestfreqs([fval], [""], unit)
1764 self._setselection(savesel)
1765
1766 # list values are dict, {'value'=, 'name'=)
1767 elif isinstance(freqs[-1], dict):
1768 values = []
1769 names = []
1770 for d in freqs:
1771 values.append(d["value"])
1772 names.append(d["name"])
1773 self._setrestfreqs(values, names, unit)
1774 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1775 sel = selector()
1776 savesel = self._getselection()
1777 iflist = self.getifnos()
1778 if len(freqs)>len(iflist):
1779 raise ValueError("number of elements in list of list exeeds"
1780 " the current IF selections")
1781 for i, fval in enumerate(freqs):
1782 sel.set_ifs(iflist[i])
1783 self._setselection(sel)
1784 self._setrestfreqs(fval, [""], unit)
1785 self._setselection(savesel)
1786 # freqs are to be taken from a linecatalog
1787 elif isinstance(freqs, linecatalog):
1788 sel = selector()
1789 savesel = self._getselection()
1790 for i in xrange(freqs.nrow()):
1791 sel.set_ifs(iflist[i])
1792 self._setselection(sel)
1793 self._setrestfreqs([freqs.get_frequency(i)],
1794 [freqs.get_name(i)], "MHz")
1795 # ensure that we are not iterating past nIF
1796 if i == self.nif()-1: break
1797 self._setselection(savesel)
1798 else:
1799 return
1800 self._add_history("set_restfreqs", varlist)
1801
1802 def shift_refpix(self, delta):
1803 """\
1804 Shift the reference pixel of the Spectra Coordinate by an
1805 integer amount.
1806
1807 Parameters:
1808
1809 delta: the amount to shift by
1810
1811 *Note*:
1812
1813 Be careful using this with broadband data.
1814
1815 """
1816 Scantable.shift_refpix(self, delta)
1817
1818 @asaplog_post_dec
1819 def history(self, filename=None):
1820 """\
1821 Print the history. Optionally to a file.
1822
1823 Parameters:
1824
1825 filename: The name of the file to save the history to.
1826
1827 """
1828 hist = list(self._gethistory())
1829 out = "-"*80
1830 for h in hist:
1831 if h.startswith("---"):
1832 out = "\n".join([out, h])
1833 else:
1834 items = h.split("##")
1835 date = items[0]
1836 func = items[1]
1837 items = items[2:]
1838 out += "\n"+date+"\n"
1839 out += "Function: %s\n Parameters:" % (func)
1840 for i in items:
1841 if i == '':
1842 continue
1843 s = i.split("=")
1844 out += "\n %s = %s" % (s[0], s[1])
1845 out = "\n".join([out, "-"*80])
1846 if filename is not None:
1847 if filename is "":
1848 filename = 'scantable_history.txt'
1849 import os
1850 filename = os.path.expandvars(os.path.expanduser(filename))
1851 if not os.path.isdir(filename):
1852 data = open(filename, 'w')
1853 data.write(out)
1854 data.close()
1855 else:
1856 msg = "Illegal file name '%s'." % (filename)
1857 raise IOError(msg)
1858 return page(out)
1859 #
1860 # Maths business
1861 #
1862 @asaplog_post_dec
1863 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1864 """\
1865 Return the (time) weighted average of a scan.
1866
1867 *Note*:
1868
1869 in channels only - align if necessary
1870
1871 Parameters:
1872
1873 mask: an optional mask (only used for 'var' and 'tsys'
1874 weighting)
1875
1876 scanav: True averages each scan separately
1877 False (default) averages all scans together,
1878
1879 weight: Weighting scheme.
1880 'none' (mean no weight)
1881 'var' (1/var(spec) weighted)
1882 'tsys' (1/Tsys**2 weighted)
1883 'tint' (integration time weighted)
1884 'tintsys' (Tint/Tsys**2)
1885 'median' ( median averaging)
1886 The default is 'tint'
1887
1888 align: align the spectra in velocity before averaging. It takes
1889 the time of the first spectrum as reference time.
1890
1891 Example::
1892
1893 # time average the scantable without using a mask
1894 newscan = scan.average_time()
1895
1896 """
1897 varlist = vars()
1898 weight = weight or 'TINT'
1899 mask = mask or ()
1900 scanav = (scanav and 'SCAN') or 'NONE'
1901 scan = (self, )
1902
1903 if align:
1904 scan = (self.freq_align(insitu=False), )
1905 s = None
1906 if weight.upper() == 'MEDIAN':
1907 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1908 scanav))
1909 else:
1910 s = scantable(self._math._average(scan, mask, weight.upper(),
1911 scanav))
1912 s._add_history("average_time", varlist)
1913 return s
1914
1915 @asaplog_post_dec
1916 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1917 """\
1918 Return a scan where all spectra are converted to either
1919 Jansky or Kelvin depending upon the flux units of the scan table.
1920 By default the function tries to look the values up internally.
1921 If it can't find them (or if you want to over-ride), you must
1922 specify EITHER jyperk OR eta (and D which it will try to look up
1923 also if you don't set it). jyperk takes precedence if you set both.
1924
1925 Parameters:
1926
1927 jyperk: the Jy / K conversion factor
1928
1929 eta: the aperture efficiency
1930
1931 d: the geometric diameter (metres)
1932
1933 insitu: if False a new scantable is returned.
1934 Otherwise, the scaling is done in-situ
1935 The default is taken from .asaprc (False)
1936
1937 """
1938 if insitu is None: insitu = rcParams['insitu']
1939 self._math._setinsitu(insitu)
1940 varlist = vars()
1941 jyperk = jyperk or -1.0
1942 d = d or -1.0
1943 eta = eta or -1.0
1944 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1945 s._add_history("convert_flux", varlist)
1946 if insitu: self._assign(s)
1947 else: return s
1948
1949 @asaplog_post_dec
1950 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1951 """\
1952 Return a scan after applying a gain-elevation correction.
1953 The correction can be made via either a polynomial or a
1954 table-based interpolation (and extrapolation if necessary).
1955 You specify polynomial coefficients, an ascii table or neither.
1956 If you specify neither, then a polynomial correction will be made
1957 with built in coefficients known for certain telescopes (an error
1958 will occur if the instrument is not known).
1959 The data and Tsys are *divided* by the scaling factors.
1960
1961 Parameters:
1962
1963 poly: Polynomial coefficients (default None) to compute a
1964 gain-elevation correction as a function of
1965 elevation (in degrees).
1966
1967 filename: The name of an ascii file holding correction factors.
1968 The first row of the ascii file must give the column
1969 names and these MUST include columns
1970 "ELEVATION" (degrees) and "FACTOR" (multiply data
1971 by this) somewhere.
1972 The second row must give the data type of the
1973 column. Use 'R' for Real and 'I' for Integer.
1974 An example file would be
1975 (actual factors are arbitrary) :
1976
1977 TIME ELEVATION FACTOR
1978 R R R
1979 0.1 0 0.8
1980 0.2 20 0.85
1981 0.3 40 0.9
1982 0.4 60 0.85
1983 0.5 80 0.8
1984 0.6 90 0.75
1985
1986 method: Interpolation method when correcting from a table.
1987 Values are "nearest", "linear" (default), "cubic"
1988 and "spline"
1989
1990 insitu: if False a new scantable is returned.
1991 Otherwise, the scaling is done in-situ
1992 The default is taken from .asaprc (False)
1993
1994 """
1995
1996 if insitu is None: insitu = rcParams['insitu']
1997 self._math._setinsitu(insitu)
1998 varlist = vars()
1999 poly = poly or ()
2000 from os.path import expandvars
2001 filename = expandvars(filename)
2002 s = scantable(self._math._gainel(self, poly, filename, method))
2003 s._add_history("gain_el", varlist)
2004 if insitu:
2005 self._assign(s)
2006 else:
2007 return s
2008
2009 @asaplog_post_dec
2010 def freq_align(self, reftime=None, method='cubic', insitu=None):
2011 """\
2012 Return a scan where all rows have been aligned in frequency/velocity.
2013 The alignment frequency frame (e.g. LSRK) is that set by function
2014 set_freqframe.
2015
2016 Parameters:
2017
2018 reftime: reference time to align at. By default, the time of
2019 the first row of data is used.
2020
2021 method: Interpolation method for regridding the spectra.
2022 Choose from "nearest", "linear", "cubic" (default)
2023 and "spline"
2024
2025 insitu: if False a new scantable is returned.
2026 Otherwise, the scaling is done in-situ
2027 The default is taken from .asaprc (False)
2028
2029 """
2030 if insitu is None: insitu = rcParams["insitu"]
2031 self._math._setinsitu(insitu)
2032 varlist = vars()
2033 reftime = reftime or ""
2034 s = scantable(self._math._freq_align(self, reftime, method))
2035 s._add_history("freq_align", varlist)
2036 if insitu: self._assign(s)
2037 else: return s
2038
2039 @asaplog_post_dec
2040 def opacity(self, tau=None, insitu=None):
2041 """\
2042 Apply an opacity correction. The data
2043 and Tsys are multiplied by the correction factor.
2044
2045 Parameters:
2046
2047 tau: (list of) opacity from which the correction factor is
2048 exp(tau*ZD)
2049 where ZD is the zenith-distance.
2050 If a list is provided, it has to be of length nIF,
2051 nIF*nPol or 1 and in order of IF/POL, e.g.
2052 [opif0pol0, opif0pol1, opif1pol0 ...]
2053 if tau is `None` the opacities are determined from a
2054 model.
2055
2056 insitu: if False a new scantable is returned.
2057 Otherwise, the scaling is done in-situ
2058 The default is taken from .asaprc (False)
2059
2060 """
2061 if insitu is None: insitu = rcParams['insitu']
2062 self._math._setinsitu(insitu)
2063 varlist = vars()
2064 if not hasattr(tau, "__len__"):
2065 tau = [tau]
2066 s = scantable(self._math._opacity(self, tau))
2067 s._add_history("opacity", varlist)
2068 if insitu: self._assign(s)
2069 else: return s
2070
2071 @asaplog_post_dec
2072 def bin(self, width=5, insitu=None):
2073 """\
2074 Return a scan where all spectra have been binned up.
2075
2076 Parameters:
2077
2078 width: The bin width (default=5) in pixels
2079
2080 insitu: if False a new scantable is returned.
2081 Otherwise, the scaling is done in-situ
2082 The default is taken from .asaprc (False)
2083
2084 """
2085 if insitu is None: insitu = rcParams['insitu']
2086 self._math._setinsitu(insitu)
2087 varlist = vars()
2088 s = scantable(self._math._bin(self, width))
2089 s._add_history("bin", varlist)
2090 if insitu:
2091 self._assign(s)
2092 else:
2093 return s
2094
2095 @asaplog_post_dec
2096 def resample(self, width=5, method='cubic', insitu=None):
2097 """\
2098 Return a scan where all spectra have been binned up.
2099
2100 Parameters:
2101
2102 width: The bin width (default=5) in pixels
2103
2104 method: Interpolation method when correcting from a table.
2105 Values are "nearest", "linear", "cubic" (default)
2106 and "spline"
2107
2108 insitu: if False a new scantable is returned.
2109 Otherwise, the scaling is done in-situ
2110 The default is taken from .asaprc (False)
2111
2112 """
2113 if insitu is None: insitu = rcParams['insitu']
2114 self._math._setinsitu(insitu)
2115 varlist = vars()
2116 s = scantable(self._math._resample(self, method, width))
2117 s._add_history("resample", varlist)
2118 if insitu: self._assign(s)
2119 else: return s
2120
2121 @asaplog_post_dec
2122 def average_pol(self, mask=None, weight='none'):
2123 """\
2124 Average the Polarisations together.
2125
2126 Parameters:
2127
2128 mask: An optional mask defining the region, where the
2129 averaging will be applied. The output will have all
2130 specified points masked.
2131
2132 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2133 weighted), or 'tsys' (1/Tsys**2 weighted)
2134
2135 """
2136 varlist = vars()
2137 mask = mask or ()
2138 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2139 s._add_history("average_pol", varlist)
2140 return s
2141
2142 @asaplog_post_dec
2143 def average_beam(self, mask=None, weight='none'):
2144 """\
2145 Average the Beams together.
2146
2147 Parameters:
2148 mask: An optional mask defining the region, where the
2149 averaging will be applied. The output will have all
2150 specified points masked.
2151
2152 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2153 weighted), or 'tsys' (1/Tsys**2 weighted)
2154
2155 """
2156 varlist = vars()
2157 mask = mask or ()
2158 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2159 s._add_history("average_beam", varlist)
2160 return s
2161
2162 def parallactify(self, pflag):
2163 """\
2164 Set a flag to indicate whether this data should be treated as having
2165 been 'parallactified' (total phase == 0.0)
2166
2167 Parameters:
2168
2169 pflag: Bool indicating whether to turn this on (True) or
2170 off (False)
2171
2172 """
2173 varlist = vars()
2174 self._parallactify(pflag)
2175 self._add_history("parallactify", varlist)
2176
2177 @asaplog_post_dec
2178 def convert_pol(self, poltype=None):
2179 """\
2180 Convert the data to a different polarisation type.
2181 Note that you will need cross-polarisation terms for most conversions.
2182
2183 Parameters:
2184
2185 poltype: The new polarisation type. Valid types are:
2186 "linear", "circular", "stokes" and "linpol"
2187
2188 """
2189 varlist = vars()
2190 s = scantable(self._math._convertpol(self, poltype))
2191 s._add_history("convert_pol", varlist)
2192 return s
2193
2194 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so individual rows can be restored when
        # the user rejects the result in interactive (plot=True) mode
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive approval: overlay original and smoothed spectrum
            # row by row and ask the user to accept or reject each one
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # manually register both line objects so the legend shows
                # one entry per curve, then reset the list for the next row
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: write the original spectrum back into this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2269
2270 @asaplog_post_dec
2271 def _parse_wn(self, wn):
2272 if isinstance(wn, list) or isinstance(wn, tuple):
2273 return wn
2274 elif isinstance(wn, int):
2275 return [ wn ]
2276 elif isinstance(wn, str):
2277 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2278 val = wn.split('-')
2279 val = [int(val[0]), int(val[1])]
2280 val.sort()
2281 res = [i for i in xrange(val[0], val[1]+1)]
2282 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2283 val = int(wn[2:])+1
2284 res = [i for i in xrange(val)]
2285 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2286 val = int(wn[:-2])+1
2287 res = [i for i in xrange(val)]
2288 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2289 val = int(wn[1:])
2290 res = [i for i in xrange(val)]
2291 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2292 val = int(wn[:-1])
2293 res = [i for i in xrange(val)]
2294 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2295 val = int(wn[2:])
2296 res = [i for i in xrange(val, self.nchan()/2+1)]
2297 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2298 val = int(wn[:-2])
2299 res = [i for i in xrange(val, self.nchan()/2+1)]
2300 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2301 val = int(wn[1:])+1
2302 res = [i for i in xrange(val, self.nchan()/2+1)]
2303 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2304 val = int(wn[:-1])+1
2305 res = [i for i in xrange(val, self.nchan()/2+1)]
2306
2307 return res
2308 else:
2309 msg = 'wrong value given for addwn/rejwn'
2310 raise RuntimeError(msg)
2311
2312
2313 @asaplog_post_dec
2314 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2315 fftmethod=None, fftthresh=None,
2316 addwn=None, rejwn=None, clipthresh=None,
2317 clipniter=None, plot=None,
2318 getresidual=None, showprogress=None,
2319 minnrow=None, outlog=None, blfile=None):
2320 """\
2321 Return a scan which has been baselined (all rows) with sinusoidal functions.
2322 Parameters:
2323 insitu: if False a new scantable is returned.
2324 Otherwise, the scaling is done in-situ
2325 The default is taken from .asaprc (False)
2326 mask: an optional mask
2327 applyfft: if True use some method, such as FFT, to find
2328 strongest sinusoidal components in the wavenumber
2329 domain to be used for baseline fitting.
2330 default is True.
2331 fftmethod: method to find the strong sinusoidal components.
2332 now only 'fft' is available and it is the default.
2333 fftthresh: the threshold to select wave numbers to be used for
2334 fitting from the distribution of amplitudes in the
2335 wavenumber domain.
2336 both float and string values accepted.
2337 given a float value, the unit is set to sigma.
2338 for string values, allowed formats include:
2339 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2340 'topx' (= the x strongest ones, e.g. 'top5').
2341 default is 3.0 (unit: sigma).
2342 addwn: the additional wave numbers to be used for fitting.
2343 list or integer value is accepted to specify every
2344 wave numbers. also string value can be used in case
2345 you need to specify wave numbers in a certain range,
2346 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2347 '<a' (= 0,1,...,a-2,a-1),
2348 '>=a' (= a, a+1, ... up to the maximum wave
2349 number corresponding to the Nyquist
2350 frequency for the case of FFT).
2351 default is [].
2352 rejwn: the wave numbers NOT to be used for fitting.
2353 can be set just as addwn but has higher priority:
2354 wave numbers which are specified both in addwn
2355 and rejwn will NOT be used. default is [].
2356 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2357 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2358 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2359 plot the fit and the residual. In this each
2360 indivual fit has to be approved, by typing 'y'
2361 or 'n'
2362 getresidual: if False, returns best-fit values instead of
2363 residual. (default is True)
2364 showprogress: show progress status for large data.
2365 default is True.
2366 minnrow: minimum number of input spectra to show.
2367 default is 1000.
2368 outlog: Output the coefficients of the best-fit
2369 function to logger (default is False)
2370 blfile: Name of a text file in which the best-fit
2371 parameter values to be written
2372 (default is '': no file/logger output)
2373
2374 Example:
2375 # return a scan baselined by a combination of sinusoidal curves having
2376 # wave numbers in spectral window up to 10,
2377 # also with 3-sigma clipping, iteration up to 4 times
2378 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2379
2380 Note:
2381 The best-fit parameter values output in logger and/or blfile are now
2382 based on specunit of 'channel'.
2383 """
2384
2385 try:
2386 varlist = vars()
2387
2388 if insitu is None: insitu = rcParams['insitu']
2389 if insitu:
2390 workscan = self
2391 else:
2392 workscan = self.copy()
2393
2394 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2395 if applyfft is None: applyfft = True
2396 if fftmethod is None: fftmethod = 'fft'
2397 if fftthresh is None: fftthresh = 3.0
2398 if addwn is None: addwn = []
2399 if rejwn is None: rejwn = []
2400 if clipthresh is None: clipthresh = 3.0
2401 if clipniter is None: clipniter = 0
2402 if plot is None: plot = False
2403 if getresidual is None: getresidual = True
2404 if showprogress is None: showprogress = True
2405 if minnrow is None: minnrow = 1000
2406 if outlog is None: outlog = False
2407 if blfile is None: blfile = ''
2408
2409 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2410 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2411 workscan._add_history('sinusoid_baseline', varlist)
2412
2413 if insitu:
2414 self._assign(workscan)
2415 else:
2416 return workscan
2417
2418 except RuntimeError, e:
2419 raise_fitting_failure_exception(e)
2420
2421
2422 @asaplog_post_dec
2423 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2424 addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2425 chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
2426 outlog=None, blfile=None):
2427 """\
2428 Return a scan which has been baselined (all rows) with sinusoidal functions.
2429 Spectral lines are detected first using linefinder and masked out
2430 to avoid them affecting the baseline solution.
2431
2432 Parameters:
2433 insitu: if False a new scantable is returned.
2434 Otherwise, the scaling is done in-situ
2435 The default is taken from .asaprc (False)
2436 mask: an optional mask retreived from scantable
2437 applyfft: if True use some method, such as FFT, to find
2438 strongest sinusoidal components in the wavenumber
2439 domain to be used for baseline fitting.
2440 default is True.
2441 fftmethod: method to find the strong sinusoidal components.
2442 now only 'fft' is available and it is the default.
2443 fftthresh: the threshold to select wave numbers to be used for
2444 fitting from the distribution of amplitudes in the
2445 wavenumber domain.
2446 both float and string values accepted.
2447 given a float value, the unit is set to sigma.
2448 for string values, allowed formats include:
2449 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2450 'topx' (= the x strongest ones, e.g. 'top5').
2451 default is 3.0 (unit: sigma).
2452 addwn: the additional wave numbers to be used for fitting.
2453 list or integer value is accepted to specify every
2454 wave numbers. also string value can be used in case
2455 you need to specify wave numbers in a certain range,
2456 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2457 '<a' (= 0,1,...,a-2,a-1),
2458 '>=a' (= a, a+1, ... up to the maximum wave
2459 number corresponding to the Nyquist
2460 frequency for the case of FFT).
2461 default is [].
2462 rejwn: the wave numbers NOT to be used for fitting.
2463 can be set just as addwn but has higher priority:
2464 wave numbers which are specified both in addwn
2465 and rejwn will NOT be used. default is [].
2466 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2467 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2468 edge: an optional number of channel to drop at
2469 the edge of spectrum. If only one value is
2470 specified, the same number will be dropped
2471 from both sides of the spectrum. Default
2472 is to keep all channels. Nested tuples
2473 represent individual edge selection for
2474 different IFs (a number of spectral channels
2475 can be different)
2476 threshold: the threshold used by line finder. It is
2477 better to keep it large as only strong lines
2478 affect the baseline solution.
2479 chan_avg_limit: a maximum number of consequtive spectral
2480 channels to average during the search of
2481 weak and broad lines. The default is no
2482 averaging (and no search for weak lines).
2483 If such lines can affect the fitted baseline
2484 (e.g. a high order polynomial is fitted),
2485 increase this parameter (usually values up
2486 to 8 are reasonable). Most users of this
2487 method should find the default value sufficient.
2488 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2489 plot the fit and the residual. In this each
2490 indivual fit has to be approved, by typing 'y'
2491 or 'n'
2492 getresidual: if False, returns best-fit values instead of
2493 residual. (default is True)
2494 showprogress: show progress status for large data.
2495 default is True.
2496 minnrow: minimum number of input spectra to show.
2497 default is 1000.
2498 outlog: Output the coefficients of the best-fit
2499 function to logger (default is False)
2500 blfile: Name of a text file in which the best-fit
2501 parameter values to be written
2502 (default is "": no file/logger output)
2503
2504 Example:
2505 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2506
2507 Note:
2508 The best-fit parameter values output in logger and/or blfile are now
2509 based on specunit of 'channel'.
2510 """
2511
2512 try:
2513 varlist = vars()
2514
2515 if insitu is None: insitu = rcParams['insitu']
2516 if insitu:
2517 workscan = self
2518 else:
2519 workscan = self.copy()
2520
2521 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2522 if applyfft is None: applyfft = True
2523 if fftmethod is None: fftmethod = 'fft'
2524 if fftthresh is None: fftthresh = 3.0
2525 if addwn is None: addwn = []
2526 if rejwn is None: rejwn = []
2527 if clipthresh is None: clipthresh = 3.0
2528 if clipniter is None: clipniter = 0
2529 if edge is None: edge = (0,0)
2530 if threshold is None: threshold = 3
2531 if chan_avg_limit is None: chan_avg_limit = 1
2532 if plot is None: plot = False
2533 if getresidual is None: getresidual = True
2534 if showprogress is None: showprogress = True
2535 if minnrow is None: minnrow = 1000
2536 if outlog is None: outlog = False
2537 if blfile is None: blfile = ''
2538
2539 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2540 workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2541 workscan._add_history("auto_sinusoid_baseline", varlist)
2542
2543 if insitu:
2544 self._assign(workscan)
2545 else:
2546 return workscan
2547
2548 except RuntimeError, e:
2549 raise_fitting_failure_exception(e)
2550
2551 @asaplog_post_dec
2552 def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2553 plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
2554 """\
2555 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
2556 Parameters:
2557 insitu: If False a new scantable is returned.
2558 Otherwise, the scaling is done in-situ
2559 The default is taken from .asaprc (False)
2560 mask: An optional mask
2561 npiece: Number of pieces. (default is 2)
2562 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2563 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2564 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2565 plot the fit and the residual. In this each
2566 indivual fit has to be approved, by typing 'y'
2567 or 'n'
2568 getresidual: if False, returns best-fit values instead of
2569 residual. (default is True)
2570 showprogress: show progress status for large data.
2571 default is True.
2572 minnrow: minimum number of input spectra to show.
2573 default is 1000.
2574 outlog: Output the coefficients of the best-fit
2575 function to logger (default is False)
2576 blfile: Name of a text file in which the best-fit
2577 parameter values to be written
2578 (default is "": no file/logger output)
2579
2580 Example:
2581 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2582 # also with 3-sigma clipping, iteration up to 4 times
2583 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2584
2585 Note:
2586 The best-fit parameter values output in logger and/or blfile are now
2587 based on specunit of 'channel'.
2588 """
2589
2590 try:
2591 varlist = vars()
2592
2593 if insitu is None: insitu = rcParams['insitu']
2594 if insitu:
2595 workscan = self
2596 else:
2597 workscan = self.copy()
2598
2599 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2600 if npiece is None: npiece = 2
2601 if clipthresh is None: clipthresh = 3.0
2602 if clipniter is None: clipniter = 0
2603 if plot is None: plot = False
2604 if getresidual is None: getresidual = True
2605 if showprogress is None: showprogress = True
2606 if minnrow is None: minnrow = 1000
2607 if outlog is None: outlog = False
2608 if blfile is None: blfile = ''
2609
2610 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2611 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2612 workscan._add_history("cspline_baseline", varlist)
2613
2614 if insitu:
2615 self._assign(workscan)
2616 else:
2617 return workscan
2618
2619 except RuntimeError, e:
2620 raise_fitting_failure_exception(e)
2621
2622 @asaplog_post_dec
2623 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2624 edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2625 showprogress=None, minnrow=None, outlog=None, blfile=None):
2626 """\
2627 Return a scan which has been baselined (all rows) by cubic spline
2628 function (piecewise cubic polynomial).
2629 Spectral lines are detected first using linefinder and masked out
2630 to avoid them affecting the baseline solution.
2631
2632 Parameters:
2633 insitu: if False a new scantable is returned.
2634 Otherwise, the scaling is done in-situ
2635 The default is taken from .asaprc (False)
2636 mask: an optional mask retreived from scantable
2637 npiece: Number of pieces. (default is 2)
2638 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2639 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2640 edge: an optional number of channel to drop at
2641 the edge of spectrum. If only one value is
2642 specified, the same number will be dropped
2643 from both sides of the spectrum. Default
2644 is to keep all channels. Nested tuples
2645 represent individual edge selection for
2646 different IFs (a number of spectral channels
2647 can be different)
2648 threshold: the threshold used by line finder. It is
2649 better to keep it large as only strong lines
2650 affect the baseline solution.
2651 chan_avg_limit: a maximum number of consequtive spectral
2652 channels to average during the search of
2653 weak and broad lines. The default is no
2654 averaging (and no search for weak lines).
2655 If such lines can affect the fitted baseline
2656 (e.g. a high order polynomial is fitted),
2657 increase this parameter (usually values up
2658 to 8 are reasonable). Most users of this
2659 method should find the default value sufficient.
2660 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2661 plot the fit and the residual. In this each
2662 indivual fit has to be approved, by typing 'y'
2663 or 'n'
2664 getresidual: if False, returns best-fit values instead of
2665 residual. (default is True)
2666 showprogress: show progress status for large data.
2667 default is True.
2668 minnrow: minimum number of input spectra to show.
2669 default is 1000.
2670 outlog: Output the coefficients of the best-fit
2671 function to logger (default is False)
2672 blfile: Name of a text file in which the best-fit
2673 parameter values to be written
2674 (default is "": no file/logger output)
2675
2676 Example:
2677 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2678
2679 Note:
2680 The best-fit parameter values output in logger and/or blfile are now
2681 based on specunit of 'channel'.
2682 """
2683
2684 try:
2685 varlist = vars()
2686
2687 if insitu is None: insitu = rcParams['insitu']
2688 if insitu:
2689 workscan = self
2690 else:
2691 workscan = self.copy()
2692
2693 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2694 if npiece is None: npiece = 2
2695 if clipthresh is None: clipthresh = 3.0
2696 if clipniter is None: clipniter = 0
2697 if edge is None: edge = (0, 0)
2698 if threshold is None: threshold = 3
2699 if chan_avg_limit is None: chan_avg_limit = 1
2700 if plot is None: plot = False
2701 if getresidual is None: getresidual = True
2702 if showprogress is None: showprogress = True
2703 if minnrow is None: minnrow = 1000
2704 if outlog is None: outlog = False
2705 if blfile is None: blfile = ''
2706
2707 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2708 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2709 clipniter,
2710 normalise_edge_param(edge),
2711 threshold,
2712 chan_avg_limit, getresidual,
2713 pack_progress_params(showprogress,
2714 minnrow),
2715 outlog, blfile)
2716 workscan._add_history("auto_cspline_baseline", varlist)
2717
2718 if insitu:
2719 self._assign(workscan)
2720 else:
2721 return workscan
2722
2723 except RuntimeError, e:
2724 raise_fitting_failure_exception(e)
2725
2726 @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # record the caller-supplied arguments before defaults are filled
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for any unset parameter
            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit row by row with the Python fitter,
                # asking the user to approve each fit; optionally append the
                # fit parameters to an existing blfile
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own flag mask
                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum unchanged
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter attached to the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2838
2839 @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # record the caller-supplied arguments before defaults are filled
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # substitute documented defaults for any unset parameter
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # flatten the (possibly nested per-IF) edge spec into a flat list
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run linefinder and the Python fitter row
                # by row, asking the user to approve each fit; optionally
                # append the fit parameters to an existing blfile
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # normalised edge list holds (start, stop) pairs per IF
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only channels the linefinder left unmasked
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum unchanged
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual() \
                                               if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter attached to the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2987
2988 def _init_blinfo(self):
2989 """\
2990 Initialise the following three auxiliary members:
2991 blpars : parameters of the best-fit baseline,
2992 masklists : mask data (edge positions of masked channels) and
2993 actualmask : mask data (in boolean list),
2994 to keep for use later (including output to logger/text files).
2995 Used by poly_baseline() and auto_poly_baseline() in case of
2996 'plot=True'.
2997 """
2998 self.blpars = []
2999 self.masklists = []
3000 self.actualmask = []
3001 return
3002
3003 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3004 """\
3005 Append baseline-fitting related info to blpars, masklist and
3006 actualmask.
3007 """
3008 self.blpars.append(data_blpars)
3009 self.masklists.append(data_masklists)
3010 self.actualmask.append(data_actualmask)
3011 return
3012
3013 @asaplog_post_dec
3014 def rotate_linpolphase(self, angle):
3015 """\
3016 Rotate the phase of the complex polarization O=Q+iU correlation.
3017 This is always done in situ in the raw data. So if you call this
3018 function more than once then each call rotates the phase further.
3019
3020 Parameters:
3021
3022 angle: The angle (degrees) to rotate (add) by.
3023
3024 Example::
3025
3026 scan.rotate_linpolphase(2.3)
3027
3028 """
3029 varlist = vars()
3030 self._math._rotate_linpolphase(self, angle)
3031 self._add_history("rotate_linpolphase", varlist)
3032 return
3033
3034 @asaplog_post_dec
3035 def rotate_xyphase(self, angle):
3036 """\
3037 Rotate the phase of the XY correlation. This is always done in situ
3038 in the data. So if you call this function more than once
3039 then each call rotates the phase further.
3040
3041 Parameters:
3042
3043 angle: The angle (degrees) to rotate (add) by.
3044
3045 Example::
3046
3047 scan.rotate_xyphase(2.3)
3048
3049 """
3050 varlist = vars()
3051 self._math._rotate_xyphase(self, angle)
3052 self._add_history("rotate_xyphase", varlist)
3053 return
3054
3055 @asaplog_post_dec
3056 def swap_linears(self):
3057 """\
3058 Swap the linear polarisations XX and YY, or better the first two
3059 polarisations as this also works for ciculars.
3060 """
3061 varlist = vars()
3062 self._math._swap_linears(self)
3063 self._add_history("swap_linears", varlist)
3064 return
3065
3066 @asaplog_post_dec
3067 def invert_phase(self):
3068 """\
3069 Invert the phase of the complex polarisation
3070 """
3071 varlist = vars()
3072 self._math._invert_phase(self)
3073 self._add_history("invert_phase", varlist)
3074 return
3075
3076 @asaplog_post_dec
3077 def add(self, offset, insitu=None):
3078 """\
3079 Return a scan where all spectra have the offset added
3080
3081 Parameters:
3082
3083 offset: the offset
3084
3085 insitu: if False a new scantable is returned.
3086 Otherwise, the scaling is done in-situ
3087 The default is taken from .asaprc (False)
3088
3089 """
3090 if insitu is None: insitu = rcParams['insitu']
3091 self._math._setinsitu(insitu)
3092 varlist = vars()
3093 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3094 s._add_history("add", varlist)
3095 if insitu:
3096 self._assign(s)
3097 else:
3098 return s
3099
3100 @asaplog_post_dec
3101 def scale(self, factor, tsys=True, insitu=None):
3102 """\
3103
3104 Return a scan where all spectra are scaled by the given 'factor'
3105
3106 Parameters:
3107
3108 factor: the scaling factor (float or 1D float list)
3109
3110 insitu: if False a new scantable is returned.
3111 Otherwise, the scaling is done in-situ
3112 The default is taken from .asaprc (False)
3113
3114 tsys: if True (default) then apply the operation to Tsys
3115 as well as the data
3116
3117 """
3118 if insitu is None: insitu = rcParams['insitu']
3119 self._math._setinsitu(insitu)
3120 varlist = vars()
3121 s = None
3122 import numpy
3123 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3124 if isinstance(factor[0], list) or isinstance(factor[0],
3125 numpy.ndarray):
3126 from asapmath import _array2dOp
3127 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3128 else:
3129 s = scantable( self._math._arrayop( self, factor,
3130 "MUL", tsys ) )
3131 else:
3132 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3133 s._add_history("scale", varlist)
3134 if insitu:
3135 self._assign(s)
3136 else:
3137 return s
3138
3139 def set_sourcetype(self, match, matchtype="pattern",
3140 sourcetype="reference"):
3141 """\
3142 Set the type of the source to be an source or reference scan
3143 using the provided pattern.
3144
3145 Parameters:
3146
3147 match: a Unix style pattern, regular expression or selector
3148
3149 matchtype: 'pattern' (default) UNIX style pattern or
3150 'regex' regular expression
3151
3152 sourcetype: the type of the source to use (source/reference)
3153
3154 """
3155 varlist = vars()
3156 basesel = self.get_selection()
3157 stype = -1
3158 if sourcetype.lower().startswith("r"):
3159 stype = 1
3160 elif sourcetype.lower().startswith("s"):
3161 stype = 0
3162 else:
3163 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3164 if matchtype.lower().startswith("p"):
3165 matchtype = "pattern"
3166 elif matchtype.lower().startswith("r"):
3167 matchtype = "regex"
3168 else:
3169 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3170 sel = selector()
3171 if isinstance(match, selector):
3172 sel = match
3173 else:
3174 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3175 self.set_selection(basesel+sel)
3176 self._setsourcetype(stype)
3177 self.set_selection(basesel)
3178 self._add_history("set_sourcetype", varlist)
3179
3180 @asaplog_post_dec
3181 @preserve_selection
3182 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3183 """\
3184 This function allows to build quotients automatically.
3185 It assumes the observation to have the same number of
3186 "ons" and "offs"
3187
3188 Parameters:
3189
3190 preserve: you can preserve (default) the continuum or
3191 remove it. The equations used are
3192
3193 preserve: Output = Toff * (on/off) - Toff
3194
3195 remove: Output = Toff * (on/off) - Ton
3196
3197 mode: the on/off detection mode
3198 'paired' (default)
3199 identifies 'off' scans by the
3200 trailing '_R' (Mopra/Parkes) or
3201 '_e'/'_w' (Tid) and matches
3202 on/off pairs from the observing pattern
3203 'time'
3204 finds the closest off in time
3205
3206 .. todo:: verify argument is not implemented
3207
3208 """
3209 varlist = vars()
3210 modes = ["time", "paired"]
3211 if not mode in modes:
3212 msg = "please provide valid mode. Valid modes are %s" % (modes)
3213 raise ValueError(msg)
3214 s = None
3215 if mode.lower() == "paired":
3216 sel = self.get_selection()
3217 sel.set_query("SRCTYPE==psoff")
3218 self.set_selection(sel)
3219 offs = self.copy()
3220 sel.set_query("SRCTYPE==pson")
3221 self.set_selection(sel)
3222 ons = self.copy()
3223 s = scantable(self._math._quotient(ons, offs, preserve))
3224 elif mode.lower() == "time":
3225 s = scantable(self._math._auto_quotient(self, mode, preserve))
3226 s._add_history("auto_quotient", varlist)
3227 return s
3228
3229 @asaplog_post_dec
3230 def mx_quotient(self, mask = None, weight='median', preserve=True):
3231 """\
3232 Form a quotient using "off" beams when observing in "MX" mode.
3233
3234 Parameters:
3235
3236 mask: an optional mask to be used when weight == 'stddev'
3237
3238 weight: How to average the off beams. Default is 'median'.
3239
3240 preserve: you can preserve (default) the continuum or
3241 remove it. The equations used are:
3242
3243 preserve: Output = Toff * (on/off) - Toff
3244
3245 remove: Output = Toff * (on/off) - Ton
3246
3247 """
3248 mask = mask or ()
3249 varlist = vars()
3250 on = scantable(self._math._mx_extract(self, 'on'))
3251 preoff = scantable(self._math._mx_extract(self, 'off'))
3252 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3253 from asapmath import quotient
3254 q = quotient(on, off, preserve)
3255 q._add_history("mx_quotient", varlist)
3256 return q
3257
3258 @asaplog_post_dec
3259 def freq_switch(self, insitu=None):
3260 """\
3261 Apply frequency switching to the data.
3262
3263 Parameters:
3264
3265 insitu: if False a new scantable is returned.
3266 Otherwise, the swictching is done in-situ
3267 The default is taken from .asaprc (False)
3268
3269 """
3270 if insitu is None: insitu = rcParams['insitu']
3271 self._math._setinsitu(insitu)
3272 varlist = vars()
3273 s = scantable(self._math._freqswitch(self))
3274 s._add_history("freq_switch", varlist)
3275 if insitu:
3276 self._assign(s)
3277 else:
3278 return s
3279
3280 @asaplog_post_dec
3281 def recalc_azel(self):
3282 """Recalculate the azimuth and elevation for each position."""
3283 varlist = vars()
3284 self._recalcazel()
3285 self._add_history("recalc_azel", varlist)
3286 return
3287
3288 @asaplog_post_dec
3289 def __add__(self, other):
3290 varlist = vars()
3291 s = None
3292 if isinstance(other, scantable):
3293 s = scantable(self._math._binaryop(self, other, "ADD"))
3294 elif isinstance(other, float):
3295 s = scantable(self._math._unaryop(self, other, "ADD", False))
3296 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3297 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3298 from asapmath import _array2dOp
3299 s = _array2dOp( self.copy(), other, "ADD", False )
3300 else:
3301 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
3302 else:
3303 raise TypeError("Other input is not a scantable or float value")
3304 s._add_history("operator +", varlist)
3305 return s
3306
3307 @asaplog_post_dec
3308 def __sub__(self, other):
3309 """
3310 implicit on all axes and on Tsys
3311 """
3312 varlist = vars()
3313 s = None
3314 if isinstance(other, scantable):
3315 s = scantable(self._math._binaryop(self, other, "SUB"))
3316 elif isinstance(other, float):
3317 s = scantable(self._math._unaryop(self, other, "SUB", False))
3318 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3319 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3320 from asapmath import _array2dOp
3321 s = _array2dOp( self.copy(), other, "SUB", False )
3322 else:
3323 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
3324 else:
3325 raise TypeError("Other input is not a scantable or float value")
3326 s._add_history("operator -", varlist)
3327 return s
3328
3329 @asaplog_post_dec
3330 def __mul__(self, other):
3331 """
3332 implicit on all axes and on Tsys
3333 """
3334 varlist = vars()
3335 s = None
3336 if isinstance(other, scantable):
3337 s = scantable(self._math._binaryop(self, other, "MUL"))
3338 elif isinstance(other, float):
3339 s = scantable(self._math._unaryop(self, other, "MUL", False))
3340 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3341 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3342 from asapmath import _array2dOp
3343 s = _array2dOp( self.copy(), other, "MUL", False )
3344 else:
3345 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
3346 else:
3347 raise TypeError("Other input is not a scantable or float value")
3348 s._add_history("operator *", varlist)
3349 return s
3350
3351
3352 @asaplog_post_dec
3353 def __div__(self, other):
3354 """
3355 implicit on all axes and on Tsys
3356 """
3357 varlist = vars()
3358 s = None
3359 if isinstance(other, scantable):
3360 s = scantable(self._math._binaryop(self, other, "DIV"))
3361 elif isinstance(other, float):
3362 if other == 0.0:
3363 raise ZeroDivisionError("Dividing by zero is not recommended")
3364 s = scantable(self._math._unaryop(self, other, "DIV", False))
3365 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3366 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3367 from asapmath import _array2dOp
3368 s = _array2dOp( self.copy(), other, "DIV", False )
3369 else:
3370 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
3371 else:
3372 raise TypeError("Other input is not a scantable or float value")
3373 s._add_history("operator /", varlist)
3374 return s
3375
3376 @asaplog_post_dec
3377 def get_fit(self, row=0):
3378 """\
3379 Print or return the stored fits for a row in the scantable
3380
3381 Parameters:
3382
3383 row: the row which the fit has been applied to.
3384
3385 """
3386 if row > self.nrow():
3387 return
3388 from asap.asapfit import asapfit
3389 fit = asapfit(self._getfit(row))
3390 asaplog.push( '%s' %(fit) )
3391 return fit.as_dict()
3392
3393 def flag_nans(self):
3394 """\
3395 Utility function to flag NaN values in the scantable.
3396 """
3397 import numpy
3398 basesel = self.get_selection()
3399 for i in range(self.nrow()):
3400 sel = self.get_row_selector(i)
3401 self.set_selection(basesel+sel)
3402 nans = numpy.isnan(self._getspectrum(0))
3403 if numpy.any(nans):
3404 bnans = [ bool(v) for v in nans]
3405 self.flag(bnans)
3406 self.set_selection(basesel)
3407
3408 def get_row_selector(self, rowno):
3409 #return selector(beams=self.getbeam(rowno),
3410 # ifs=self.getif(rowno),
3411 # pols=self.getpol(rowno),
3412 # scans=self.getscan(rowno),
3413 # cycles=self.getcycle(rowno))
3414 return selector(rows=[rowno])
3415
3416 def _add_history(self, funcname, parameters):
3417 if not rcParams['scantable.history']:
3418 return
3419 # create date
3420 sep = "##"
3421 from datetime import datetime
3422 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3423 hist = dstr+sep
3424 hist += funcname+sep#cdate+sep
3425 if parameters.has_key('self'): del parameters['self']
3426 for k, v in parameters.iteritems():
3427 if type(v) is dict:
3428 for k2, v2 in v.iteritems():
3429 hist += k2
3430 hist += "="
3431 if isinstance(v2, scantable):
3432 hist += 'scantable'
3433 elif k2 == 'mask':
3434 if isinstance(v2, list) or isinstance(v2, tuple):
3435 hist += str(self._zip_mask(v2))
3436 else:
3437 hist += str(v2)
3438 else:
3439 hist += str(v2)
3440 else:
3441 hist += k
3442 hist += "="
3443 if isinstance(v, scantable):
3444 hist += 'scantable'
3445 elif k == 'mask':
3446 if isinstance(v, list) or isinstance(v, tuple):
3447 hist += str(self._zip_mask(v))
3448 else:
3449 hist += str(v)
3450 else:
3451 hist += str(v)
3452 hist += sep
3453 hist = hist[:-2] # remove trailing '##'
3454 self._addhistory(hist)
3455
3456
3457 def _zip_mask(self, mask):
3458 mask = list(mask)
3459 i = 0
3460 segments = []
3461 while mask[i:].count(1):
3462 i += mask[i:].index(1)
3463 if mask[i:].count(0):
3464 j = i + mask[i:].index(0)
3465 else:
3466 j = len(mask)
3467 segments.append([i, j])
3468 i = j
3469 return segments
3470
3471 def _get_ordinate_label(self):
3472 fu = "("+self.get_fluxunit()+")"
3473 import re
3474 lbl = "Intensity"
3475 if re.match(".K.", fu):
3476 lbl = "Brightness Temperature "+ fu
3477 elif re.match(".Jy.", fu):
3478 lbl = "Flux density "+ fu
3479 return lbl
3480
3481 def _check_ifs(self):
3482 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3483 nchans = [self.nchan(i) for i in self.getifnos()]
3484 nchans = filter(lambda t: t > 0, nchans)
3485 return (sum(nchans)/len(nchans) == nchans[0])
3486
3487 @asaplog_post_dec
3488 #def _fill(self, names, unit, average, getpt, antenna):
3489 def _fill(self, names, unit, average, opts={}):
3490 first = True
3491 fullnames = []
3492 for name in names:
3493 name = os.path.expandvars(name)
3494 name = os.path.expanduser(name)
3495 if not os.path.exists(name):
3496 msg = "File '%s' does not exists" % (name)
3497 raise IOError(msg)
3498 fullnames.append(name)
3499 if average:
3500 asaplog.push('Auto averaging integrations')
3501 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3502 for name in fullnames:
3503 tbl = Scantable(stype)
3504 if is_ms( name ):
3505 r = msfiller( tbl )
3506 else:
3507 r = filler( tbl )
3508 rx = rcParams['scantable.reference']
3509 r.setreferenceexpr(rx)
3510 #r = filler(tbl)
3511 #rx = rcParams['scantable.reference']
3512 #r.setreferenceexpr(rx)
3513 msg = "Importing %s..." % (name)
3514 asaplog.push(msg, False)
3515 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3516 r.open(name, opts)# antenna, -1, -1, getpt)
3517 r.fill()
3518 if average:
3519 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3520 if not first:
3521 tbl = self._math._merge([self, tbl])
3522 Scantable.__init__(self, tbl)
3523 r.close()
3524 del r, tbl
3525 first = False
3526 #flush log
3527 asaplog.post()
3528 if unit is not None:
3529 self.set_fluxunit(unit)
3530 if not is_casapy():
3531 self.set_freqframe(rcParams['scantable.freqframe'])
3532
3533
3534 def __getitem__(self, key):
3535 if key < 0:
3536 key += self.nrow()
3537 if key >= self.nrow():
3538 raise IndexError("Row index out of range.")
3539 return self._getspectrum(key)
3540
3541 def __setitem__(self, key, value):
3542 if key < 0:
3543 key += self.nrow()
3544 if key >= self.nrow():
3545 raise IndexError("Row index out of range.")
3546 if not hasattr(value, "__len__") or \
3547 len(value) > self.nchan(self.getif(key)):
3548 raise ValueError("Spectrum length doesn't match.")
3549 return self._setspectrum(value, key)
3550
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
3553
3554 def __iter__(self):
3555 for i in range(len(self)):
3556 yield self[i]
Note: See TracBrowser for help on using the repository browser.