source: trunk/python/scantable.py@ 2321

Last change on this file since 2321 was 2320, checked in by Malte Marquarding, 13 years ago

Ticket #251: fixed copy of input scantable which breaks scantable selection

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped callable's first argument must provide get_selection() /
    set_selection(); the selection active before the call is reinstated
    even if *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # restore no matter how func() exits
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for a casacore table directory that is NOT a
    MeasurementSet (matching on 'Scantable' directly would be stricter;
    the looser negative test is kept for backward compatibility).
    """
    if (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        f = open(filename + '/table.info')
        try:
            line = f.readline()
        finally:
            # always release the file handle, even if readline() fails
            f.close()
        return line.find('Measurement Set') == -1
    return False
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when the directory is a casacore table whose table.info
    type line mentions 'Measurement Set'.
    """
    if (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        f = open(filename + '/table.info')
        try:
            line = f.readline()
        finally:
            # always release the file handle, even if readline() fails
            f.close()
        return line.find('Measurement Set') != -1
    return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation ----------------------------------------------------
    # NOTE: error messages normalised and the Python-2-only
    # 'raise Exc, msg' form replaced with the version-portable call form.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- canonicalise scalars and short sequences ----------------------
    if isinstance(edge, int):
        edge = [edge, edge]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]    # e.g. [1] => [1,1]

    # --- decide whether one pair applies to all IFs --------------------
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten, duplicating bare integers into [n, n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it in verbose mode, else re-raise."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack progress-bar options into the 'flag,minnrow' string form."""
    return '%s,%s' % (str(showprogress).lower(), minnrow)
175
176class scantable(Scantable):
177 """\
178 The ASAP container for scans (single-dish data).
179 """
180
181 @asaplog_post_dec
182 #def __init__(self, filename, average=None, unit=None, getpt=None,
183 # antenna=None, parallactify=None):
184 def __init__(self, filename, average=None, unit=None, parallactify=None,
185 **args):
186 """\
187 Create a scantable from a saved one or make a reference
188
189 Parameters:
190
191 filename: the name of an asap table on disk
192 or
193 the name of a rpfits/sdfits/ms file
194 (integrations within scans are auto averaged
195 and the whole file is read) or
196 [advanced] a reference to an existing scantable
197
198 average: average all integrations withinb a scan on read.
199 The default (True) is taken from .asaprc.
200
201 unit: brightness unit; must be consistent with K or Jy.
202 Over-rides the default selected by the filler
203 (input rpfits/sdfits/ms) or replaces the value
204 in existing scantables
205
206 getpt: for MeasurementSet input data only:
207 If True, all pointing data are filled.
208 The deafult is False, which makes time to load
209 the MS data faster in some cases.
210
211 antenna: for MeasurementSet input data only:
212 Antenna selection. integer (id) or string (name or id).
213
214 parallactify: Indicate that the data had been parallatified. Default
215 is taken from rc file.
216
217 """
218 if average is None:
219 average = rcParams['scantable.autoaverage']
220 #if getpt is None:
221 # getpt = True
222 #if antenna is not None:
223 # asaplog.push("Antenna selection currently unsupported."
224 # "Using ''")
225 # asaplog.post('WARN')
226 #if antenna is None:
227 # antenna = ''
228 #elif type(antenna) == int:
229 # antenna = '%s' % antenna
230 #elif type(antenna) == list:
231 # tmpstr = ''
232 # for i in range( len(antenna) ):
233 # if type(antenna[i]) == int:
234 # tmpstr = tmpstr + ('%s,'%(antenna[i]))
235 # elif type(antenna[i]) == str:
236 # tmpstr=tmpstr+antenna[i]+','
237 # else:
238 # raise TypeError('Bad antenna selection.')
239 # antenna = tmpstr.rstrip(',')
240 parallactify = parallactify or rcParams['scantable.parallactify']
241 varlist = vars()
242 from asap._asap import stmath
243 self._math = stmath( rcParams['insitu'] )
244 if isinstance(filename, Scantable):
245 Scantable.__init__(self, filename)
246 else:
247 if isinstance(filename, str):
248 filename = os.path.expandvars(filename)
249 filename = os.path.expanduser(filename)
250 if not os.path.exists(filename):
251 s = "File '%s' not found." % (filename)
252 raise IOError(s)
253 if is_scantable(filename):
254 ondisk = rcParams['scantable.storage'] == 'disk'
255 Scantable.__init__(self, filename, ondisk)
256 if unit is not None:
257 self.set_fluxunit(unit)
258 if average:
259 self._assign( self.average_time( scanav=True ) )
260 # do not reset to the default freqframe
261 #self.set_freqframe(rcParams['scantable.freqframe'])
262 #elif os.path.isdir(filename) \
263 # and not os.path.exists(filename+'/table.f1'):
264 elif is_ms(filename):
265 # Measurement Set
266 opts={'ms': {}}
267 mskeys=['getpt','antenna']
268 for key in mskeys:
269 if key in args.keys():
270 opts['ms'][key] = args[key]
271 #self._fill([filename], unit, average, getpt, antenna)
272 self._fill([filename], unit, average, opts)
273 elif os.path.isfile(filename):
274 #self._fill([filename], unit, average, getpt, antenna)
275 self._fill([filename], unit, average)
276 else:
277 msg = "The given file '%s'is not a valid " \
278 "asap table." % (filename)
279 raise IOError(msg)
280 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
281 and isinstance(filename[-1], str):
282 #self._fill(filename, unit, average, getpt, antenna)
283 self._fill(filename, unit, average)
284 self.parallactify(parallactify)
285 self._add_history("scantable", varlist)
286
287 @asaplog_post_dec
288 def save(self, name=None, format=None, overwrite=False):
289 """\
290 Store the scantable on disk. This can be an asap (aips++) Table,
291 SDFITS or MS2 format.
292
293 Parameters:
294
295 name: the name of the outputfile. For format "ASCII"
296 this is the root file name (data in 'name'.txt
297 and header in 'name'_header.txt)
298
299 format: an optional file format. Default is ASAP.
300 Allowed are:
301
302 * 'ASAP' (save as ASAP [aips++] Table),
303 * 'SDFITS' (save as SDFITS file)
304 * 'ASCII' (saves as ascii text file)
305 * 'MS2' (saves as an casacore MeasurementSet V2)
306 * 'FITS' (save as image FITS - not readable by
307 class)
308 * 'CLASS' (save as FITS readable by CLASS)
309
310 overwrite: If the file should be overwritten if it exists.
311 The default False is to return with warning
312 without writing the output. USE WITH CARE.
313
314 Example::
315
316 scan.save('myscan.asap')
317 scan.save('myscan.sdfits', 'SDFITS')
318
319 """
320 from os import path
321 format = format or rcParams['scantable.save']
322 suffix = '.'+format.lower()
323 if name is None or name == "":
324 name = 'scantable'+suffix
325 msg = "No filename given. Using default name %s..." % name
326 asaplog.push(msg)
327 name = path.expandvars(name)
328 if path.isfile(name) or path.isdir(name):
329 if not overwrite:
330 msg = "File %s exists." % name
331 raise IOError(msg)
332 format2 = format.upper()
333 if format2 == 'ASAP':
334 self._save(name)
335 elif format2 == 'MS2':
336 msopt = {'ms': {'overwrite': overwrite } }
337 from asap._asap import mswriter
338 writer = mswriter( self )
339 writer.write( name, msopt )
340 else:
341 from asap._asap import stwriter as stw
342 writer = stw(format2)
343 writer.write(self, name)
344 return
345
346 def copy(self):
347 """Return a copy of this scantable.
348
349 *Note*:
350
351 This makes a full (deep) copy. scan2 = scan1 makes a reference.
352
353 Example::
354
355 copiedscan = scan.copy()
356
357 """
358 sd = scantable(Scantable._copy(self))
359 return sd
360
361 def drop_scan(self, scanid=None):
362 """\
363 Return a new scantable where the specified scan number(s) has(have)
364 been dropped.
365
366 Parameters:
367
368 scanid: a (list of) scan number(s)
369
370 """
371 from asap import _is_sequence_or_number as _is_valid
372 from asap import _to_list
373 from asap import unique
374 if not _is_valid(scanid):
375 raise RuntimeError( 'Please specify a scanno to drop from the'
376 ' scantable' )
377 scanid = _to_list(scanid)
378 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
379 for sid in scanid: allscans.remove(sid)
380 if len(allscans) == 0:
381 raise ValueError("Can't remove all scans")
382 sel = selector(scans=allscans)
383 return self._select_copy(sel)
384
385 def _select_copy(self, selection):
386 orig = self.get_selection()
387 self.set_selection(orig+selection)
388 cp = self.copy()
389 self.set_selection(orig)
390 return cp
391
392 def get_scan(self, scanid=None):
393 """\
394 Return a specific scan (by scanno) or collection of scans (by
395 source name) in a new scantable.
396
397 *Note*:
398
399 See scantable.drop_scan() for the inverse operation.
400
401 Parameters:
402
403 scanid: a (list of) scanno or a source name, unix-style
404 patterns are accepted for source name matching, e.g.
405 '*_R' gets all 'ref scans
406
407 Example::
408
409 # get all scans containing the source '323p459'
410 newscan = scan.get_scan('323p459')
411 # get all 'off' scans
412 refscans = scan.get_scan('*_R')
413 # get a susbset of scans by scanno (as listed in scan.summary())
414 newscan = scan.get_scan([0, 2, 7, 10])
415
416 """
417 if scanid is None:
418 raise RuntimeError( 'Please specify a scan no or name to '
419 'retrieve from the scantable' )
420 try:
421 bsel = self.get_selection()
422 sel = selector()
423 if type(scanid) is str:
424 sel.set_name(scanid)
425 return self._select_copy(sel)
426 elif type(scanid) is int:
427 sel.set_scans([scanid])
428 return self._select_copy(sel)
429 elif type(scanid) is list:
430 sel.set_scans(scanid)
431 return self._select_copy(sel)
432 else:
433 msg = "Illegal scanid type, use 'int' or 'list' if ints."
434 raise TypeError(msg)
435 except RuntimeError:
436 raise
437
438 def __str__(self):
439 tempFile = tempfile.NamedTemporaryFile()
440 Scantable._summary(self, tempFile.name)
441 tempFile.seek(0)
442 asaplog.clear()
443 return tempFile.file.read()
444
445 @asaplog_post_dec
446 def summary(self, filename=None):
447 """\
448 Print a summary of the contents of this scantable.
449
450 Parameters:
451
452 filename: the name of a file to write the putput to
453 Default - no file output
454
455 """
456# info = Scantable._summary(self)
457 if filename is not None:
458 if filename is "":
459 filename = 'scantable_summary.txt'
460 from os.path import expandvars, isdir
461 filename = expandvars(filename)
462# if not isdir(filename):
463# data = open(filename, 'w')
464# data.write(info)
465# data.close()
466# else:
467 if isdir(filename):
468 msg = "Illegal file name '%s'." % (filename)
469 raise IOError(msg)
470 else:
471 filename = ""
472 Scantable._summary(self, filename)
473# info = Scantable._summary(self, filename)
474# return page(info)
475
476 def get_spectrum(self, rowno):
477 """Return the spectrum for the current row in the scantable as a list.
478
479 Parameters:
480
481 rowno: the row number to retrieve the spectrum from
482
483 """
484 return self._getspectrum(rowno)
485
486 def get_mask(self, rowno):
487 """Return the mask for the current row in the scantable as a list.
488
489 Parameters:
490
491 rowno: the row number to retrieve the mask from
492
493 """
494 return self._getmask(rowno)
495
496 def set_spectrum(self, spec, rowno):
497 """Set the spectrum for the current row in the scantable.
498
499 Parameters:
500
501 spec: the new spectrum
502
503 rowno: the row number to set the spectrum for
504
505 """
506 assert(len(spec) == self.nchan())
507 return self._setspectrum(spec, rowno)
508
509 def get_coordinate(self, rowno):
510 """Return the (spectral) coordinate for a a given 'rowno'.
511
512 *Note*:
513
514 * This coordinate is only valid until a scantable method modifies
515 the frequency axis.
516 * This coordinate does contain the original frequency set-up
517 NOT the new frame. The conversions however are done using the user
518 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
519 use scantable.freq_align first. Without it there is no closure,
520 i.e.::
521
522 c = myscan.get_coordinate(0)
523 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
524
525 Parameters:
526
527 rowno: the row number for the spectral coordinate
528
529 """
530 return coordinate(Scantable.get_coordinate(self, rowno))
531
532 def get_selection(self):
533 """\
534 Get the selection object currently set on this scantable.
535
536 Example::
537
538 sel = scan.get_selection()
539 sel.set_ifs(0) # select IF 0
540 scan.set_selection(sel) # apply modified selection
541
542 """
543 return selector(self._getselection())
544
545 def set_selection(self, selection=None, **kw):
546 """\
547 Select a subset of the data. All following operations on this scantable
548 are only applied to thi selection.
549
550 Parameters:
551
552 selection: a selector object (default unset the selection), or
553 any combination of "pols", "ifs", "beams", "scans",
554 "cycles", "name", "query"
555
556 Examples::
557
558 sel = selector() # create a selection object
559 self.set_scans([0, 3]) # select SCANNO 0 and 3
560 scan.set_selection(sel) # set the selection
561 scan.summary() # will only print summary of scanno 0 an 3
562 scan.set_selection() # unset the selection
563 # or the equivalent
564 scan.set_selection(scans=[0,3])
565 scan.summary() # will only print summary of scanno 0 an 3
566 scan.set_selection() # unset the selection
567
568 """
569 if selection is None:
570 # reset
571 if len(kw) == 0:
572 selection = selector()
573 else:
574 # try keywords
575 for k in kw:
576 if k not in selector.fields:
577 raise KeyError("Invalid selection key '%s', "
578 "valid keys are %s" % (k,
579 selector.fields))
580 selection = selector(**kw)
581 self._setselection(selection)
582
583 def get_row(self, row=0, insitu=None):
584 """\
585 Select a row in the scantable.
586 Return a scantable with single row.
587
588 Parameters:
589
590 row: row no of integration, default is 0.
591 insitu: if False a new scantable is returned. Otherwise, the
592 scaling is done in-situ. The default is taken from .asaprc
593 (False)
594
595 """
596 if insitu is None: insitu = rcParams['insitu']
597 if not insitu:
598 workscan = self.copy()
599 else:
600 workscan = self
601 # Select a row
602 sel=selector()
603 sel.set_rows([row])
604 #sel.set_scans([workscan.getscan(row)])
605 #sel.set_cycles([workscan.getcycle(row)])
606 #sel.set_beams([workscan.getbeam(row)])
607 #sel.set_ifs([workscan.getif(row)])
608 #sel.set_polarisations([workscan.getpol(row)])
609 #sel.set_name(workscan._getsourcename(row))
610 workscan.set_selection(sel)
611 if not workscan.nrow() == 1:
612 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
613 raise RuntimeError(msg)
614 del sel
615 if insitu:
616 self._assign(workscan)
617 else:
618 return workscan
619
620 @asaplog_post_dec
621 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
622 """\
623 Determine the specified statistic of the current beam/if/pol
624 Takes a 'mask' as an optional parameter to specify which
625 channels should be excluded.
626
627 Parameters:
628
629 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
630 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
631
632 mask: an optional mask specifying where the statistic
633 should be determined.
634
635 form: format string to print statistic values
636
637 row: row number of spectrum to process.
638 (default is None: for all rows)
639
640 Example:
641 scan.set_unit('channel')
642 msk = scan.create_mask([100, 200], [500, 600])
643 scan.stats(stat='mean', mask=m)
644
645 """
646 mask = mask or []
647 if not self._check_ifs():
648 raise ValueError("Cannot apply mask as the IFs have different "
649 "number of channels. Please use setselection() "
650 "to select individual IFs")
651 rtnabc = False
652 if stat.lower().endswith('_abc'): rtnabc = True
653 getchan = False
654 if stat.lower().startswith('min') or stat.lower().startswith('max'):
655 chan = self._math._minmaxchan(self, mask, stat)
656 getchan = True
657 statvals = []
658 if not rtnabc:
659 if row == None:
660 statvals = self._math._stats(self, mask, stat)
661 else:
662 statvals = self._math._statsrow(self, mask, stat, int(row))
663
664 #def cb(i):
665 # return statvals[i]
666
667 #return self._row_callback(cb, stat)
668
669 label=stat
670 #callback=cb
671 out = ""
672 #outvec = []
673 sep = '-'*50
674
675 if row == None:
676 rows = xrange(self.nrow())
677 elif isinstance(row, int):
678 rows = [ row ]
679
680 for i in rows:
681 refstr = ''
682 statunit= ''
683 if getchan:
684 qx, qy = self.chan2data(rowno=i, chan=chan[i])
685 if rtnabc:
686 statvals.append(qx['value'])
687 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
688 statunit= '['+qx['unit']+']'
689 else:
690 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
691
692 tm = self._gettime(i)
693 src = self._getsourcename(i)
694 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
695 out += 'Time[%s]:\n' % (tm)
696 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
697 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
698 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
699 #outvec.append(callback(i))
700 if len(rows) > 1:
701 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
702 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
703 else:
704 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
705 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
706 out += sep+"\n"
707
708 import os
709 if os.environ.has_key( 'USER' ):
710 usr = os.environ['USER']
711 else:
712 import commands
713 usr = commands.getoutput( 'whoami' )
714 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
715 f = open(tmpfile,'w')
716 print >> f, sep
717 print >> f, ' %s %s' % (label, statunit)
718 print >> f, sep
719 print >> f, out
720 f.close()
721 f = open(tmpfile,'r')
722 x = f.readlines()
723 f.close()
724 asaplog.push(''.join(x), False)
725
726 return statvals
727
728 def chan2data(self, rowno=0, chan=0):
729 """\
730 Returns channel/frequency/velocity and spectral value
731 at an arbitrary row and channel in the scantable.
732
733 Parameters:
734
735 rowno: a row number in the scantable. Default is the
736 first row, i.e. rowno=0
737
738 chan: a channel in the scantable. Default is the first
739 channel, i.e. pos=0
740
741 """
742 if isinstance(rowno, int) and isinstance(chan, int):
743 qx = {'unit': self.get_unit(),
744 'value': self._getabcissa(rowno)[chan]}
745 qy = {'unit': self.get_fluxunit(),
746 'value': self._getspectrum(rowno)[chan]}
747 return qx, qy
748
749 def stddev(self, mask=None):
750 """\
751 Determine the standard deviation of the current beam/if/pol
752 Takes a 'mask' as an optional parameter to specify which
753 channels should be excluded.
754
755 Parameters:
756
757 mask: an optional mask specifying where the standard
758 deviation should be determined.
759
760 Example::
761
762 scan.set_unit('channel')
763 msk = scan.create_mask([100, 200], [500, 600])
764 scan.stddev(mask=m)
765
766 """
767 return self.stats(stat='stddev', mask=mask);
768
769
770 def get_column_names(self):
771 """\
772 Return a list of column names, which can be used for selection.
773 """
774 return list(Scantable.get_column_names(self))
775
776 def get_tsys(self, row=-1):
777 """\
778 Return the System temperatures.
779
780 Parameters:
781
782 row: the rowno to get the information for. (default all rows)
783
784 Returns:
785
786 a list of Tsys values for the current selection
787
788 """
789 if row > -1:
790 return self._get_column(self._gettsys, row)
791 return self._row_callback(self._gettsys, "Tsys")
792
793
794 def get_weather(self, row=-1):
795 """\
796 Return the weather informations.
797
798 Parameters:
799
800 row: the rowno to get the information for. (default all rows)
801
802 Returns:
803
804 a dict or list of of dicts of values for the current selection
805
806 """
807
808 values = self._get_column(self._get_weather, row)
809 if row > -1:
810 return {'temperature': values[0],
811 'pressure': values[1], 'humidity' : values[2],
812 'windspeed' : values[3], 'windaz' : values[4]
813 }
814 else:
815 out = []
816 for r in values:
817
818 out.append({'temperature': r[0],
819 'pressure': r[1], 'humidity' : r[2],
820 'windspeed' : r[3], 'windaz' : r[4]
821 })
822 return out
823
824 def _row_callback(self, callback, label):
825 out = ""
826 outvec = []
827 sep = '-'*50
828 for i in range(self.nrow()):
829 tm = self._gettime(i)
830 src = self._getsourcename(i)
831 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
832 out += 'Time[%s]:\n' % (tm)
833 if self.nbeam(-1) > 1:
834 out += ' Beam[%d] ' % (self.getbeam(i))
835 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
836 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
837 outvec.append(callback(i))
838 out += '= %3.3f\n' % (outvec[i])
839 out += sep+'\n'
840
841 asaplog.push(sep)
842 asaplog.push(" %s" % (label))
843 asaplog.push(sep)
844 asaplog.push(out)
845 asaplog.post()
846 return outvec
847
848 def _get_column(self, callback, row=-1, *args):
849 """
850 """
851 if row == -1:
852 return [callback(i, *args) for i in range(self.nrow())]
853 else:
854 if 0 <= row < self.nrow():
855 return callback(row, *args)
856
857
858 def get_time(self, row=-1, asdatetime=False, prec=-1):
859 """\
860 Get a list of time stamps for the observations.
861 Return a datetime object or a string (default) for each
862 integration time stamp in the scantable.
863
864 Parameters:
865
866 row: row no of integration. Default -1 return all rows
867
868 asdatetime: return values as datetime objects rather than strings
869
870 prec: number of digits shown. Default -1 to automatic calculation.
871 Note this number is equals to the digits of MVTime,
872 i.e., 0<prec<3: dates with hh:: only,
873 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
874 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
875
876 """
877 from datetime import datetime
878 if prec < 0:
879 # automagically set necessary precision +1
880 prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
881 prec = max(6, int(prec))
882 else:
883 prec = max(0, prec)
884 if asdatetime:
885 #precision can be 1 millisecond at max
886 prec = min(12, prec)
887
888 times = self._get_column(self._gettime, row, prec)
889 if not asdatetime:
890 return times
891 format = "%Y/%m/%d/%H:%M:%S.%f"
892 if prec < 7:
893 nsub = 1 + (((6-prec)/2) % 3)
894 substr = [".%f","%S","%M"]
895 for i in range(nsub):
896 format = format.replace(substr[i],"")
897 if isinstance(times, list):
898 return [datetime.strptime(i, format) for i in times]
899 else:
900 return datetime.strptime(times, format)
901
902
903 def get_inttime(self, row=-1):
904 """\
905 Get a list of integration times for the observations.
906 Return a time in seconds for each integration in the scantable.
907
908 Parameters:
909
910 row: row no of integration. Default -1 return all rows.
911
912 """
913 return self._get_column(self._getinttime, row)
914
915
916 def get_sourcename(self, row=-1):
917 """\
918 Get a list source names for the observations.
919 Return a string for each integration in the scantable.
920 Parameters:
921
922 row: row no of integration. Default -1 return all rows.
923
924 """
925 return self._get_column(self._getsourcename, row)
926
927 def get_elevation(self, row=-1):
928 """\
929 Get a list of elevations for the observations.
930 Return a float for each integration in the scantable.
931
932 Parameters:
933
934 row: row no of integration. Default -1 return all rows.
935
936 """
937 return self._get_column(self._getelevation, row)
938
939 def get_azimuth(self, row=-1):
940 """\
941 Get a list of azimuths for the observations.
942 Return a float for each integration in the scantable.
943
944 Parameters:
945 row: row no of integration. Default -1 return all rows.
946
947 """
948 return self._get_column(self._getazimuth, row)
949
950 def get_parangle(self, row=-1):
951 """\
952 Get a list of parallactic angles for the observations.
953 Return a float for each integration in the scantable.
954
955 Parameters:
956
957 row: row no of integration. Default -1 return all rows.
958
959 """
960 return self._get_column(self._getparangle, row)
961
962 def get_direction(self, row=-1):
963 """
964 Get a list of Positions on the sky (direction) for the observations.
965 Return a string for each integration in the scantable.
966
967 Parameters:
968
969 row: row no of integration. Default -1 return all rows
970
971 """
972 return self._get_column(self._getdirection, row)
973
974 def get_directionval(self, row=-1):
975 """\
976 Get a list of Positions on the sky (direction) for the observations.
977 Return a float for each integration in the scantable.
978
979 Parameters:
980
981 row: row no of integration. Default -1 return all rows
982
983 """
984 return self._get_column(self._getdirectionvec, row)
985
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit:    optional unit, default is 'channel'. Use one of '*Hz',
                     'km/s', 'channel' or equivalent ''

        """
        # capture the call arguments for the history record BEFORE any
        # local reassignments below
        varlist = vars()
        # '', 'pixel' and 'channel' are synonyms; stored internally as ''
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        inf[0] = unit  # slot 0 of the coordinate info holds the axis unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
1004
1005 @asaplog_post_dec
1006 def set_instrument(self, instr):
1007 """\
1008 Set the instrument for subsequent processing.
1009
1010 Parameters:
1011
1012 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1013 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1014
1015 """
1016 self._setInstrument(instr)
1017 self._add_history("set_instument", vars())
1018
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:     'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        # vars() captures the call arguments for the history record
        self._add_history("set_feedtype", vars())
1031
1032 @asaplog_post_dec
1033 def set_doppler(self, doppler='RADIO'):
1034 """\
1035 Set the doppler for all following operations on this scantable.
1036
1037 Parameters:
1038
1039 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1040
1041 """
1042 varlist = vars()
1043 inf = list(self._getcoordinfo())
1044 inf[2] = doppler
1045 self._setcoordinfo(inf)
1046 self._add_history("set_doppler", vars())
1047
1048 @asaplog_post_dec
1049 def set_freqframe(self, frame=None):
1050 """\
1051 Set the frame type of the Spectral Axis.
1052
1053 Parameters:
1054
1055 frame: an optional frame type, default 'LSRK'. Valid frames are:
1056 'TOPO', 'LSRD', 'LSRK', 'BARY',
1057 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1058
1059 Example::
1060
1061 scan.set_freqframe('BARY')
1062
1063 """
1064 frame = frame or rcParams['scantable.freqframe']
1065 varlist = vars()
1066 # "REST" is not implemented in casacore
1067 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1068 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1069 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1070 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1071
1072 if frame in valid:
1073 inf = list(self._getcoordinfo())
1074 inf[1] = frame
1075 self._setcoordinfo(inf)
1076 self._add_history("set_freqframe", varlist)
1077 else:
1078 msg = "Please specify a valid freq type. Valid types are:\n", valid
1079 raise TypeError(msg)
1080
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        # capture call arguments for the history record
        varlist = vars()
        # delegate to the C++ base-class implementation
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
1099
1100 def get_unit(self):
1101 """\
1102 Get the default unit set in this scantable
1103
1104 Returns:
1105
1106 A unit string
1107
1108 """
1109 inf = self._getcoordinfo()
1110 unit = inf[0]
1111 if unit == '': unit = 'channel'
1112 return unit
1113
1114 @asaplog_post_dec
1115 def get_abcissa(self, rowno=0):
1116 """\
1117 Get the abcissa in the current coordinate setup for the currently
1118 selected Beam/IF/Pol
1119
1120 Parameters:
1121
1122 rowno: an optional row number in the scantable. Default is the
1123 first row, i.e. rowno=0
1124
1125 Returns:
1126
1127 The abcissa values and the format string (as a dictionary)
1128
1129 """
1130 abc = self._getabcissa(rowno)
1131 lbl = self._getabcissalabel(rowno)
1132 return abc, lbl
1133
1134 @asaplog_post_dec
1135 def flag(self, row=-1, mask=None, unflag=False):
1136 """\
1137 Flag the selected data using an optional channel mask.
1138
1139 Parameters:
1140
1141 row: an optional row number in the scantable.
1142 Default -1 flags all rows
1143
1144 mask: an optional channel mask, created with create_mask. Default
1145 (no mask) is all channels.
1146
1147 unflag: if True, unflag the data
1148
1149 """
1150 varlist = vars()
1151 mask = mask or []
1152 self._flag(row, mask, unflag)
1153 self._add_history("flag", varlist)
1154
1155 @asaplog_post_dec
1156 def flag_row(self, rows=[], unflag=False):
1157 """\
1158 Flag the selected data in row-based manner.
1159
1160 Parameters:
1161
1162 rows: list of row numbers to be flagged. Default is no row
1163 (must be explicitly specified to execute row-based flagging).
1164
1165 unflag: if True, unflag the data.
1166
1167 """
1168 varlist = vars()
1169 self._flag_row(rows, unflag)
1170 self._add_history("flag_row", varlist)
1171
1172 @asaplog_post_dec
1173 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1174 """\
1175 Flag the selected data outside a specified range (in channel-base)
1176
1177 Parameters:
1178
1179 uthres: upper threshold.
1180
1181 dthres: lower threshold
1182
1183 clipoutside: True for flagging data outside the range [dthres:uthres].
1184 False for flagging data inside the range.
1185
1186 unflag: if True, unflag the data.
1187
1188 """
1189 varlist = vars()
1190 self._clip(uthres, dthres, clipoutside, unflag)
1191 self._add_history("clip", varlist)
1192
1193 @asaplog_post_dec
1194 def lag_flag(self, start, end, unit="MHz", insitu=None):
1195 """\
1196 Flag the data in 'lag' space by providing a frequency to remove.
1197 Flagged data in the scantable get interpolated over the region.
1198 No taper is applied.
1199
1200 Parameters:
1201
1202 start: the start frequency (really a period within the
1203 bandwidth) or period to remove
1204
1205 end: the end frequency or period to remove
1206
1207 unit: the frequency unit (default "MHz") or "" for
1208 explicit lag channels
1209
1210 *Notes*:
1211
1212 It is recommended to flag edges of the band or strong
1213 signals beforehand.
1214
1215 """
1216 if insitu is None: insitu = rcParams['insitu']
1217 self._math._setinsitu(insitu)
1218 varlist = vars()
1219 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1220 if not (unit == "" or base.has_key(unit)):
1221 raise ValueError("%s is not a valid unit." % unit)
1222 if unit == "":
1223 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1224 else:
1225 s = scantable(self._math._lag_flag(self, start*base[unit],
1226 end*base[unit], "frequency"))
1227 s._add_history("lag_flag", varlist)
1228 if insitu:
1229 self._assign(s)
1230 else:
1231 return s
1232
1233 @asaplog_post_dec
1234 def fft(self, rowno=[], mask=[], getrealimag=False):
1235 """\
1236 Apply FFT to the spectra.
1237 Flagged data in the scantable get interpolated over the region.
1238
1239 Parameters:
1240
1241 rowno: The row number(s) to be processed. int, list
1242 and tuple are accepted. By default ([]), FFT
1243 is applied to the whole data.
1244
1245 mask: Auxiliary channel mask(s). Given as a boolean
1246 list, it is applied to all specified rows.
1247 A list of boolean lists can also be used to
1248 apply different masks. In the latter case, the
1249 length of 'mask' must be the same as that of
1250 'rowno'. The default is [].
1251
1252 getrealimag: If True, returns the real and imaginary part
1253 values of the complex results.
1254 If False (the default), returns the amplitude
1255 (absolute value) normalised with Ndata/2 and
1256 phase (argument, in unit of radian).
1257
1258 Returns:
1259
1260 A list of dictionaries containing the results for each spectrum.
1261 Each dictionary contains two values, the real and the imaginary
1262 parts when getrealimag = True, or the amplitude(absolute value)
1263 and the phase(argument) when getrealimag = False. The key for
1264 these values are 'real' and 'imag', or 'ampl' and 'phase',
1265 respectively.
1266 """
1267 if isinstance(rowno, int):
1268 rowno = [rowno]
1269 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1270 raise TypeError("The row number(s) must be int, list or tuple.")
1271
1272 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1273
1274 if not (isinstance(mask, list) or isinstance(mask, tuple)):
1275 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1276 if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1277 if isinstance(mask[0], bool): mask = [mask]
1278 elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1279 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1280
1281 usecommonmask = (len(mask) == 1)
1282 if not usecommonmask:
1283 if len(mask) != len(rowno):
1284 raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1285 for amask in mask:
1286 if len(amask) != self.nchan():
1287 raise ValueError("The spectra and the mask have different length.")
1288
1289 res = []
1290
1291 imask = 0
1292 for whichrow in rowno:
1293 fspec = self._fft(whichrow, mask[imask], getrealimag)
1294 nspec = len(fspec)
1295
1296 i=0
1297 v1=[]
1298 v2=[]
1299 reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
1300
1301 while (i < nspec):
1302 v1.append(fspec[i])
1303 v2.append(fspec[i+1])
1304 i+=2
1305
1306 if getrealimag:
1307 reselem["real"] += v1
1308 reselem["imag"] += v2
1309 else:
1310 reselem["ampl"] += v1
1311 reselem["phase"] += v2
1312
1313 res.append(reselem)
1314
1315 if not usecommonmask: imask += 1
1316
1317 return res
1318
1319 @asaplog_post_dec
1320 def create_mask(self, *args, **kwargs):
1321 """\
1322 Compute and return a mask based on [min, max] windows.
1323 The specified windows are to be INCLUDED, when the mask is
1324 applied.
1325
1326 Parameters:
1327
1328 [min, max], [min2, max2], ...
1329 Pairs of start/end points (inclusive)specifying the regions
1330 to be masked
1331
1332 invert: optional argument. If specified as True,
1333 return an inverted mask, i.e. the regions
1334 specified are EXCLUDED
1335
1336 row: create the mask using the specified row for
1337 unit conversions, default is row=0
1338 only necessary if frequency varies over rows.
1339
1340 Examples::
1341
1342 scan.set_unit('channel')
1343 # a)
1344 msk = scan.create_mask([400, 500], [800, 900])
1345 # masks everything outside 400 and 500
1346 # and 800 and 900 in the unit 'channel'
1347
1348 # b)
1349 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1350 # masks the regions between 400 and 500
1351 # and 800 and 900 in the unit 'channel'
1352
1353 # c)
1354 #mask only channel 400
1355 msk = scan.create_mask([400])
1356
1357 """
1358 row = kwargs.get("row", 0)
1359 data = self._getabcissa(row)
1360 u = self._getcoordinfo()[0]
1361 if u == "":
1362 u = "channel"
1363 msg = "The current mask window unit is %s" % u
1364 i = self._check_ifs()
1365 if not i:
1366 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1367 asaplog.push(msg)
1368 n = self.nchan()
1369 msk = _n_bools(n, False)
1370 # test if args is a 'list' or a 'normal *args - UGLY!!!
1371
1372 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1373 and args or args[0]
1374 for window in ws:
1375 if len(window) == 1:
1376 window = [window[0], window[0]]
1377 if len(window) == 0 or len(window) > 2:
1378 raise ValueError("A window needs to be defined as [start(, end)]")
1379 if window[0] > window[1]:
1380 tmp = window[0]
1381 window[0] = window[1]
1382 window[1] = tmp
1383 for i in range(n):
1384 if data[i] >= window[0] and data[i] <= window[1]:
1385 msk[i] = True
1386 if kwargs.has_key('invert'):
1387 if kwargs.get('invert'):
1388 msk = mask_not(msk)
1389 return msk
1390
1391 def get_masklist(self, mask=None, row=0, silent=False):
1392 """\
1393 Compute and return a list of mask windows, [min, max].
1394
1395 Parameters:
1396
1397 mask: channel mask, created with create_mask.
1398
1399 row: calcutate the masklist using the specified row
1400 for unit conversions, default is row=0
1401 only necessary if frequency varies over rows.
1402
1403 Returns:
1404
1405 [min, max], [min2, max2], ...
1406 Pairs of start/end points (inclusive)specifying
1407 the masked regions
1408
1409 """
1410 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1411 raise TypeError("The mask should be list or tuple.")
1412 if len(mask) < 2:
1413 raise TypeError("The mask elements should be > 1")
1414 if self.nchan() != len(mask):
1415 msg = "Number of channels in scantable != number of mask elements"
1416 raise TypeError(msg)
1417 data = self._getabcissa(row)
1418 u = self._getcoordinfo()[0]
1419 if u == "":
1420 u = "channel"
1421 msg = "The current mask window unit is %s" % u
1422 i = self._check_ifs()
1423 if not i:
1424 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1425 if not silent:
1426 asaplog.push(msg)
1427 masklist=[]
1428 ist, ien = None, None
1429 ist, ien=self.get_mask_indices(mask)
1430 if ist is not None and ien is not None:
1431 for i in xrange(len(ist)):
1432 range=[data[ist[i]],data[ien[i]]]
1433 range.sort()
1434 masklist.append([range[0],range[1]])
1435 return masklist
1436
1437 def get_mask_indices(self, mask=None):
1438 """\
1439 Compute and Return lists of mask start indices and mask end indices.
1440
1441 Parameters:
1442
1443 mask: channel mask, created with create_mask.
1444
1445 Returns:
1446
1447 List of mask start indices and that of mask end indices,
1448 i.e., [istart1,istart2,....], [iend1,iend2,....].
1449
1450 """
1451 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1452 raise TypeError("The mask should be list or tuple.")
1453 if len(mask) < 2:
1454 raise TypeError("The mask elements should be > 1")
1455 istart=[]
1456 iend=[]
1457 if mask[0]: istart.append(0)
1458 for i in range(len(mask)-1):
1459 if not mask[i] and mask[i+1]:
1460 istart.append(i+1)
1461 elif mask[i] and not mask[i+1]:
1462 iend.append(i)
1463 if mask[len(mask)-1]: iend.append(len(mask)-1)
1464 if len(istart) != len(iend):
1465 raise RuntimeError("Numbers of mask start != mask end.")
1466 for i in range(len(istart)):
1467 if istart[i] > iend[i]:
1468 raise RuntimeError("Mask start index > mask end index")
1469 break
1470 return istart,iend
1471
    @asaplog_post_dec
    def parse_maskexpr(self,maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")
            # NOTE(review): execution continues after logging - confirm
            # whether asaplog.post("ERROR") is expected to raise

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        if maskstring == "":
            # empty selection means "all valid IFs, all channels"
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                               offset=1,minval=min(valid_ifs),
                                               maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            # NOTE(review): selspws is rebound on every pass, so only the
            # last ';'-separated spw group survives this loop - confirm
            # whether multiple groups are expected in selset[0]
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # locate any row belonging to this IF to read its
                        # coordinate and abcissa
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1598
1599 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1600 """
1601 Parameters:
1602 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1603 typestr : The type of the values in returned list
1604 ('integer' or 'float')
1605 offset : The offset value to subtract from or add to
1606 the boundary value if the selection string
1607 includes '<' or '>'
1608 minval, maxval : The minimum/maximum values to set if the
1609 selection string includes '<' or '>'.
1610 The list element is filled with None by default.
1611 Returns:
1612 A list of min/max pair of selections.
1613 Example:
1614 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1615 returns [[0,2],[5,7],[9,9]]
1616 """
1617 selgroups = selstr.split(';')
1618 sellists = []
1619 if typestr.lower().startswith('int'):
1620 formatfunc = int
1621 else:
1622 formatfunc = float
1623
1624 for currsel in selgroups:
1625 if currsel.find('~') > 0:
1626 minsel = formatfunc(currsel.split('~')[0].strip())
1627 maxsel = formatfunc(currsel.split('~')[1].strip())
1628 elif currsel.strip().startswith('<'):
1629 minsel = minval
1630 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1631 - formatfunc(offset)
1632 elif currsel.strip().startswith('>'):
1633 minsel = formatfunc(currsel.split('>')[1].strip()) \
1634 + formatfunc(offset)
1635 maxsel = maxval
1636 else:
1637 minsel = formatfunc(currsel)
1638 maxsel = formatfunc(currsel)
1639 sellists.append([minsel,maxsel])
1640 return sellists
1641
1642# def get_restfreqs(self):
1643# """
1644# Get the restfrequency(s) stored in this scantable.
1645# The return value(s) are always of unit 'Hz'
1646# Parameters:
1647# none
1648# Returns:
1649# a list of doubles
1650# """
1651# return list(self._getrestfreqs())
1652
1653 def get_restfreqs(self, ids=None):
1654 """\
1655 Get the restfrequency(s) stored in this scantable.
1656 The return value(s) are always of unit 'Hz'
1657
1658 Parameters:
1659
1660 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1661 be retrieved
1662
1663 Returns:
1664
1665 dictionary containing ids and a list of doubles for each id
1666
1667 """
1668 if ids is None:
1669 rfreqs={}
1670 idlist = self.getmolnos()
1671 for i in idlist:
1672 rfreqs[i]=list(self._getrestfreqs(i))
1673 return rfreqs
1674 else:
1675 if type(ids)==list or type(ids)==tuple:
1676 rfreqs={}
1677 for i in ids:
1678 rfreqs[i]=list(self._getrestfreqs(i))
1679 return rfreqs
1680 else:
1681 return list(self._getrestfreqs(ids))
1682 #return list(self._getrestfreqs(ids))
1683
1684 def set_restfreqs(self, freqs=None, unit='Hz'):
1685 """\
1686 Set or replace the restfrequency specified and
1687 if the 'freqs' argument holds a scalar,
1688 then that rest frequency will be applied to all the selected
1689 data. If the 'freqs' argument holds
1690 a vector, then it MUST be of equal or smaller length than
1691 the number of IFs (and the available restfrequencies will be
1692 replaced by this vector). In this case, *all* data have
1693 the restfrequency set per IF according
1694 to the corresponding value you give in the 'freqs' vector.
1695 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1696 IF 1 gets restfreq 2e9.
1697
1698 You can also specify the frequencies via a linecatalog.
1699
1700 Parameters:
1701
1702 freqs: list of rest frequency values or string idenitfiers
1703
1704 unit: unit for rest frequency (default 'Hz')
1705
1706
1707 Example::
1708
1709 # set the given restfrequency for the all currently selected IFs
1710 scan.set_restfreqs(freqs=1.4e9)
1711 # set restfrequencies for the n IFs (n > 1) in the order of the
1712 # list, i.e
1713 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1714 # len(list_of_restfreqs) == nIF
1715 # for nIF == 1 the following will set multiple restfrequency for
1716 # that IF
1717 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1718 # set multiple restfrequencies per IF. as a list of lists where
1719 # the outer list has nIF elements, the inner s arbitrary
1720 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1721
1722 *Note*:
1723
1724 To do more sophisticate Restfrequency setting, e.g. on a
1725 source and IF basis, use scantable.set_selection() before using
1726 this function::
1727
1728 # provided your scantable is called scan
1729 selection = selector()
1730 selection.set_name("ORION*")
1731 selection.set_ifs([1])
1732 scan.set_selection(selection)
1733 scan.set_restfreqs(freqs=86.6e9)
1734
1735 """
1736 varlist = vars()
1737 from asap import linecatalog
1738 # simple value
1739 if isinstance(freqs, int) or isinstance(freqs, float):
1740 self._setrestfreqs([freqs], [""], unit)
1741 # list of values
1742 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1743 # list values are scalars
1744 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1745 if len(freqs) == 1:
1746 self._setrestfreqs(freqs, [""], unit)
1747 else:
1748 # allow the 'old' mode of setting mulitple IFs
1749 sel = selector()
1750 savesel = self._getselection()
1751 iflist = self.getifnos()
1752 if len(freqs)>len(iflist):
1753 raise ValueError("number of elements in list of list "
1754 "exeeds the current IF selections")
1755 iflist = self.getifnos()
1756 for i, fval in enumerate(freqs):
1757 sel.set_ifs(iflist[i])
1758 self._setselection(sel)
1759 self._setrestfreqs([fval], [""], unit)
1760 self._setselection(savesel)
1761
1762 # list values are dict, {'value'=, 'name'=)
1763 elif isinstance(freqs[-1], dict):
1764 values = []
1765 names = []
1766 for d in freqs:
1767 values.append(d["value"])
1768 names.append(d["name"])
1769 self._setrestfreqs(values, names, unit)
1770 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1771 sel = selector()
1772 savesel = self._getselection()
1773 iflist = self.getifnos()
1774 if len(freqs)>len(iflist):
1775 raise ValueError("number of elements in list of list exeeds"
1776 " the current IF selections")
1777 for i, fval in enumerate(freqs):
1778 sel.set_ifs(iflist[i])
1779 self._setselection(sel)
1780 self._setrestfreqs(fval, [""], unit)
1781 self._setselection(savesel)
1782 # freqs are to be taken from a linecatalog
1783 elif isinstance(freqs, linecatalog):
1784 sel = selector()
1785 savesel = self._getselection()
1786 for i in xrange(freqs.nrow()):
1787 sel.set_ifs(iflist[i])
1788 self._setselection(sel)
1789 self._setrestfreqs([freqs.get_frequency(i)],
1790 [freqs.get_name(i)], "MHz")
1791 # ensure that we are not iterating past nIF
1792 if i == self.nif()-1: break
1793 self._setselection(savesel)
1794 else:
1795 return
1796 self._add_history("set_restfreqs", varlist)
1797
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate to the C++ base class implementation
        # NOTE(review): unlike the other mutators in this class, this does
        # not call self._add_history - confirm whether that is intentional
        Scantable.shift_refpix(self, delta)
1813
1814 @asaplog_post_dec
1815 def history(self, filename=None):
1816 """\
1817 Print the history. Optionally to a file.
1818
1819 Parameters:
1820
1821 filename: The name of the file to save the history to.
1822
1823 """
1824 hist = list(self._gethistory())
1825 out = "-"*80
1826 for h in hist:
1827 if h.startswith("---"):
1828 out = "\n".join([out, h])
1829 else:
1830 items = h.split("##")
1831 date = items[0]
1832 func = items[1]
1833 items = items[2:]
1834 out += "\n"+date+"\n"
1835 out += "Function: %s\n Parameters:" % (func)
1836 for i in items:
1837 if i == '':
1838 continue
1839 s = i.split("=")
1840 out += "\n %s = %s" % (s[0], s[1])
1841 out = "\n".join([out, "-"*80])
1842 if filename is not None:
1843 if filename is "":
1844 filename = 'scantable_history.txt'
1845 import os
1846 filename = os.path.expandvars(os.path.expanduser(filename))
1847 if not os.path.isdir(filename):
1848 data = open(filename, 'w')
1849 data.write(out)
1850 data.close()
1851 else:
1852 msg = "Illegal file name '%s'." % (filename)
1853 raise IOError(msg)
1854 return page(out)
1855 #
1856 # Maths business
1857 #
1858 @asaplog_post_dec
1859 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1860 """\
1861 Return the (time) weighted average of a scan.
1862
1863 *Note*:
1864
1865 in channels only - align if necessary
1866
1867 Parameters:
1868
1869 mask: an optional mask (only used for 'var' and 'tsys'
1870 weighting)
1871
1872 scanav: True averages each scan separately
1873 False (default) averages all scans together,
1874
1875 weight: Weighting scheme.
1876 'none' (mean no weight)
1877 'var' (1/var(spec) weighted)
1878 'tsys' (1/Tsys**2 weighted)
1879 'tint' (integration time weighted)
1880 'tintsys' (Tint/Tsys**2)
1881 'median' ( median averaging)
1882 The default is 'tint'
1883
1884 align: align the spectra in velocity before averaging. It takes
1885 the time of the first spectrum as reference time.
1886
1887 Example::
1888
1889 # time average the scantable without using a mask
1890 newscan = scan.average_time()
1891
1892 """
1893 varlist = vars()
1894 weight = weight or 'TINT'
1895 mask = mask or ()
1896 scanav = (scanav and 'SCAN') or 'NONE'
1897 scan = (self, )
1898
1899 if align:
1900 scan = (self.freq_align(insitu=False), )
1901 s = None
1902 if weight.upper() == 'MEDIAN':
1903 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1904 scanav))
1905 else:
1906 s = scantable(self._math._average(scan, mask, weight.upper(),
1907 scanav))
1908 s._add_history("average_time", varlist)
1909 return s
1910
1911 @asaplog_post_dec
1912 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1913 """\
1914 Return a scan where all spectra are converted to either
1915 Jansky or Kelvin depending upon the flux units of the scan table.
1916 By default the function tries to look the values up internally.
1917 If it can't find them (or if you want to over-ride), you must
1918 specify EITHER jyperk OR eta (and D which it will try to look up
1919 also if you don't set it). jyperk takes precedence if you set both.
1920
1921 Parameters:
1922
1923 jyperk: the Jy / K conversion factor
1924
1925 eta: the aperture efficiency
1926
1927 d: the geometric diameter (metres)
1928
1929 insitu: if False a new scantable is returned.
1930 Otherwise, the scaling is done in-situ
1931 The default is taken from .asaprc (False)
1932
1933 """
1934 if insitu is None: insitu = rcParams['insitu']
1935 self._math._setinsitu(insitu)
1936 varlist = vars()
1937 jyperk = jyperk or -1.0
1938 d = d or -1.0
1939 eta = eta or -1.0
1940 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1941 s._add_history("convert_flux", varlist)
1942 if insitu: self._assign(s)
1943 else: return s
1944
1945 @asaplog_post_dec
1946 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1947 """\
1948 Return a scan after applying a gain-elevation correction.
1949 The correction can be made via either a polynomial or a
1950 table-based interpolation (and extrapolation if necessary).
1951 You specify polynomial coefficients, an ascii table or neither.
1952 If you specify neither, then a polynomial correction will be made
1953 with built in coefficients known for certain telescopes (an error
1954 will occur if the instrument is not known).
1955 The data and Tsys are *divided* by the scaling factors.
1956
1957 Parameters:
1958
1959 poly: Polynomial coefficients (default None) to compute a
1960 gain-elevation correction as a function of
1961 elevation (in degrees).
1962
1963 filename: The name of an ascii file holding correction factors.
1964 The first row of the ascii file must give the column
1965 names and these MUST include columns
1966 "ELEVATION" (degrees) and "FACTOR" (multiply data
1967 by this) somewhere.
1968 The second row must give the data type of the
1969 column. Use 'R' for Real and 'I' for Integer.
1970 An example file would be
1971 (actual factors are arbitrary) :
1972
1973 TIME ELEVATION FACTOR
1974 R R R
1975 0.1 0 0.8
1976 0.2 20 0.85
1977 0.3 40 0.9
1978 0.4 60 0.85
1979 0.5 80 0.8
1980 0.6 90 0.75
1981
1982 method: Interpolation method when correcting from a table.
1983 Values are "nearest", "linear" (default), "cubic"
1984 and "spline"
1985
1986 insitu: if False a new scantable is returned.
1987 Otherwise, the scaling is done in-situ
1988 The default is taken from .asaprc (False)
1989
1990 """
1991
1992 if insitu is None: insitu = rcParams['insitu']
1993 self._math._setinsitu(insitu)
1994 varlist = vars()
1995 poly = poly or ()
1996 from os.path import expandvars
1997 filename = expandvars(filename)
1998 s = scantable(self._math._gainel(self, poly, filename, method))
1999 s._add_history("gain_el", varlist)
2000 if insitu:
2001 self._assign(s)
2002 else:
2003 return s
2004
2005 @asaplog_post_dec
2006 def freq_align(self, reftime=None, method='cubic', insitu=None):
2007 """\
2008 Return a scan where all rows have been aligned in frequency/velocity.
2009 The alignment frequency frame (e.g. LSRK) is that set by function
2010 set_freqframe.
2011
2012 Parameters:
2013
2014 reftime: reference time to align at. By default, the time of
2015 the first row of data is used.
2016
2017 method: Interpolation method for regridding the spectra.
2018 Choose from "nearest", "linear", "cubic" (default)
2019 and "spline"
2020
2021 insitu: if False a new scantable is returned.
2022 Otherwise, the scaling is done in-situ
2023 The default is taken from .asaprc (False)
2024
2025 """
2026 if insitu is None: insitu = rcParams["insitu"]
2027 self._math._setinsitu(insitu)
2028 varlist = vars()
2029 reftime = reftime or ""
2030 s = scantable(self._math._freq_align(self, reftime, method))
2031 s._add_history("freq_align", varlist)
2032 if insitu: self._assign(s)
2033 else: return s
2034
2035 @asaplog_post_dec
2036 def opacity(self, tau=None, insitu=None):
2037 """\
2038 Apply an opacity correction. The data
2039 and Tsys are multiplied by the correction factor.
2040
2041 Parameters:
2042
2043 tau: (list of) opacity from which the correction factor is
2044 exp(tau*ZD)
2045 where ZD is the zenith-distance.
2046 If a list is provided, it has to be of length nIF,
2047 nIF*nPol or 1 and in order of IF/POL, e.g.
2048 [opif0pol0, opif0pol1, opif1pol0 ...]
2049 if tau is `None` the opacities are determined from a
2050 model.
2051
2052 insitu: if False a new scantable is returned.
2053 Otherwise, the scaling is done in-situ
2054 The default is taken from .asaprc (False)
2055
2056 """
2057 if insitu is None: insitu = rcParams['insitu']
2058 self._math._setinsitu(insitu)
2059 varlist = vars()
2060 if not hasattr(tau, "__len__"):
2061 tau = [tau]
2062 s = scantable(self._math._opacity(self, tau))
2063 s._add_history("opacity", varlist)
2064 if insitu: self._assign(s)
2065 else: return s
2066
2067 @asaplog_post_dec
2068 def bin(self, width=5, insitu=None):
2069 """\
2070 Return a scan where all spectra have been binned up.
2071
2072 Parameters:
2073
2074 width: The bin width (default=5) in pixels
2075
2076 insitu: if False a new scantable is returned.
2077 Otherwise, the scaling is done in-situ
2078 The default is taken from .asaprc (False)
2079
2080 """
2081 if insitu is None: insitu = rcParams['insitu']
2082 self._math._setinsitu(insitu)
2083 varlist = vars()
2084 s = scantable(self._math._bin(self, width))
2085 s._add_history("bin", varlist)
2086 if insitu:
2087 self._assign(s)
2088 else:
2089 return s
2090
2091 @asaplog_post_dec
2092 def resample(self, width=5, method='cubic', insitu=None):
2093 """\
2094 Return a scan where all spectra have been binned up.
2095
2096 Parameters:
2097
2098 width: The bin width (default=5) in pixels
2099
2100 method: Interpolation method when correcting from a table.
2101 Values are "nearest", "linear", "cubic" (default)
2102 and "spline"
2103
2104 insitu: if False a new scantable is returned.
2105 Otherwise, the scaling is done in-situ
2106 The default is taken from .asaprc (False)
2107
2108 """
2109 if insitu is None: insitu = rcParams['insitu']
2110 self._math._setinsitu(insitu)
2111 varlist = vars()
2112 s = scantable(self._math._resample(self, method, width))
2113 s._add_history("resample", varlist)
2114 if insitu: self._assign(s)
2115 else: return s
2116
2117 @asaplog_post_dec
2118 def average_pol(self, mask=None, weight='none'):
2119 """\
2120 Average the Polarisations together.
2121
2122 Parameters:
2123
2124 mask: An optional mask defining the region, where the
2125 averaging will be applied. The output will have all
2126 specified points masked.
2127
2128 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2129 weighted), or 'tsys' (1/Tsys**2 weighted)
2130
2131 """
2132 varlist = vars()
2133 mask = mask or ()
2134 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2135 s._add_history("average_pol", varlist)
2136 return s
2137
2138 @asaplog_post_dec
2139 def average_beam(self, mask=None, weight='none'):
2140 """\
2141 Average the Beams together.
2142
2143 Parameters:
2144 mask: An optional mask defining the region, where the
2145 averaging will be applied. The output will have all
2146 specified points masked.
2147
2148 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2149 weighted), or 'tsys' (1/Tsys**2 weighted)
2150
2151 """
2152 varlist = vars()
2153 mask = mask or ()
2154 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2155 s._add_history("average_beam", varlist)
2156 return s
2157
2158 def parallactify(self, pflag):
2159 """\
2160 Set a flag to indicate whether this data should be treated as having
2161 been 'parallactified' (total phase == 0.0)
2162
2163 Parameters:
2164
2165 pflag: Bool indicating whether to turn this on (True) or
2166 off (False)
2167
2168 """
2169 varlist = vars()
2170 self._parallactify(pflag)
2171 self._add_history("parallactify", varlist)
2172
2173 @asaplog_post_dec
2174 def convert_pol(self, poltype=None):
2175 """\
2176 Convert the data to a different polarisation type.
2177 Note that you will need cross-polarisation terms for most conversions.
2178
2179 Parameters:
2180
2181 poltype: The new polarisation type. Valid types are:
2182 "linear", "circular", "stokes" and "linpol"
2183
2184 """
2185 varlist = vars()
2186 s = scantable(self._math._convertpol(self, poltype))
2187 s._add_history("convert_pol", varlist)
2188 return s
2189
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # overlay original and smoothed spectrum row by row and let the
            # user accept or reject each smoothing interactively
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2265
2266 @asaplog_post_dec
2267 def _parse_wn(self, wn):
2268 if isinstance(wn, list) or isinstance(wn, tuple):
2269 return wn
2270 elif isinstance(wn, int):
2271 return [ wn ]
2272 elif isinstance(wn, str):
2273 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2274 val = wn.split('-')
2275 val = [int(val[0]), int(val[1])]
2276 val.sort()
2277 res = [i for i in xrange(val[0], val[1]+1)]
2278 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2279 val = int(wn[2:])+1
2280 res = [i for i in xrange(val)]
2281 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2282 val = int(wn[:-2])+1
2283 res = [i for i in xrange(val)]
2284 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2285 val = int(wn[1:])
2286 res = [i for i in xrange(val)]
2287 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2288 val = int(wn[:-1])
2289 res = [i for i in xrange(val)]
2290 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2291 val = int(wn[2:])
2292 res = [i for i in xrange(val, self.nchan()/2+1)]
2293 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2294 val = int(wn[:-2])
2295 res = [i for i in xrange(val, self.nchan()/2+1)]
2296 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2297 val = int(wn[1:])+1
2298 res = [i for i in xrange(val, self.nchan()/2+1)]
2299 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2300 val = int(wn[:-1])+1
2301 res = [i for i in xrange(val, self.nchan()/2+1)]
2302
2303 return res
2304 else:
2305 msg = 'wrong value given for addwn/rejwn'
2306 raise RuntimeError(msg)
2307
2308
2309 @asaplog_post_dec
2310 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2311 fftmethod=None, fftthresh=None,
2312 addwn=None, rejwn=None, clipthresh=None,
2313 clipniter=None, plot=None,
2314 getresidual=None, showprogress=None,
2315 minnrow=None, outlog=None, blfile=None):
2316 """\
2317 Return a scan which has been baselined (all rows) with sinusoidal functions.
2318 Parameters:
2319 insitu: if False a new scantable is returned.
2320 Otherwise, the scaling is done in-situ
2321 The default is taken from .asaprc (False)
2322 mask: an optional mask
2323 applyfft: if True use some method, such as FFT, to find
2324 strongest sinusoidal components in the wavenumber
2325 domain to be used for baseline fitting.
2326 default is True.
2327 fftmethod: method to find the strong sinusoidal components.
2328 now only 'fft' is available and it is the default.
2329 fftthresh: the threshold to select wave numbers to be used for
2330 fitting from the distribution of amplitudes in the
2331 wavenumber domain.
2332 both float and string values accepted.
2333 given a float value, the unit is set to sigma.
2334 for string values, allowed formats include:
2335 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2336 'topx' (= the x strongest ones, e.g. 'top5').
2337 default is 3.0 (unit: sigma).
2338 addwn: the additional wave numbers to be used for fitting.
2339 list or integer value is accepted to specify every
2340 wave numbers. also string value can be used in case
2341 you need to specify wave numbers in a certain range,
2342 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2343 '<a' (= 0,1,...,a-2,a-1),
2344 '>=a' (= a, a+1, ... up to the maximum wave
2345 number corresponding to the Nyquist
2346 frequency for the case of FFT).
2347 default is [].
2348 rejwn: the wave numbers NOT to be used for fitting.
2349 can be set just as addwn but has higher priority:
2350 wave numbers which are specified both in addwn
2351 and rejwn will NOT be used. default is [].
2352 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2353 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2354 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2355 plot the fit and the residual. In this each
2356 indivual fit has to be approved, by typing 'y'
2357 or 'n'
2358 getresidual: if False, returns best-fit values instead of
2359 residual. (default is True)
2360 showprogress: show progress status for large data.
2361 default is True.
2362 minnrow: minimum number of input spectra to show.
2363 default is 1000.
2364 outlog: Output the coefficients of the best-fit
2365 function to logger (default is False)
2366 blfile: Name of a text file in which the best-fit
2367 parameter values to be written
2368 (default is '': no file/logger output)
2369
2370 Example:
2371 # return a scan baselined by a combination of sinusoidal curves having
2372 # wave numbers in spectral window up to 10,
2373 # also with 3-sigma clipping, iteration up to 4 times
2374 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2375
2376 Note:
2377 The best-fit parameter values output in logger and/or blfile are now
2378 based on specunit of 'channel'.
2379 """
2380
2381 try:
2382 varlist = vars()
2383
2384 if insitu is None: insitu = rcParams['insitu']
2385 if insitu:
2386 workscan = self
2387 else:
2388 workscan = self.copy()
2389
2390 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2391 if applyfft is None: applyfft = True
2392 if fftmethod is None: fftmethod = 'fft'
2393 if fftthresh is None: fftthresh = 3.0
2394 if addwn is None: addwn = []
2395 if rejwn is None: rejwn = []
2396 if clipthresh is None: clipthresh = 3.0
2397 if clipniter is None: clipniter = 0
2398 if plot is None: plot = False
2399 if getresidual is None: getresidual = True
2400 if showprogress is None: showprogress = True
2401 if minnrow is None: minnrow = 1000
2402 if outlog is None: outlog = False
2403 if blfile is None: blfile = ''
2404
2405 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2406 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2407 workscan._add_history('sinusoid_baseline', varlist)
2408
2409 if insitu:
2410 self._assign(workscan)
2411 else:
2412 return workscan
2413
2414 except RuntimeError, e:
2415 raise_fitting_failure_exception(e)
2416
2417
    @asaplog_post_dec
    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
                               chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
                               outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a' (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # apply the documented defaults for every unset parameter
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = []
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # the actual line finding + fitting is done in the C++ layer.
            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2546
2547 @asaplog_post_dec
2548 def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2549 plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
2550 """\
2551 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
2552 Parameters:
2553 insitu: If False a new scantable is returned.
2554 Otherwise, the scaling is done in-situ
2555 The default is taken from .asaprc (False)
2556 mask: An optional mask
2557 npiece: Number of pieces. (default is 2)
2558 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2559 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2560 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2561 plot the fit and the residual. In this each
2562 indivual fit has to be approved, by typing 'y'
2563 or 'n'
2564 getresidual: if False, returns best-fit values instead of
2565 residual. (default is True)
2566 showprogress: show progress status for large data.
2567 default is True.
2568 minnrow: minimum number of input spectra to show.
2569 default is 1000.
2570 outlog: Output the coefficients of the best-fit
2571 function to logger (default is False)
2572 blfile: Name of a text file in which the best-fit
2573 parameter values to be written
2574 (default is "": no file/logger output)
2575
2576 Example:
2577 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2578 # also with 3-sigma clipping, iteration up to 4 times
2579 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2580
2581 Note:
2582 The best-fit parameter values output in logger and/or blfile are now
2583 based on specunit of 'channel'.
2584 """
2585
2586 try:
2587 varlist = vars()
2588
2589 if insitu is None: insitu = rcParams['insitu']
2590 if insitu:
2591 workscan = self
2592 else:
2593 workscan = self.copy()
2594
2595 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2596 if npiece is None: npiece = 2
2597 if clipthresh is None: clipthresh = 3.0
2598 if clipniter is None: clipniter = 0
2599 if plot is None: plot = False
2600 if getresidual is None: getresidual = True
2601 if showprogress is None: showprogress = True
2602 if minnrow is None: minnrow = 1000
2603 if outlog is None: outlog = False
2604 if blfile is None: blfile = ''
2605
2606 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2607 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2608 workscan._add_history("cspline_baseline", varlist)
2609
2610 if insitu:
2611 self._assign(workscan)
2612 else:
2613 return workscan
2614
2615 except RuntimeError, e:
2616 raise_fitting_failure_exception(e)
2617
    @asaplog_post_dec
    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            npiece:         Number of pieces. (default is 2)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # apply the documented defaults for every unset parameter
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # the actual line finding + fitting is done in the C++ layer.
            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
                                            clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog, blfile)
            workscan._add_history("auto_cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2721
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # apply the documented defaults for every unset parameter
            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit each row with the python fitter and
                # ask the user to approve each fit.
                # NOTE(review): best-fit params are appended to blfile only
                # if the file already exists — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the per-row channel mask
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate all rows to the C++ layer
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2834
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # apply the documented defaults for every unset parameter
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run the line finder, fit with the python
                # fitter and ask the user to approve each fit.
                # NOTE(review): best-fit params are appended to blfile only
                # if the file already exists — confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # edge appears to hold one (start, end) pair per IF,
                    # flattened — pick this row's pair. TODO confirm against
                    # normalise_edge_param.
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2]) # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found above
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual() \
                                               if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate all rows to the C++ layer
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2983
2984 def _init_blinfo(self):
2985 """\
2986 Initialise the following three auxiliary members:
2987 blpars : parameters of the best-fit baseline,
2988 masklists : mask data (edge positions of masked channels) and
2989 actualmask : mask data (in boolean list),
2990 to keep for use later (including output to logger/text files).
2991 Used by poly_baseline() and auto_poly_baseline() in case of
2992 'plot=True'.
2993 """
2994 self.blpars = []
2995 self.masklists = []
2996 self.actualmask = []
2997 return
2998
2999 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3000 """\
3001 Append baseline-fitting related info to blpars, masklist and
3002 actualmask.
3003 """
3004 self.blpars.append(data_blpars)
3005 self.masklists.append(data_masklists)
3006 self.actualmask.append(data_actualmask)
3007 return
3008
3009 @asaplog_post_dec
3010 def rotate_linpolphase(self, angle):
3011 """\
3012 Rotate the phase of the complex polarization O=Q+iU correlation.
3013 This is always done in situ in the raw data. So if you call this
3014 function more than once then each call rotates the phase further.
3015
3016 Parameters:
3017
3018 angle: The angle (degrees) to rotate (add) by.
3019
3020 Example::
3021
3022 scan.rotate_linpolphase(2.3)
3023
3024 """
3025 varlist = vars()
3026 self._math._rotate_linpolphase(self, angle)
3027 self._add_history("rotate_linpolphase", varlist)
3028 return
3029
3030 @asaplog_post_dec
3031 def rotate_xyphase(self, angle):
3032 """\
3033 Rotate the phase of the XY correlation. This is always done in situ
3034 in the data. So if you call this function more than once
3035 then each call rotates the phase further.
3036
3037 Parameters:
3038
3039 angle: The angle (degrees) to rotate (add) by.
3040
3041 Example::
3042
3043 scan.rotate_xyphase(2.3)
3044
3045 """
3046 varlist = vars()
3047 self._math._rotate_xyphase(self, angle)
3048 self._add_history("rotate_xyphase", varlist)
3049 return
3050
3051 @asaplog_post_dec
3052 def swap_linears(self):
3053 """\
3054 Swap the linear polarisations XX and YY, or better the first two
3055 polarisations as this also works for ciculars.
3056 """
3057 varlist = vars()
3058 self._math._swap_linears(self)
3059 self._add_history("swap_linears", varlist)
3060 return
3061
3062 @asaplog_post_dec
3063 def invert_phase(self):
3064 """\
3065 Invert the phase of the complex polarisation
3066 """
3067 varlist = vars()
3068 self._math._invert_phase(self)
3069 self._add_history("invert_phase", varlist)
3070 return
3071
3072 @asaplog_post_dec
3073 def add(self, offset, insitu=None):
3074 """\
3075 Return a scan where all spectra have the offset added
3076
3077 Parameters:
3078
3079 offset: the offset
3080
3081 insitu: if False a new scantable is returned.
3082 Otherwise, the scaling is done in-situ
3083 The default is taken from .asaprc (False)
3084
3085 """
3086 if insitu is None: insitu = rcParams['insitu']
3087 self._math._setinsitu(insitu)
3088 varlist = vars()
3089 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3090 s._add_history("add", varlist)
3091 if insitu:
3092 self._assign(s)
3093 else:
3094 return s
3095
3096 @asaplog_post_dec
3097 def scale(self, factor, tsys=True, insitu=None):
3098 """\
3099
3100 Return a scan where all spectra are scaled by the given 'factor'
3101
3102 Parameters:
3103
3104 factor: the scaling factor (float or 1D float list)
3105
3106 insitu: if False a new scantable is returned.
3107 Otherwise, the scaling is done in-situ
3108 The default is taken from .asaprc (False)
3109
3110 tsys: if True (default) then apply the operation to Tsys
3111 as well as the data
3112
3113 """
3114 if insitu is None: insitu = rcParams['insitu']
3115 self._math._setinsitu(insitu)
3116 varlist = vars()
3117 s = None
3118 import numpy
3119 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3120 if isinstance(factor[0], list) or isinstance(factor[0],
3121 numpy.ndarray):
3122 from asapmath import _array2dOp
3123 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3124 else:
3125 s = scantable( self._math._arrayop( self, factor,
3126 "MUL", tsys ) )
3127 else:
3128 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3129 s._add_history("scale", varlist)
3130 if insitu:
3131 self._assign(s)
3132 else:
3133 return s
3134
3135 def set_sourcetype(self, match, matchtype="pattern",
3136 sourcetype="reference"):
3137 """\
3138 Set the type of the source to be an source or reference scan
3139 using the provided pattern.
3140
3141 Parameters:
3142
3143 match: a Unix style pattern, regular expression or selector
3144
3145 matchtype: 'pattern' (default) UNIX style pattern or
3146 'regex' regular expression
3147
3148 sourcetype: the type of the source to use (source/reference)
3149
3150 """
3151 varlist = vars()
3152 basesel = self.get_selection()
3153 stype = -1
3154 if sourcetype.lower().startswith("r"):
3155 stype = 1
3156 elif sourcetype.lower().startswith("s"):
3157 stype = 0
3158 else:
3159 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3160 if matchtype.lower().startswith("p"):
3161 matchtype = "pattern"
3162 elif matchtype.lower().startswith("r"):
3163 matchtype = "regex"
3164 else:
3165 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3166 sel = selector()
3167 if isinstance(match, selector):
3168 sel = match
3169 else:
3170 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3171 self.set_selection(basesel+sel)
3172 self._setsourcetype(stype)
3173 self.set_selection(basesel)
3174 self._add_history("set_sourcetype", varlist)
3175
3176 @asaplog_post_dec
3177 @preserve_selection
3178 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3179 """\
3180 This function allows to build quotients automatically.
3181 It assumes the observation to have the same number of
3182 "ons" and "offs"
3183
3184 Parameters:
3185
3186 preserve: you can preserve (default) the continuum or
3187 remove it. The equations used are
3188
3189 preserve: Output = Toff * (on/off) - Toff
3190
3191 remove: Output = Toff * (on/off) - Ton
3192
3193 mode: the on/off detection mode
3194 'paired' (default)
3195 identifies 'off' scans by the
3196 trailing '_R' (Mopra/Parkes) or
3197 '_e'/'_w' (Tid) and matches
3198 on/off pairs from the observing pattern
3199 'time'
3200 finds the closest off in time
3201
3202 .. todo:: verify argument is not implemented
3203
3204 """
3205 varlist = vars()
3206 modes = ["time", "paired"]
3207 if not mode in modes:
3208 msg = "please provide valid mode. Valid modes are %s" % (modes)
3209 raise ValueError(msg)
3210 s = None
3211 if mode.lower() == "paired":
3212 sel = self.get_selection()
3213 sel.set_query("SRCTYPE==psoff")
3214 self.set_selection(sel)
3215 offs = self.copy()
3216 sel.set_query("SRCTYPE==pson")
3217 self.set_selection(sel)
3218 ons = self.copy()
3219 s = scantable(self._math._quotient(ons, offs, preserve))
3220 elif mode.lower() == "time":
3221 s = scantable(self._math._auto_quotient(self, mode, preserve))
3222 s._add_history("auto_quotient", varlist)
3223 return s
3224
3225 @asaplog_post_dec
3226 def mx_quotient(self, mask = None, weight='median', preserve=True):
3227 """\
3228 Form a quotient using "off" beams when observing in "MX" mode.
3229
3230 Parameters:
3231
3232 mask: an optional mask to be used when weight == 'stddev'
3233
3234 weight: How to average the off beams. Default is 'median'.
3235
3236 preserve: you can preserve (default) the continuum or
3237 remove it. The equations used are:
3238
3239 preserve: Output = Toff * (on/off) - Toff
3240
3241 remove: Output = Toff * (on/off) - Ton
3242
3243 """
3244 mask = mask or ()
3245 varlist = vars()
3246 on = scantable(self._math._mx_extract(self, 'on'))
3247 preoff = scantable(self._math._mx_extract(self, 'off'))
3248 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3249 from asapmath import quotient
3250 q = quotient(on, off, preserve)
3251 q._add_history("mx_quotient", varlist)
3252 return q
3253
3254 @asaplog_post_dec
3255 def freq_switch(self, insitu=None):
3256 """\
3257 Apply frequency switching to the data.
3258
3259 Parameters:
3260
3261 insitu: if False a new scantable is returned.
3262 Otherwise, the swictching is done in-situ
3263 The default is taken from .asaprc (False)
3264
3265 """
3266 if insitu is None: insitu = rcParams['insitu']
3267 self._math._setinsitu(insitu)
3268 varlist = vars()
3269 s = scantable(self._math._freqswitch(self))
3270 s._add_history("freq_switch", varlist)
3271 if insitu:
3272 self._assign(s)
3273 else:
3274 return s
3275
3276 @asaplog_post_dec
3277 def recalc_azel(self):
3278 """Recalculate the azimuth and elevation for each position."""
3279 varlist = vars()
3280 self._recalcazel()
3281 self._add_history("recalc_azel", varlist)
3282 return
3283
3284 @asaplog_post_dec
3285 def __add__(self, other):
3286 varlist = vars()
3287 s = None
3288 if isinstance(other, scantable):
3289 s = scantable(self._math._binaryop(self, other, "ADD"))
3290 elif isinstance(other, float):
3291 s = scantable(self._math._unaryop(self, other, "ADD", False))
3292 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3293 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3294 from asapmath import _array2dOp
3295 s = _array2dOp( self.copy(), other, "ADD", False )
3296 else:
3297 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
3298 else:
3299 raise TypeError("Other input is not a scantable or float value")
3300 s._add_history("operator +", varlist)
3301 return s
3302
3303 @asaplog_post_dec
3304 def __sub__(self, other):
3305 """
3306 implicit on all axes and on Tsys
3307 """
3308 varlist = vars()
3309 s = None
3310 if isinstance(other, scantable):
3311 s = scantable(self._math._binaryop(self, other, "SUB"))
3312 elif isinstance(other, float):
3313 s = scantable(self._math._unaryop(self, other, "SUB", False))
3314 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3315 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3316 from asapmath import _array2dOp
3317 s = _array2dOp( self.copy(), other, "SUB", False )
3318 else:
3319 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
3320 else:
3321 raise TypeError("Other input is not a scantable or float value")
3322 s._add_history("operator -", varlist)
3323 return s
3324
3325 @asaplog_post_dec
3326 def __mul__(self, other):
3327 """
3328 implicit on all axes and on Tsys
3329 """
3330 varlist = vars()
3331 s = None
3332 if isinstance(other, scantable):
3333 s = scantable(self._math._binaryop(self, other, "MUL"))
3334 elif isinstance(other, float):
3335 s = scantable(self._math._unaryop(self, other, "MUL", False))
3336 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3337 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3338 from asapmath import _array2dOp
3339 s = _array2dOp( self.copy(), other, "MUL", False )
3340 else:
3341 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
3342 else:
3343 raise TypeError("Other input is not a scantable or float value")
3344 s._add_history("operator *", varlist)
3345 return s
3346
3347
3348 @asaplog_post_dec
3349 def __div__(self, other):
3350 """
3351 implicit on all axes and on Tsys
3352 """
3353 varlist = vars()
3354 s = None
3355 if isinstance(other, scantable):
3356 s = scantable(self._math._binaryop(self, other, "DIV"))
3357 elif isinstance(other, float):
3358 if other == 0.0:
3359 raise ZeroDivisionError("Dividing by zero is not recommended")
3360 s = scantable(self._math._unaryop(self, other, "DIV", False))
3361 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3362 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3363 from asapmath import _array2dOp
3364 s = _array2dOp( self.copy(), other, "DIV", False )
3365 else:
3366 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
3367 else:
3368 raise TypeError("Other input is not a scantable or float value")
3369 s._add_history("operator /", varlist)
3370 return s
3371
3372 @asaplog_post_dec
3373 def get_fit(self, row=0):
3374 """\
3375 Print or return the stored fits for a row in the scantable
3376
3377 Parameters:
3378
3379 row: the row which the fit has been applied to.
3380
3381 """
3382 if row > self.nrow():
3383 return
3384 from asap.asapfit import asapfit
3385 fit = asapfit(self._getfit(row))
3386 asaplog.push( '%s' %(fit) )
3387 return fit.as_dict()
3388
3389 def flag_nans(self):
3390 """\
3391 Utility function to flag NaN values in the scantable.
3392 """
3393 import numpy
3394 basesel = self.get_selection()
3395 for i in range(self.nrow()):
3396 sel = self.get_row_selector(i)
3397 self.set_selection(basesel+sel)
3398 nans = numpy.isnan(self._getspectrum(0))
3399 if numpy.any(nans):
3400 bnans = [ bool(v) for v in nans]
3401 self.flag(bnans)
3402 self.set_selection(basesel)
3403
3404 def get_row_selector(self, rowno):
3405 #return selector(beams=self.getbeam(rowno),
3406 # ifs=self.getif(rowno),
3407 # pols=self.getpol(rowno),
3408 # scans=self.getscan(rowno),
3409 # cycles=self.getcycle(rowno))
3410 return selector(rows=[rowno])
3411
3412 def _add_history(self, funcname, parameters):
3413 if not rcParams['scantable.history']:
3414 return
3415 # create date
3416 sep = "##"
3417 from datetime import datetime
3418 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3419 hist = dstr+sep
3420 hist += funcname+sep#cdate+sep
3421 if parameters.has_key('self'): del parameters['self']
3422 for k, v in parameters.iteritems():
3423 if type(v) is dict:
3424 for k2, v2 in v.iteritems():
3425 hist += k2
3426 hist += "="
3427 if isinstance(v2, scantable):
3428 hist += 'scantable'
3429 elif k2 == 'mask':
3430 if isinstance(v2, list) or isinstance(v2, tuple):
3431 hist += str(self._zip_mask(v2))
3432 else:
3433 hist += str(v2)
3434 else:
3435 hist += str(v2)
3436 else:
3437 hist += k
3438 hist += "="
3439 if isinstance(v, scantable):
3440 hist += 'scantable'
3441 elif k == 'mask':
3442 if isinstance(v, list) or isinstance(v, tuple):
3443 hist += str(self._zip_mask(v))
3444 else:
3445 hist += str(v)
3446 else:
3447 hist += str(v)
3448 hist += sep
3449 hist = hist[:-2] # remove trailing '##'
3450 self._addhistory(hist)
3451
3452
3453 def _zip_mask(self, mask):
3454 mask = list(mask)
3455 i = 0
3456 segments = []
3457 while mask[i:].count(1):
3458 i += mask[i:].index(1)
3459 if mask[i:].count(0):
3460 j = i + mask[i:].index(0)
3461 else:
3462 j = len(mask)
3463 segments.append([i, j])
3464 i = j
3465 return segments
3466
3467 def _get_ordinate_label(self):
3468 fu = "("+self.get_fluxunit()+")"
3469 import re
3470 lbl = "Intensity"
3471 if re.match(".K.", fu):
3472 lbl = "Brightness Temperature "+ fu
3473 elif re.match(".Jy.", fu):
3474 lbl = "Flux density "+ fu
3475 return lbl
3476
3477 def _check_ifs(self):
3478 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3479 nchans = [self.nchan(i) for i in self.getifnos()]
3480 nchans = filter(lambda t: t > 0, nchans)
3481 return (sum(nchans)/len(nchans) == nchans[0])
3482
3483 @asaplog_post_dec
3484 #def _fill(self, names, unit, average, getpt, antenna):
3485 def _fill(self, names, unit, average, opts={}):
3486 first = True
3487 fullnames = []
3488 for name in names:
3489 name = os.path.expandvars(name)
3490 name = os.path.expanduser(name)
3491 if not os.path.exists(name):
3492 msg = "File '%s' does not exists" % (name)
3493 raise IOError(msg)
3494 fullnames.append(name)
3495 if average:
3496 asaplog.push('Auto averaging integrations')
3497 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3498 for name in fullnames:
3499 tbl = Scantable(stype)
3500 if is_ms( name ):
3501 r = msfiller( tbl )
3502 else:
3503 r = filler( tbl )
3504 rx = rcParams['scantable.reference']
3505 r.setreferenceexpr(rx)
3506 #r = filler(tbl)
3507 #rx = rcParams['scantable.reference']
3508 #r.setreferenceexpr(rx)
3509 msg = "Importing %s..." % (name)
3510 asaplog.push(msg, False)
3511 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3512 r.open(name, opts)# antenna, -1, -1, getpt)
3513 r.fill()
3514 if average:
3515 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3516 if not first:
3517 tbl = self._math._merge([self, tbl])
3518 Scantable.__init__(self, tbl)
3519 r.close()
3520 del r, tbl
3521 first = False
3522 #flush log
3523 asaplog.post()
3524 if unit is not None:
3525 self.set_fluxunit(unit)
3526 if not is_casapy():
3527 self.set_freqframe(rcParams['scantable.freqframe'])
3528
3529
3530 def __getitem__(self, key):
3531 if key < 0:
3532 key += self.nrow()
3533 if key >= self.nrow():
3534 raise IndexError("Row index out of range.")
3535 return self._getspectrum(key)
3536
3537 def __setitem__(self, key, value):
3538 if key < 0:
3539 key += self.nrow()
3540 if key >= self.nrow():
3541 raise IndexError("Row index out of range.")
3542 if not hasattr(value, "__len__") or \
3543 len(value) > self.nchan(self.getif(key)):
3544 raise ValueError("Spectrum length doesn't match.")
3545 return self._setspectrum(value, key)
3546
3547 def __len__(self):
3548 return self.nrow()
3549
3550 def __iter__(self):
3551 for i in range(len(self)):
3552 yield self[i]
Note: See TracBrowser for help on using the repository browser.