source: trunk/python/scantable.py@ 2276

Last change on this file since 2276 was 2276, checked in by Malte Marquarding, 14 years ago

Created 3.1.0 release tag

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 137.2 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler, msfiller
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            return func(obj, *args, **kwargs)
        finally:
            # restore on both the normal and the exceptional path
            obj.set_selection(saved)
    return wrapper
32
def is_scantable(filename):
    """Is the given file a scantable?

    A scantable is a casacore table directory holding 'table.info' and
    'table.dat' whose type line does not identify a Measurement Set.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # Only the first line of table.info is needed; close the handle even
    # if the read fails (the original leaked it on error).
    f = open(filename + '/table.info')
    try:
        first = f.readline()
    finally:
        f.close()
    # The type line does not always literally say 'Scantable', so anything
    # that is not a Measurement Set is accepted as a scantable.
    return first.find('Measurement Set') == -1
57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    A MeasurementSet is a casacore table directory holding 'table.info'
    and 'table.dat' whose type line contains 'Measurement Set'.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # Close the handle even if the read fails (the original leaked it
    # on error).
    f = open(filename + '/table.info')
    try:
        first = f.readline()
    finally:
        f.close()
    return first.find('Measurement Set') != -1
78
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be even because values for
    both sides of the spectra are contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted formats of edge include:
        * an integer - will be used for both sides of spectra of all IFs.
              e.g. 10 is converted to [10,10]
        * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
        * a list/tuple containing an integer - same as the above case.
              e.g. [10] is converted to [10,10]
        * a list/tuple containing two integers - will be used for all IFs.
              e.g. [5,10] is output as it is. no need to convert.
        * a list/tuple of lists/tuples containing TWO integers -
              each element of edge will be used for each IF.
              e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].
              If an element contains the same integer values, the input
              'edge' parameter can be given in a simpler shape in the
              following cases:
                  ** when len(edge)!=2
                      any elements containing the same values can be
                      replaced by single integers.
                      e.g. [[15,15]] can be simplified to [15]
                      (or [15,15] or 15 also).
                      e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                  ** when len(edge)=2
                      care is needed for this case: ONLY ONE of the
                      elements can be a single integer,
                      e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                      or [[5,5],10], but can NOT be simplified to [5,10].
                      when [5,10] is given, it is interpreted as
                      [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -------------------------------------------------------
    # NOTE: 'raise E, msg' (Python-2-only syntax) replaced by the call form,
    # which is valid on both Python 2 and Python 3.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different "
                             "IFs.")

    # --- canonicalise trivial shapes --------------------------------------
    if isinstance(edge, int):
        edge = [edge, edge]                 # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                   # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [edge[0], edge[0]]   # e.g. [1] => [1,1]

    # common edge: a single [left,right] pair that applies to all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten, duplicating bare integers into [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
169
def raise_fitting_failure_exception(e):
    """Report a failed fit.

    In verbose mode the error and a hint are pushed to the log; otherwise
    a RuntimeError carrying the original error text is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
177
def pack_progress_params(showprogress, minnrow):
    """Serialise progress-reporting options into the single
    'lowercasebool,int' string expected by the C++ layer.
    """
    return "%s,%s" % (str(showprogress).lower(), minnrow)
180
181class scantable(Scantable):
182 """\
183 The ASAP container for scans (single-dish data).
184 """
185
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name
                          or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the call arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference an already-open scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing asap table on disk
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                elif is_ms(filename):
                    # Measurement Set: only 'getpt' and 'antenna' keyword
                    # arguments are forwarded to the MS filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits and let the filler
                    # work out the format
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # list/tuple of file names -> fill them all into one table
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
292
293 @asaplog_post_dec
294 def save(self, name=None, format=None, overwrite=False):
295 """\
296 Store the scantable on disk. This can be an asap (aips++) Table,
297 SDFITS or MS2 format.
298
299 Parameters:
300
301 name: the name of the outputfile. For format "ASCII"
302 this is the root file name (data in 'name'.txt
303 and header in 'name'_header.txt)
304
305 format: an optional file format. Default is ASAP.
306 Allowed are:
307
308 * 'ASAP' (save as ASAP [aips++] Table),
309 * 'SDFITS' (save as SDFITS file)
310 * 'ASCII' (saves as ascii text file)
311 * 'MS2' (saves as an casacore MeasurementSet V2)
312 * 'FITS' (save as image FITS - not readable by
313 CLASS)
314 * 'CLASS' (save as FITS readable by CLASS)
315
316 overwrite: If the file should be overwritten if it exists.
317 The default False is to return with warning
318 without writing the output. USE WITH CARE.
319
320 Example::
321
322 scan.save('myscan.asap')
323 scan.save('myscan.sdfits', 'SDFITS')
324
325 """
326 from os import path
327 format = format or rcParams['scantable.save']
328 suffix = '.'+format.lower()
329 if name is None or name == "":
330 name = 'scantable'+suffix
331 msg = "No filename given. Using default name %s..." % name
332 asaplog.push(msg)
333 name = path.expandvars(name)
334 if path.isfile(name) or path.isdir(name):
335 if not overwrite:
336 msg = "File %s exists." % name
337 raise IOError(msg)
338 format2 = format.upper()
339 if format2 == 'ASAP':
340 self._save(name)
341 elif format2 == 'MS2':
342 msopt = {'ms': {'overwrite': overwrite } }
343 from asap._asap import mswriter
344 writer = mswriter( self )
345 writer.write( name, msopt )
346 else:
347 from asap._asap import stwriter as stw
348 writer = stw(format2)
349 writer.write(self, name)
350 return
351
352 def copy(self):
353 """Return a copy of this scantable.
354
355 *Note*:
356
357 This makes a full (deep) copy. scan2 = scan1 makes a reference.
358
359 Example::
360
361 copiedscan = scan.copy()
362
363 """
364 sd = scantable(Scantable._copy(self))
365 return sd
366
367 def drop_scan(self, scanid=None):
368 """\
369 Return a new scantable where the specified scan number(s) has(have)
370 been dropped.
371
372 Parameters:
373
374 scanid: a (list of) scan number(s)
375
376 """
377 from asap import _is_sequence_or_number as _is_valid
378 from asap import _to_list
379 from asap import unique
380 if not _is_valid(scanid):
381 raise RuntimeError( 'Please specify a scanno to drop from the '\
382 'scantable' )
383 scanid = _to_list(scanid)
384 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
385 for sid in scanid: allscans.remove(sid)
386 if len(allscans) == 0:
387 raise ValueError("Can't remove all scans")
388 sel = selector(scans=allscans)
389 return self._select_copy(sel)
390
391 def _select_copy(self, selection):
392 orig = self.get_selection()
393 self.set_selection(orig+selection)
394 cp = self.copy()
395 self.set_selection(orig)
396 return cp
397
398 def get_scan(self, scanid=None):
399 """\
400 Return a specific scan (by scanno) or collection of scans (by
401 source name) in a new scantable.
402
403 *Note*:
404
405 See scantable.drop_scan() for the inverse operation.
406
407 Parameters:
408
409 scanid: a (list of) scanno or a source name, unix-style
410 patterns are accepted for source name matching, e.g.
411 '*_R' gets all 'ref scans
412
413 Example::
414
415 # get all scans containing the source '323p459'
416 newscan = scan.get_scan('323p459')
417 # get all 'off' scans
418 refscans = scan.get_scan('*_R')
419 # get a susbset of scans by scanno (as listed in scan.summary())
420 newscan = scan.get_scan([0, 2, 7, 10])
421
422 """
423 if scanid is None:
424 raise RuntimeError( 'Please specify a scan no or name to '
425 'retrieve from the scantable' )
426 try:
427 bsel = self.get_selection()
428 sel = selector()
429 if type(scanid) is str:
430 sel.set_name(scanid)
431 return self._select_copy(sel)
432 elif type(scanid) is int:
433 sel.set_scans([scanid])
434 return self._select_copy(sel)
435 elif type(scanid) is list:
436 sel.set_scans(scanid)
437 return self._select_copy(sel)
438 else:
439 msg = "Illegal scanid type, use 'int' or 'list' if ints."
440 raise TypeError(msg)
441 except RuntimeError:
442 raise
443
    def __str__(self):
        # Delegate to the C++ summary so that printing a scantable shows
        # the same overview as summary().
        return Scantable._summary(self)
446
447 def summary(self, filename=None):
448 """\
449 Print a summary of the contents of this scantable.
450
451 Parameters:
452
453 filename: the name of a file to write the putput to
454 Default - no file output
455
456 """
457 info = Scantable._summary(self)
458 if filename is not None:
459 if filename is "":
460 filename = 'scantable_summary.txt'
461 from os.path import expandvars, isdir
462 filename = expandvars(filename)
463 if not isdir(filename):
464 data = open(filename, 'w')
465 data.write(info)
466 data.close()
467 else:
468 msg = "Illegal file name '%s'." % (filename)
469 raise IOError(msg)
470 return page(info)
471
    def get_spectrum(self, rowno):
        """Return the spectrum for the current row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the spectrum from

        """
        # thin wrapper over the C++ accessor
        return self._getspectrum(rowno)
481
    def get_mask(self, rowno):
        """Return the mask for the current row in the scantable as a list.

        Parameters:

            rowno: the row number to retrieve the mask from

        """
        # thin wrapper over the C++ accessor
        return self._getmask(rowno)
491
492 def set_spectrum(self, spec, rowno):
493 """Set the spectrum for the current row in the scantable.
494
495 Parameters:
496
497 spec: the new spectrum
498
499 rowno: the row number to set the spectrum for
500
501 """
502 assert(len(spec) == self.nchan())
503 return self._setspectrum(spec, rowno)
504
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the
              user specified frame (e.g. LSRK/TOPO). To get the 'real'
              coordinate, use scantable.freq_align first. Without it there is
              no closure, i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

            rowno: the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python-side helper class
        return coordinate(Scantable.get_coordinate(self, rowno))
527
    def get_selection(self):
        """\
        Get the selection object currently set on this scantable.

        Example::

            sel = scan.get_selection()
            sel.set_ifs(0)          # select IF 0
            scan.set_selection(sel) # apply modified selection

        """
        # wrap the C++ selection in a fresh python selector object
        return selector(self._getselection())
540
541 def set_selection(self, selection=None, **kw):
542 """\
543 Select a subset of the data. All following operations on this scantable
544 are only applied to thi selection.
545
546 Parameters:
547
548 selection: a selector object (default unset the selection), or
549 any combination of "pols", "ifs", "beams", "scans",
550 "cycles", "name", "query"
551
552 Examples::
553
554 sel = selector() # create a selection object
555 self.set_scans([0, 3]) # select SCANNO 0 and 3
556 scan.set_selection(sel) # set the selection
557 scan.summary() # will only print summary of scanno 0 an 3
558 scan.set_selection() # unset the selection
559 # or the equivalent
560 scan.set_selection(scans=[0,3])
561 scan.summary() # will only print summary of scanno 0 an 3
562 scan.set_selection() # unset the selection
563
564 """
565 if selection is None:
566 # reset
567 if len(kw) == 0:
568 selection = selector()
569 else:
570 # try keywords
571 for k in kw:
572 if k not in selector.fields:
573 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
574 selection = selector(**kw)
575 self._setselection(selection)
576
577 def get_row(self, row=0, insitu=None):
578 """\
579 Select a row in the scantable.
580 Return a scantable with single row.
581
582 Parameters:
583
584 row: row no of integration, default is 0.
585 insitu: if False a new scantable is returned. Otherwise, the
586 scaling is done in-situ. The default is taken from .asaprc
587 (False)
588
589 """
590 if insitu is None: insitu = rcParams['insitu']
591 if not insitu:
592 workscan = self.copy()
593 else:
594 workscan = self
595 # Select a row
596 sel=selector()
597 sel.set_rows([row])
598 #sel.set_scans([workscan.getscan(row)])
599 #sel.set_cycles([workscan.getcycle(row)])
600 #sel.set_beams([workscan.getbeam(row)])
601 #sel.set_ifs([workscan.getif(row)])
602 #sel.set_polarisations([workscan.getpol(row)])
603 #sel.set_name(workscan._getsourcename(row))
604 workscan.set_selection(sel)
605 if not workscan.nrow() == 1:
606 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
607 raise RuntimeError(msg)
608 del sel
609 if insitu:
610 self._assign(workscan)
611 else:
612 return workscan
613
614 @asaplog_post_dec
615 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
616 """\
617 Determine the specified statistic of the current beam/if/pol
618 Takes a 'mask' as an optional parameter to specify which
619 channels should be excluded.
620
621 Parameters:
622
623 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
624 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
625
626 mask: an optional mask specifying where the statistic
627 should be determined.
628
629 form: format string to print statistic values
630
631 row: row number of spectrum to process.
632 (default is None: for all rows)
633
634 Example:
635 scan.set_unit('channel')
636 msk = scan.create_mask([100, 200], [500, 600])
637 scan.stats(stat='mean', mask=m)
638
639 """
640 mask = mask or []
641 if not self._check_ifs():
642 raise ValueError("Cannot apply mask as the IFs have different "
643 "number of channels. Please use setselection() "
644 "to select individual IFs")
645 rtnabc = False
646 if stat.lower().endswith('_abc'): rtnabc = True
647 getchan = False
648 if stat.lower().startswith('min') or stat.lower().startswith('max'):
649 chan = self._math._minmaxchan(self, mask, stat)
650 getchan = True
651 statvals = []
652 if not rtnabc:
653 if row == None:
654 statvals = self._math._stats(self, mask, stat)
655 else:
656 statvals = self._math._statsrow(self, mask, stat, int(row))
657
658 #def cb(i):
659 # return statvals[i]
660
661 #return self._row_callback(cb, stat)
662
663 label=stat
664 #callback=cb
665 out = ""
666 #outvec = []
667 sep = '-'*50
668
669 if row == None:
670 rows = xrange(self.nrow())
671 elif isinstance(row, int):
672 rows = [ row ]
673
674 for i in rows:
675 refstr = ''
676 statunit= ''
677 if getchan:
678 qx, qy = self.chan2data(rowno=i, chan=chan[i])
679 if rtnabc:
680 statvals.append(qx['value'])
681 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
682 statunit= '['+qx['unit']+']'
683 else:
684 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
685
686 tm = self._gettime(i)
687 src = self._getsourcename(i)
688 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
689 out += 'Time[%s]:\n' % (tm)
690 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
691 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
692 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
693 #outvec.append(callback(i))
694 if len(rows) > 1:
695 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
696 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
697 else:
698 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
699 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
700 out += sep+"\n"
701
702 import os
703 if os.environ.has_key( 'USER' ):
704 usr = os.environ['USER']
705 else:
706 import commands
707 usr = commands.getoutput( 'whoami' )
708 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
709 f = open(tmpfile,'w')
710 print >> f, sep
711 print >> f, ' %s %s' % (label, statunit)
712 print >> f, sep
713 print >> f, out
714 f.close()
715 f = open(tmpfile,'r')
716 x = f.readlines()
717 f.close()
718 asaplog.push(''.join(x), False)
719
720 return statvals
721
722 def chan2data(self, rowno=0, chan=0):
723 """\
724 Returns channel/frequency/velocity and spectral value
725 at an arbitrary row and channel in the scantable.
726
727 Parameters:
728
729 rowno: a row number in the scantable. Default is the
730 first row, i.e. rowno=0
731
732 chan: a channel in the scantable. Default is the first
733 channel, i.e. pos=0
734
735 """
736 if isinstance(rowno, int) and isinstance(chan, int):
737 qx = {'unit': self.get_unit(),
738 'value': self._getabcissa(rowno)[chan]}
739 qy = {'unit': self.get_fluxunit(),
740 'value': self._getspectrum(rowno)[chan]}
741 return qx, qy
742
743 def stddev(self, mask=None):
744 """\
745 Determine the standard deviation of the current beam/if/pol
746 Takes a 'mask' as an optional parameter to specify which
747 channels should be excluded.
748
749 Parameters:
750
751 mask: an optional mask specifying where the standard
752 deviation should be determined.
753
754 Example::
755
756 scan.set_unit('channel')
757 msk = scan.create_mask([100, 200], [500, 600])
758 scan.stddev(mask=m)
759
760 """
761 return self.stats(stat='stddev', mask=mask);
762
763
    def get_column_names(self):
        """\
        Return a list of column names, which can be used for selection.
        """
        # convert the C++ vector to a plain python list
        return list(Scantable.get_column_names(self))
769
    def get_tsys(self, row=-1):
        """\
        Return the System temperatures.

        Parameters:

            row:    the rowno to get the information for. (default all rows)

        Returns:

            a list of Tsys values for the current selection

        """
        # a specific row bypasses the logging/report path used for the
        # all-rows case
        if row > -1:
            return self._get_column(self._gettsys, row)
        return self._row_callback(self._gettsys, "Tsys")
786
787
788 def get_weather(self, row=-1):
789 """\
790 Return the weather informations.
791
792 Parameters:
793
794 row: the rowno to get the information for. (default all rows)
795
796 Returns:
797
798 a dict or list of of dicts of values for the current selection
799
800 """
801
802 values = self._get_column(self._get_weather, row)
803 if row > -1:
804 return {'temperature': values[0],
805 'pressure': values[1], 'humidity' : values[2],
806 'windspeed' : values[3], 'windaz' : values[4]
807 }
808 else:
809 out = []
810 for r in values:
811
812 out.append({'temperature': r[0],
813 'pressure': r[1], 'humidity' : r[2],
814 'windspeed' : r[3], 'windaz' : r[4]
815 })
816 return out
817
    def _row_callback(self, callback, label):
        # Apply callback(i) to every row while building a human-readable
        # report; the report is pushed to the log and the collected values
        # are returned.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only show axes with more than one element
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            # outvec[i] is the value just appended for this row
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
841
842 def _get_column(self, callback, row=-1, *args):
843 """
844 """
845 if row == -1:
846 return [callback(i, *args) for i in range(self.nrow())]
847 else:
848 if 0 <= row < self.nrow():
849 return callback(row, *args)
850
851
852 def get_time(self, row=-1, asdatetime=False, prec=-1):
853 """\
854 Get a list of time stamps for the observations.
855 Return a datetime object or a string (default) for each
856 integration time stamp in the scantable.
857
858 Parameters:
859
860 row: row no of integration. Default -1 return all rows
861
862 asdatetime: return values as datetime objects rather than strings
863
864 prec: number of digits shown. Default -1 to automatic
865 calculation.
866 Note this number is equals to the digits of MVTime,
867 i.e., 0<prec<3: dates with hh:: only,
868 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
869 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
870
871 """
872 from datetime import datetime
873 if prec < 0:
874 # automagically set necessary precision +1
875 prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
876 prec = max(6, int(prec))
877 else:
878 prec = max(0, prec)
879 if asdatetime:
880 #precision can be 1 millisecond at max
881 prec = min(12, prec)
882
883 times = self._get_column(self._gettime, row, prec)
884 if not asdatetime:
885 return times
886 format = "%Y/%m/%d/%H:%M:%S.%f"
887 if prec < 7:
888 nsub = 1 + (((6-prec)/2) % 3)
889 substr = [".%f","%S","%M"]
890 for i in range(nsub):
891 format = format.replace(substr[i],"")
892 if isinstance(times, list):
893 return [datetime.strptime(i, format) for i in times]
894 else:
895 return datetime.strptime(times, format)
896
897
    def get_inttime(self, row=-1):
        """\
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getinttime, row)
909
910
    def get_sourcename(self, row=-1):
        """\
        Get a list of source names for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getsourcename, row)
921
    def get_elevation(self, row=-1):
        """\
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getelevation, row)
933
    def get_azimuth(self, row=-1):
        """\
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getazimuth, row)
944
    def get_parangle(self, row=-1):
        """\
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows.

        """
        return self._get_column(self._getparangle, row)
956
    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirection, row)
968
    def get_directionval(self, row=-1):
        """\
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.

        Parameters:

            row:    row no of integration. Default -1 return all rows

        """
        return self._get_column(self._getdirectionvec, row)
980
981 @asaplog_post_dec
982 def set_unit(self, unit='channel'):
983 """\
984 Set the unit for all following operations on this scantable
985
986 Parameters:
987
988 unit: optional unit, default is 'channel'. Use one of '*Hz',
989 'km/s', 'channel' or equivalent ''
990
991 """
992 varlist = vars()
993 if unit in ['', 'pixel', 'channel']:
994 unit = ''
995 inf = list(self._getcoordinfo())
996 inf[0] = unit
997 self._setcoordinfo(inf)
998 self._add_history("set_unit", varlist)
999
1000 @asaplog_post_dec
1001 def set_instrument(self, instr):
1002 """\
1003 Set the instrument for subsequent processing.
1004
1005 Parameters:
1006
1007 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
1008 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
1009
1010 """
1011 self._setInstrument(instr)
1012 self._add_history("set_instument", vars())
1013
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:     'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
1026
1027 @asaplog_post_dec
1028 def set_doppler(self, doppler='RADIO'):
1029 """\
1030 Set the doppler for all following operations on this scantable.
1031
1032 Parameters:
1033
1034 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1035
1036 """
1037 varlist = vars()
1038 inf = list(self._getcoordinfo())
1039 inf[2] = doppler
1040 self._setcoordinfo(inf)
1041 self._add_history("set_doppler", vars())
1042
1043 @asaplog_post_dec
1044 def set_freqframe(self, frame=None):
1045 """\
1046 Set the frame type of the Spectral Axis.
1047
1048 Parameters:
1049
1050 frame: an optional frame type, default 'LSRK'. Valid frames are:
1051 'TOPO', 'LSRD', 'LSRK', 'BARY',
1052 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1053
1054 Example::
1055
1056 scan.set_freqframe('BARY')
1057
1058 """
1059 frame = frame or rcParams['scantable.freqframe']
1060 varlist = vars()
1061 # "REST" is not implemented in casacore
1062 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1063 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1064 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1065 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1066
1067 if frame in valid:
1068 inf = list(self._getcoordinfo())
1069 inf[1] = frame
1070 self._setcoordinfo(inf)
1071 self._add_history("set_freqframe", varlist)
1072 else:
1073 msg = "Please specify a valid freq type. Valid types are:\n", valid
1074 raise TypeError(msg)
1075
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # validation happens in the C++ base class implementation
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
1094
1095 def get_unit(self):
1096 """\
1097 Get the default unit set in this scantable
1098
1099 Returns:
1100
1101 A unit string
1102
1103 """
1104 inf = self._getcoordinfo()
1105 unit = inf[0]
1106 if unit == '': unit = 'channel'
1107 return unit
1108
    @asaplog_post_dec
    def get_abcissa(self, rowno=0):
        """\
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol

        Parameters:

            rowno:    an optional row number in the scantable. Default is the
                      first row, i.e. rowno=0

        Returns:

            The abcissa values and the format string (as a dictionary)

        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        return abc, lbl
1128
1129 @asaplog_post_dec
1130 def flag(self, row=-1, mask=None, unflag=False):
1131 """\
1132 Flag the selected data using an optional channel mask.
1133
1134 Parameters:
1135
1136 row: an optional row number in the scantable.
1137 Default -1 flags all rows
1138
1139 mask: an optional channel mask, created with create_mask. Default
1140 (no mask) is all channels.
1141
1142 unflag: if True, unflag the data
1143
1144 """
1145 varlist = vars()
1146 mask = mask or []
1147 self._flag(row, mask, unflag)
1148 self._add_history("flag", varlist)
1149
1150 @asaplog_post_dec
1151 def flag_row(self, rows=[], unflag=False):
1152 """\
1153 Flag the selected data in row-based manner.
1154
1155 Parameters:
1156
1157 rows: list of row numbers to be flagged. Default is no row
1158 (must be explicitly specified to execute row-based
1159 flagging).
1160
1161 unflag: if True, unflag the data.
1162
1163 """
1164 varlist = vars()
1165 self._flag_row(rows, unflag)
1166 self._add_history("flag_row", varlist)
1167
1168 @asaplog_post_dec
1169 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1170 """\
1171 Flag the selected data outside a specified range (in channel-base)
1172
1173 Parameters:
1174
1175 uthres: upper threshold.
1176
1177 dthres: lower threshold
1178
1179 clipoutside: True for flagging data outside the range
1180 [dthres:uthres].
1181 False for flagging data inside the range.
1182
1183 unflag: if True, unflag the data.
1184
1185 """
1186 varlist = vars()
1187 self._clip(uthres, dthres, clipoutside, unflag)
1188 self._add_history("clip", varlist)
1189
1190 @asaplog_post_dec
1191 def lag_flag(self, start, end, unit="MHz", insitu=None):
1192 """\
1193 Flag the data in 'lag' space by providing a frequency to remove.
1194 Flagged data in the scantable get interpolated over the region.
1195 No taper is applied.
1196
1197 Parameters:
1198
1199 start: the start frequency (really a period within the
1200 bandwidth) or period to remove
1201
1202 end: the end frequency or period to remove
1203
1204 unit: the frequency unit (default "MHz") or "" for
1205 explicit lag channels
1206
1207 *Notes*:
1208
1209 It is recommended to flag edges of the band or strong
1210 signals beforehand.
1211
1212 """
1213 if insitu is None: insitu = rcParams['insitu']
1214 self._math._setinsitu(insitu)
1215 varlist = vars()
1216 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1217 if not (unit == "" or base.has_key(unit)):
1218 raise ValueError("%s is not a valid unit." % unit)
1219 if unit == "":
1220 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1221 else:
1222 s = scantable(self._math._lag_flag(self, start*base[unit],
1223 end*base[unit], "frequency"))
1224 s._add_history("lag_flag", varlist)
1225 if insitu:
1226 self._assign(s)
1227 else:
1228 return s
1229
1230 @asaplog_post_dec
1231 def fft(self, rowno=[], mask=[], getrealimag=False):
1232 """\
1233 Apply FFT to the spectra.
1234 Flagged data in the scantable get interpolated over the region.
1235
1236 Parameters:
1237
1238 rowno: The row number(s) to be processed. int, list
1239 and tuple are accepted. By default ([]), FFT
1240 is applied to the whole data.
1241
1242 mask: Auxiliary channel mask(s). Given as a boolean
1243 list, it is applied to all specified rows.
1244 A list of boolean lists can also be used to
1245 apply different masks. In the latter case, the
1246 length of 'mask' must be the same as that of
1247 'rowno'. The default is [].
1248
1249 getrealimag: If True, returns the real and imaginary part
1250 values of the complex results.
1251 If False (the default), returns the amplitude
1252 (absolute value) normalised with Ndata/2 and
1253 phase (argument, in unit of radian).
1254
1255 Returns:
1256
1257 A list of dictionaries containing the results for each spectrum.
1258 Each dictionary contains two values, the real and the imaginary
1259 parts when getrealimag = True, or the amplitude(absolute value)
1260 and the phase(argument) when getrealimag = False. The key for
1261 these values are 'real' and 'imag', or 'ampl' and 'phase',
1262 respectively.
1263 """
1264 if isinstance(rowno, int):
1265 rowno = [rowno]
1266 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1267 raise TypeError("The row number(s) must be int, list or tuple.")
1268
1269 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1270
1271 if not (isinstance(mask, list) or isinstance(mask, tuple)):
1272 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1273 if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1274 if isinstance(mask[0], bool): mask = [mask]
1275 elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1276 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1277
1278 usecommonmask = (len(mask) == 1)
1279 if not usecommonmask:
1280 if len(mask) != len(rowno):
1281 raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1282 for amask in mask:
1283 if len(amask) != self.nchan():
1284 raise ValueError("The spectra and the mask have different length.")
1285
1286 res = []
1287
1288 imask = 0
1289 for whichrow in rowno:
1290 fspec = self._fft(whichrow, mask[imask], getrealimag)
1291 nspec = len(fspec)
1292
1293 i=0
1294 v1=[]
1295 v2=[]
1296 reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
1297
1298 while (i < nspec):
1299 v1.append(fspec[i])
1300 v2.append(fspec[i+1])
1301 i+=2
1302
1303 if getrealimag:
1304 reselem["real"] += v1
1305 reselem["imag"] += v2
1306 else:
1307 reselem["ampl"] += v1
1308 reselem["phase"] += v2
1309
1310 res.append(reselem)
1311
1312 if not usecommonmask: imask += 1
1313
1314 return res
1315
1316 @asaplog_post_dec
1317 def create_mask(self, *args, **kwargs):
1318 """\
1319 Compute and return a mask based on [min, max] windows.
1320 The specified windows are to be INCLUDED, when the mask is
1321 applied.
1322
1323 Parameters:
1324
1325 [min, max], [min2, max2], ...
1326 Pairs of start/end points (inclusive)specifying the regions
1327 to be masked
1328
1329 invert: optional argument. If specified as True,
1330 return an inverted mask, i.e. the regions
1331 specified are EXCLUDED
1332
1333 row: create the mask using the specified row for
1334 unit conversions, default is row=0
1335 only necessary if frequency varies over rows.
1336
1337 Examples::
1338
1339 scan.set_unit('channel')
1340 # a)
1341 msk = scan.create_mask([400, 500], [800, 900])
1342 # masks everything outside 400 and 500
1343 # and 800 and 900 in the unit 'channel'
1344
1345 # b)
1346 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1347 # masks the regions between 400 and 500
1348 # and 800 and 900 in the unit 'channel'
1349
1350 # c)
1351 #mask only channel 400
1352 msk = scan.create_mask([400])
1353
1354 """
1355 row = kwargs.get("row", 0)
1356 data = self._getabcissa(row)
1357 u = self._getcoordinfo()[0]
1358 if u == "":
1359 u = "channel"
1360 msg = "The current mask window unit is %s" % u
1361 i = self._check_ifs()
1362 if not i:
1363 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1364 asaplog.push(msg)
1365 n = self.nchan()
1366 msk = _n_bools(n, False)
1367 # test if args is a 'list' or a 'normal *args - UGLY!!!
1368
1369 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1370 and args or args[0]
1371 for window in ws:
1372 if len(window) == 1:
1373 window = [window[0], window[0]]
1374 if len(window) == 0 or len(window) > 2:
1375 raise ValueError("A window needs to be defined as [start(, end)]")
1376 if window[0] > window[1]:
1377 tmp = window[0]
1378 window[0] = window[1]
1379 window[1] = tmp
1380 for i in range(n):
1381 if data[i] >= window[0] and data[i] <= window[1]:
1382 msk[i] = True
1383 if kwargs.has_key('invert'):
1384 if kwargs.get('invert'):
1385 msk = mask_not(msk)
1386 return msk
1387
1388 def get_masklist(self, mask=None, row=0, silent=False):
1389 """\
1390 Compute and return a list of mask windows, [min, max].
1391
1392 Parameters:
1393
1394 mask: channel mask, created with create_mask.
1395
1396 row: calcutate the masklist using the specified row
1397 for unit conversions, default is row=0
1398 only necessary if frequency varies over rows.
1399
1400 Returns:
1401
1402 [min, max], [min2, max2], ...
1403 Pairs of start/end points (inclusive)specifying
1404 the masked regions
1405
1406 """
1407 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1408 raise TypeError("The mask should be list or tuple.")
1409 if len(mask) < 2:
1410 raise TypeError("The mask elements should be > 1")
1411 if self.nchan() != len(mask):
1412 msg = "Number of channels in scantable != number of mask elements"
1413 raise TypeError(msg)
1414 data = self._getabcissa(row)
1415 u = self._getcoordinfo()[0]
1416 if u == "":
1417 u = "channel"
1418 msg = "The current mask window unit is %s" % u
1419 i = self._check_ifs()
1420 if not i:
1421 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1422 if not silent:
1423 asaplog.push(msg)
1424 masklist=[]
1425 ist, ien = None, None
1426 ist, ien=self.get_mask_indices(mask)
1427 if ist is not None and ien is not None:
1428 for i in xrange(len(ist)):
1429 range=[data[ist[i]],data[ien[i]]]
1430 range.sort()
1431 masklist.append([range[0],range[1]])
1432 return masklist
1433
1434 def get_mask_indices(self, mask=None):
1435 """\
1436 Compute and Return lists of mask start indices and mask end indices.
1437
1438 Parameters:
1439
1440 mask: channel mask, created with create_mask.
1441
1442 Returns:
1443
1444 List of mask start indices and that of mask end indices,
1445 i.e., [istart1,istart2,....], [iend1,iend2,....].
1446
1447 """
1448 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1449 raise TypeError("The mask should be list or tuple.")
1450 if len(mask) < 2:
1451 raise TypeError("The mask elements should be > 1")
1452 istart=[]
1453 iend=[]
1454 if mask[0]: istart.append(0)
1455 for i in range(len(mask)-1):
1456 if not mask[i] and mask[i+1]:
1457 istart.append(i+1)
1458 elif mask[i] and not mask[i+1]:
1459 iend.append(i)
1460 if mask[len(mask)-1]: iend.append(len(mask)-1)
1461 if len(istart) != len(iend):
1462 raise RuntimeError("Numbers of mask start != mask end.")
1463 for i in range(len(istart)):
1464 if istart[i] > iend[i]:
1465 raise RuntimeError("Mask start index > mask end index")
1466 break
1467 return istart,iend
1468
1469 @asaplog_post_dec
    def parse_maskexpr(self,maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): an invalid type only posts an error to the log;
        # execution continues with the bad value — confirm this is intended.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                               offset=1,minval=min(valid_ifs),
                                               maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            # NOTE(review): 'selspws' is reset for each spwlist, so only
            # the last range in a ';'-separated spw selection survives
            # this loop — verify whether multi-range spw strings occur.
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                    offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # scan the rows for the first one belonging to this
                        # IF and take its coordinate/abcissa as reference
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None means "select the whole spectrum"
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1595
1596 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1597 """
1598 Parameters:
1599 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1600 typestr : The type of the values in returned list
1601 ('integer' or 'float')
1602 offset : The offset value to subtract from or add to
1603 the boundary value if the selection string
1604 includes '<' or '>'
1605 minval, maxval : The minimum/maximum values to set if the
1606 selection string includes '<' or '>'.
1607 The list element is filled with None by default.
1608 Returns:
1609 A list of min/max pair of selections.
1610 Example:
1611 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1612 returns [[0,2],[5,7],[9,9]]
1613 """
1614 selgroups = selstr.split(';')
1615 sellists = []
1616 if typestr.lower().startswith('int'):
1617 formatfunc = int
1618 else:
1619 formatfunc = float
1620
1621 for currsel in selgroups:
1622 if currsel.find('~') > 0:
1623 minsel = formatfunc(currsel.split('~')[0].strip())
1624 maxsel = formatfunc(currsel.split('~')[1].strip())
1625 elif currsel.strip().startswith('<'):
1626 minsel = minval
1627 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1628 - formatfunc(offset)
1629 elif currsel.strip().startswith('>'):
1630 minsel = formatfunc(currsel.split('>')[1].strip()) \
1631 + formatfunc(offset)
1632 maxsel = maxval
1633 else:
1634 minsel = formatfunc(currsel)
1635 maxsel = formatfunc(currsel)
1636 sellists.append([minsel,maxsel])
1637 return sellists
1638
1639# def get_restfreqs(self):
1640# """
1641# Get the restfrequency(s) stored in this scantable.
1642# The return value(s) are always of unit 'Hz'
1643# Parameters:
1644# none
1645# Returns:
1646# a list of doubles
1647# """
1648# return list(self._getrestfreqs())
1649
1650 def get_restfreqs(self, ids=None):
1651 """\
1652 Get the restfrequency(s) stored in this scantable.
1653 The return value(s) are always of unit 'Hz'
1654
1655 Parameters:
1656
1657 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1658 be retrieved
1659
1660 Returns:
1661
1662 dictionary containing ids and a list of doubles for each id
1663
1664 """
1665 if ids is None:
1666 rfreqs={}
1667 idlist = self.getmolnos()
1668 for i in idlist:
1669 rfreqs[i]=list(self._getrestfreqs(i))
1670 return rfreqs
1671 else:
1672 if type(ids)==list or type(ids)==tuple:
1673 rfreqs={}
1674 for i in ids:
1675 rfreqs[i]=list(self._getrestfreqs(i))
1676 return rfreqs
1677 else:
1678 return list(self._getrestfreqs(ids))
1679 #return list(self._getrestfreqs(ids))
1680
1681 def set_restfreqs(self, freqs=None, unit='Hz'):
1682 """\
1683 Set or replace the restfrequency specified and
1684 if the 'freqs' argument holds a scalar,
1685 then that rest frequency will be applied to all the selected
1686 data. If the 'freqs' argument holds
1687 a vector, then it MUST be of equal or smaller length than
1688 the number of IFs (and the available restfrequencies will be
1689 replaced by this vector). In this case, *all* data have
1690 the restfrequency set per IF according
1691 to the corresponding value you give in the 'freqs' vector.
1692 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1693 IF 1 gets restfreq 2e9.
1694
1695 You can also specify the frequencies via a linecatalog.
1696
1697 Parameters:
1698
1699 freqs: list of rest frequency values or string idenitfiers
1700
1701 unit: unit for rest frequency (default 'Hz')
1702
1703
1704 Example::
1705
1706 # set the given restfrequency for the all currently selected IFs
1707 scan.set_restfreqs(freqs=1.4e9)
1708 # set restfrequencies for the n IFs (n > 1) in the order of the
1709 # list, i.e
1710 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1711 # len(list_of_restfreqs) == nIF
1712 # for nIF == 1 the following will set multiple restfrequency for
1713 # that IF
1714 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1715 # set multiple restfrequencies per IF. as a list of lists where
1716 # the outer list has nIF elements, the inner s arbitrary
1717 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1718
1719 *Note*:
1720
1721 To do more sophisticate Restfrequency setting, e.g. on a
1722 source and IF basis, use scantable.set_selection() before using
1723 this function::
1724
1725 # provided your scantable is called scan
1726 selection = selector()
1727 selection.set_name("ORION*")
1728 selection.set_ifs([1])
1729 scan.set_selection(selection)
1730 scan.set_restfreqs(freqs=86.6e9)
1731
1732 """
1733 varlist = vars()
1734 from asap import linecatalog
1735 # simple value
1736 if isinstance(freqs, int) or isinstance(freqs, float):
1737 self._setrestfreqs([freqs], [""], unit)
1738 # list of values
1739 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1740 # list values are scalars
1741 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1742 if len(freqs) == 1:
1743 self._setrestfreqs(freqs, [""], unit)
1744 else:
1745 # allow the 'old' mode of setting mulitple IFs
1746 sel = selector()
1747 savesel = self._getselection()
1748 iflist = self.getifnos()
1749 if len(freqs)>len(iflist):
1750 raise ValueError("number of elements in list of list "
1751 "exeeds the current IF selections")
1752 iflist = self.getifnos()
1753 for i, fval in enumerate(freqs):
1754 sel.set_ifs(iflist[i])
1755 self._setselection(sel)
1756 self._setrestfreqs([fval], [""], unit)
1757 self._setselection(savesel)
1758
1759 # list values are dict, {'value'=, 'name'=)
1760 elif isinstance(freqs[-1], dict):
1761 values = []
1762 names = []
1763 for d in freqs:
1764 values.append(d["value"])
1765 names.append(d["name"])
1766 self._setrestfreqs(values, names, unit)
1767 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1768 sel = selector()
1769 savesel = self._getselection()
1770 iflist = self.getifnos()
1771 if len(freqs)>len(iflist):
1772 raise ValueError("number of elements in list of list exeeds"
1773 " the current IF selections")
1774 for i, fval in enumerate(freqs):
1775 sel.set_ifs(iflist[i])
1776 self._setselection(sel)
1777 self._setrestfreqs(fval, [""], unit)
1778 self._setselection(savesel)
1779 # freqs are to be taken from a linecatalog
1780 elif isinstance(freqs, linecatalog):
1781 sel = selector()
1782 savesel = self._getselection()
1783 for i in xrange(freqs.nrow()):
1784 sel.set_ifs(iflist[i])
1785 self._setselection(sel)
1786 self._setrestfreqs([freqs.get_frequency(i)],
1787 [freqs.get_name(i)], "MHz")
1788 # ensure that we are not iterating past nIF
1789 if i == self.nif()-1: break
1790 self._setselection(savesel)
1791 else:
1792 return
1793 self._add_history("set_restfreqs", varlist)
1794
1795 def shift_refpix(self, delta):
1796 """\
1797 Shift the reference pixel of the Spectra Coordinate by an
1798 integer amount.
1799
1800 Parameters:
1801
1802 delta: the amount to shift by
1803
1804 *Note*:
1805
1806 Be careful using this with broadband data.
1807
1808 """
1809 Scantable.shift_refpix(self, delta)
1810
1811 @asaplog_post_dec
1812 def history(self, filename=None):
1813 """\
1814 Print the history. Optionally to a file.
1815
1816 Parameters:
1817
1818 filename: The name of the file to save the history to.
1819
1820 """
1821 hist = list(self._gethistory())
1822 out = "-"*80
1823 for h in hist:
1824 if h.startswith("---"):
1825 out = "\n".join([out, h])
1826 else:
1827 items = h.split("##")
1828 date = items[0]
1829 func = items[1]
1830 items = items[2:]
1831 out += "\n"+date+"\n"
1832 out += "Function: %s\n Parameters:" % (func)
1833 for i in items:
1834 if i == '':
1835 continue
1836 s = i.split("=")
1837 out += "\n %s = %s" % (s[0], s[1])
1838 out = "\n".join([out, "-"*80])
1839 if filename is not None:
1840 if filename is "":
1841 filename = 'scantable_history.txt'
1842 import os
1843 filename = os.path.expandvars(os.path.expanduser(filename))
1844 if not os.path.isdir(filename):
1845 data = open(filename, 'w')
1846 data.write(out)
1847 data.close()
1848 else:
1849 msg = "Illegal file name '%s'." % (filename)
1850 raise IOError(msg)
1851 return page(out)
1852 #
1853 # Maths business
1854 #
1855 @asaplog_post_dec
1856 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1857 """\
1858 Return the (time) weighted average of a scan.
1859
1860 *Note*:
1861
1862 in channels only - align if necessary
1863
1864 Parameters:
1865
1866 mask: an optional mask (only used for 'var' and 'tsys'
1867 weighting)
1868
1869 scanav: True averages each scan separately
1870 False (default) averages all scans together,
1871
1872 weight: Weighting scheme.
1873 'none' (mean no weight)
1874 'var' (1/var(spec) weighted)
1875 'tsys' (1/Tsys**2 weighted)
1876 'tint' (integration time weighted)
1877 'tintsys' (Tint/Tsys**2)
1878 'median' ( median averaging)
1879 The default is 'tint'
1880
1881 align: align the spectra in velocity before averaging. It takes
1882 the time of the first spectrum as reference time.
1883
1884 Example::
1885
1886 # time average the scantable without using a mask
1887 newscan = scan.average_time()
1888
1889 """
1890 varlist = vars()
1891 weight = weight or 'TINT'
1892 mask = mask or ()
1893 scanav = (scanav and 'SCAN') or 'NONE'
1894 scan = (self, )
1895
1896 if align:
1897 scan = (self.freq_align(insitu=False), )
1898 s = None
1899 if weight.upper() == 'MEDIAN':
1900 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1901 scanav))
1902 else:
1903 s = scantable(self._math._average(scan, mask, weight.upper(),
1904 scanav))
1905 s._add_history("average_time", varlist)
1906 return s
1907
1908 @asaplog_post_dec
1909 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1910 """\
1911 Return a scan where all spectra are converted to either
1912 Jansky or Kelvin depending upon the flux units of the scan table.
1913 By default the function tries to look the values up internally.
1914 If it can't find them (or if you want to over-ride), you must
1915 specify EITHER jyperk OR eta (and D which it will try to look up
1916 also if you don't set it). jyperk takes precedence if you set both.
1917
1918 Parameters:
1919
1920 jyperk: the Jy / K conversion factor
1921
1922 eta: the aperture efficiency
1923
1924 d: the geometric diameter (metres)
1925
1926 insitu: if False a new scantable is returned.
1927 Otherwise, the scaling is done in-situ
1928 The default is taken from .asaprc (False)
1929
1930 """
1931 if insitu is None: insitu = rcParams['insitu']
1932 self._math._setinsitu(insitu)
1933 varlist = vars()
1934 jyperk = jyperk or -1.0
1935 d = d or -1.0
1936 eta = eta or -1.0
1937 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1938 s._add_history("convert_flux", varlist)
1939 if insitu: self._assign(s)
1940 else: return s
1941
1942 @asaplog_post_dec
1943 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1944 """\
1945 Return a scan after applying a gain-elevation correction.
1946 The correction can be made via either a polynomial or a
1947 table-based interpolation (and extrapolation if necessary).
1948 You specify polynomial coefficients, an ascii table or neither.
1949 If you specify neither, then a polynomial correction will be made
1950 with built in coefficients known for certain telescopes (an error
1951 will occur if the instrument is not known).
1952 The data and Tsys are *divided* by the scaling factors.
1953
1954 Parameters:
1955
1956 poly: Polynomial coefficients (default None) to compute a
1957 gain-elevation correction as a function of
1958 elevation (in degrees).
1959
1960 filename: The name of an ascii file holding correction factors.
1961 The first row of the ascii file must give the column
1962 names and these MUST include columns
1963 "ELEVATION" (degrees) and "FACTOR" (multiply data
1964 by this) somewhere.
1965 The second row must give the data type of the
1966 column. Use 'R' for Real and 'I' for Integer.
1967 An example file would be
1968 (actual factors are arbitrary) :
1969
1970 TIME ELEVATION FACTOR
1971 R R R
1972 0.1 0 0.8
1973 0.2 20 0.85
1974 0.3 40 0.9
1975 0.4 60 0.85
1976 0.5 80 0.8
1977 0.6 90 0.75
1978
1979 method: Interpolation method when correcting from a table.
1980 Values are "nearest", "linear" (default), "cubic"
1981 and "spline"
1982
1983 insitu: if False a new scantable is returned.
1984 Otherwise, the scaling is done in-situ
1985 The default is taken from .asaprc (False)
1986
1987 """
1988
1989 if insitu is None: insitu = rcParams['insitu']
1990 self._math._setinsitu(insitu)
1991 varlist = vars()
1992 poly = poly or ()
1993 from os.path import expandvars
1994 filename = expandvars(filename)
1995 s = scantable(self._math._gainel(self, poly, filename, method))
1996 s._add_history("gain_el", varlist)
1997 if insitu:
1998 self._assign(s)
1999 else:
2000 return s
2001
2002 @asaplog_post_dec
2003 def freq_align(self, reftime=None, method='cubic', insitu=None):
2004 """\
2005 Return a scan where all rows have been aligned in frequency/velocity.
2006 The alignment frequency frame (e.g. LSRK) is that set by function
2007 set_freqframe.
2008
2009 Parameters:
2010
2011 reftime: reference time to align at. By default, the time of
2012 the first row of data is used.
2013
2014 method: Interpolation method for regridding the spectra.
2015 Choose from "nearest", "linear", "cubic" (default)
2016 and "spline"
2017
2018 insitu: if False a new scantable is returned.
2019 Otherwise, the scaling is done in-situ
2020 The default is taken from .asaprc (False)
2021
2022 """
2023 if insitu is None: insitu = rcParams["insitu"]
2024 self._math._setinsitu(insitu)
2025 varlist = vars()
2026 reftime = reftime or ""
2027 s = scantable(self._math._freq_align(self, reftime, method))
2028 s._add_history("freq_align", varlist)
2029 if insitu: self._assign(s)
2030 else: return s
2031
2032 @asaplog_post_dec
2033 def opacity(self, tau=None, insitu=None):
2034 """\
2035 Apply an opacity correction. The data
2036 and Tsys are multiplied by the correction factor.
2037
2038 Parameters:
2039
2040 tau: (list of) opacity from which the correction factor is
2041 exp(tau*ZD)
2042 where ZD is the zenith-distance.
2043 If a list is provided, it has to be of length nIF,
2044 nIF*nPol or 1 and in order of IF/POL, e.g.
2045 [opif0pol0, opif0pol1, opif1pol0 ...]
2046 if tau is `None` the opacities are determined from a
2047 model.
2048
2049 insitu: if False a new scantable is returned.
2050 Otherwise, the scaling is done in-situ
2051 The default is taken from .asaprc (False)
2052
2053 """
2054 if insitu is None: insitu = rcParams['insitu']
2055 self._math._setinsitu(insitu)
2056 varlist = vars()
2057 if not hasattr(tau, "__len__"):
2058 tau = [tau]
2059 s = scantable(self._math._opacity(self, tau))
2060 s._add_history("opacity", varlist)
2061 if insitu: self._assign(s)
2062 else: return s
2063
2064 @asaplog_post_dec
2065 def bin(self, width=5, insitu=None):
2066 """\
2067 Return a scan where all spectra have been binned up.
2068
2069 Parameters:
2070
2071 width: The bin width (default=5) in pixels
2072
2073 insitu: if False a new scantable is returned.
2074 Otherwise, the scaling is done in-situ
2075 The default is taken from .asaprc (False)
2076
2077 """
2078 if insitu is None: insitu = rcParams['insitu']
2079 self._math._setinsitu(insitu)
2080 varlist = vars()
2081 s = scantable(self._math._bin(self, width))
2082 s._add_history("bin", varlist)
2083 if insitu:
2084 self._assign(s)
2085 else:
2086 return s
2087
2088 @asaplog_post_dec
2089 def resample(self, width=5, method='cubic', insitu=None):
2090 """\
2091 Return a scan where all spectra have been binned up.
2092
2093 Parameters:
2094
2095 width: The bin width (default=5) in pixels
2096
2097 method: Interpolation method when correcting from a table.
2098 Values are "nearest", "linear", "cubic" (default)
2099 and "spline"
2100
2101 insitu: if False a new scantable is returned.
2102 Otherwise, the scaling is done in-situ
2103 The default is taken from .asaprc (False)
2104
2105 """
2106 if insitu is None: insitu = rcParams['insitu']
2107 self._math._setinsitu(insitu)
2108 varlist = vars()
2109 s = scantable(self._math._resample(self, method, width))
2110 s._add_history("resample", varlist)
2111 if insitu: self._assign(s)
2112 else: return s
2113
2114 @asaplog_post_dec
2115 def average_pol(self, mask=None, weight='none'):
2116 """\
2117 Average the Polarisations together.
2118
2119 Parameters:
2120
2121 mask: An optional mask defining the region, where the
2122 averaging will be applied. The output will have all
2123 specified points masked.
2124
2125 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2126 weighted), or 'tsys' (1/Tsys**2 weighted)
2127
2128 """
2129 varlist = vars()
2130 mask = mask or ()
2131 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2132 s._add_history("average_pol", varlist)
2133 return s
2134
2135 @asaplog_post_dec
2136 def average_beam(self, mask=None, weight='none'):
2137 """\
2138 Average the Beams together.
2139
2140 Parameters:
2141 mask: An optional mask defining the region, where the
2142 averaging will be applied. The output will have all
2143 specified points masked.
2144
2145 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2146 weighted), or 'tsys' (1/Tsys**2 weighted)
2147
2148 """
2149 varlist = vars()
2150 mask = mask or ()
2151 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2152 s._add_history("average_beam", varlist)
2153 return s
2154
2155 def parallactify(self, pflag):
2156 """\
2157 Set a flag to indicate whether this data should be treated as having
2158 been 'parallactified' (total phase == 0.0)
2159
2160 Parameters:
2161
2162 pflag: Bool indicating whether to turn this on (True) or
2163 off (False)
2164
2165 """
2166 varlist = vars()
2167 self._parallactify(pflag)
2168 self._add_history("parallactify", varlist)
2169
2170 @asaplog_post_dec
2171 def convert_pol(self, poltype=None):
2172 """\
2173 Convert the data to a different polarisation type.
2174 Note that you will need cross-polarisation terms for most conversions.
2175
2176 Parameters:
2177
2178 poltype: The new polarisation type. Valid types are:
2179 "linear", "circular", "stokes" and "linpol"
2180
2181 """
2182 varlist = vars()
2183 s = scantable(self._math._convertpol(self, poltype))
2184 s._add_history("convert_pol", varlist)
2185 return s
2186
2187 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive approval: overlay original and smoothed spectrum
            # per row; answering 'n' restores the original spectrum
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # wrap each line in a list so both entries show up in the
                # legend of the single panel
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the unsmoothed spectrum back
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2262
2263 @asaplog_post_dec
2264 def _parse_wn(self, wn):
2265 if isinstance(wn, list) or isinstance(wn, tuple):
2266 return wn
2267 elif isinstance(wn, int):
2268 return [ wn ]
2269 elif isinstance(wn, str):
2270 if '-' in wn:
2271 # case 'a-b' : return [a,a+1,...,b-1,b]
2272 val = wn.split('-')
2273 val = [int(val[0]), int(val[1])]
2274 val.sort()
2275 res = [i for i in xrange(val[0], val[1]+1)]
2276 elif wn[:2] == '<=' or wn[:2] == '=<':
2277 # cases '<=a','=<a' : return [0,1,...,a-1,a]
2278 val = int(wn[2:])+1
2279 res = [i for i in xrange(val)]
2280 elif wn[-2:] == '>=' or wn[-2:] == '=>':
2281 # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2282 val = int(wn[:-2])+1
2283 res = [i for i in xrange(val)]
2284 elif wn[0] == '<':
2285 # case '<a' : return [0,1,...,a-2,a-1]
2286 val = int(wn[1:])
2287 res = [i for i in xrange(val)]
2288 elif wn[-1] == '>':
2289 # case 'a>' : return [0,1,...,a-2,a-1]
2290 val = int(wn[:-1])
2291 res = [i for i in xrange(val)]
2292 elif wn[:2] == '>=' or wn[:2] == '=>':
2293 # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2294 val = int(wn[2:])
2295 res = [i for i in xrange(val, self.nchan()/2+1)]
2296 elif wn[-2:] == '<=' or wn[-2:] == '=<':
2297 # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2298 val = int(wn[:-2])
2299 res = [i for i in xrange(val, self.nchan()/2+1)]
2300 elif wn[0] == '>':
2301 # case '>a' : return [a+1,a+2,...,a_nyq]
2302 val = int(wn[1:])+1
2303 res = [i for i in xrange(val, self.nchan()/2+1)]
2304 elif wn[-1] == '<':
2305 # case 'a<' : return [a+1,a+2,...,a_nyq]
2306 val = int(wn[:-1])+1
2307 res = [i for i in xrange(val, self.nchan()/2+1)]
2308
2309 return res
2310 else:
2311 msg = 'wrong value given for addwn/rejwn'
2312 raise RuntimeError(msg)
2313
2314
2315 @asaplog_post_dec
    def sinusoid_baseline(self, mask=None, insitu=None, applyfft=None,
                          fftmethod=None, fftthresh=None,
                          addwn=None, rejwn=None, clipthresh=None,
                          clipniter=None, plot=None,
                          getresidual=None, showprogress=None,
                          minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.

        Parameters:
            insitu:        if False a new scantable is returned.
                           Otherwise, the scaling is done in-situ
                           The default is taken from .asaprc (False)
            mask:          an optional mask
            applyfft:      if True use some method, such as FFT, to find
                           strongest sinusoidal components in the wavenumber
                           domain to be used for baseline fitting.
                           default is True.
            fftmethod:     method to find the strong sinusoidal components.
                           now only 'fft' is available and it is the default.
            fftthresh:     the threshold to select wave numbers to be used for
                           fitting from the distribution of amplitudes in the
                           wavenumber domain.
                           both float and string values accepted.
                           given a float value, the unit is set to sigma.
                           for string values, allowed formats include:
                               'xsigma' or 'x' (= x-sigma level. e.g.,
                               '3sigma'), or
                               'topx' (= the x strongest ones, e.g. 'top5').
                           default is 3.0 (unit: sigma).
            addwn:         the additional wave numbers to be used for fitting.
                           list or integer value is accepted to specify every
                           wave numbers. also string value can be used in case
                           you need to specify wave numbers in a certain range,
                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                 '<a'  (= 0,1,...,a-2,a-1),
                                 '>=a' (= a, a+1, ... up to the maximum wave
                                        number corresponding to the Nyquist
                                        frequency for the case of FFT).
                           default is [].
            rejwn:         the wave numbers NOT to be used for fitting.
                           can be set just as addwn but has higher priority:
                           wave numbers which are specified both in addwn
                           and rejwn will NOT be used. default is [].
            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:     maximum number of iteration of 'clipthresh'-sigma
                           clipping (default is 0)
            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                           plot the fit and the residual. In this each
                           individual fit has to be approved, by typing 'y'
                           or 'n'
            getresidual:   if False, returns best-fit values instead of
                           residual. (default is True)
            showprogress:  show progress status for large data.
                           default is True.
            minnrow:       minimum number of input spectra to show.
                           default is 1000.
            outlog:        Output the coefficients of the best-fit
                           function to logger (default is False)
            blfile:        Name of a text file in which the best-fit
                           parameter values to be written
                           (default is '': no file/logger output)

        Example:
            # return a scan baselined by a combination of sinusoidal curves
            # having wave numbers in spectral window up to 10,
            # also with 3-sigma clipping, iteration up to 4 times
            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot the caller's arguments (before defaults are filled
            # in) for the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in defaults for every unspecified parameter
            if mask is None: mask = \
                [True for i in xrange(workscan.nchan())]
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = []
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
            # delegate to the C++ layer; wave-number specs are expanded by
            # _parse_wn and fftthresh is normalised to a lowercase string
            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                                        str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn), clipthresh,
                                        clipniter, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)
            workscan._add_history('sinusoid_baseline', varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2430
2431
2432 @asaplog_post_dec
    def auto_sinusoid_baseline(self, mask=None, insitu=None, applyfft=None,
                               fftmethod=None, fftthresh=None,
                               addwn=None, rejwn=None, clipthresh=None,
                               clipniter=None, edge=None, threshold=None,
                               chan_avg_limit=None, plot=None,
                               getresidual=None, showprogress=None,
                               minnrow=None,
                               outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) with sinusoidal
        functions.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            applyfft:       if True use some method, such as FFT, to find
                            strongest sinusoidal components in the wavenumber
                            domain to be used for baseline fitting.
                            default is True.
            fftmethod:      method to find the strong sinusoidal components.
                            now only 'fft' is available and it is the default.
            fftthresh:      the threshold to select wave numbers to be used for
                            fitting from the distribution of amplitudes in the
                            wavenumber domain.
                            both float and string values accepted.
                            given a float value, the unit is set to sigma.
                            for string values, allowed formats include:
                                'xsigma' or 'x' (= x-sigma level. e.g.,
                                '3sigma'), or
                                'topx' (= the x strongest ones, e.g. 'top5').
                            default is 3.0 (unit: sigma).
            addwn:          the additional wave numbers to be used for fitting.
                            list or integer value is accepted to specify every
                            wave numbers. also string value can be used in case
                            you need to specify wave numbers in a certain range,
                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
                                  '<a'  (= 0,1,...,a-2,a-1),
                                  '>=a' (= a, a+1, ... up to the maximum wave
                                         number corresponding to the Nyquist
                                         frequency for the case of FFT).
                            default is [].
            rejwn:          the wave numbers NOT to be used for fitting.
                            can be set just as addwn but has higher priority:
                            wave numbers which are specified both in addwn
                            and rejwn will NOT be used. default is [].
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            # snapshot the caller's arguments (before defaults are filled
            # in) for the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in defaults for every unspecified parameter
            if mask is None: mask = \
                [True for i in xrange(workscan.nchan())]
            if applyfft is None: applyfft = True
            if fftmethod is None: fftmethod = 'fft'
            if fftthresh is None: fftthresh = 3.0
            if addwn is None: addwn = []
            if rejwn is None: rejwn = []
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0,0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is
            # implemented as a fitter method.
            # delegate to the C++ layer, which also runs the line finder
            # (threshold/chan_avg_limit/edge) before fitting
            workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                                             str(fftthresh).lower(),
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)
            workscan._add_history("auto_sinusoid_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2579
2580 @asaplog_post_dec
2581 def cspline_baseline(self, mask=None, insitu=None, npiece=None,
2582 clipthresh=None, clipniter=None, plot=None,
2583 getresidual=None, showprogress=None, minnrow=None,
2584 outlog=None, blfile=None):
2585 """\
2586 Return a scan which has been baselined (all rows) by cubic spline
2587 function (piecewise cubic polynomial).
2588 Parameters:
2589 insitu: If False a new scantable is returned.
2590 Otherwise, the scaling is done in-situ
2591 The default is taken from .asaprc (False)
2592 mask: An optional mask
2593 npiece: Number of pieces. (default is 2)
2594 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2595 clipniter: maximum number of iteration of 'clipthresh'-sigma
2596 clipping (default is 0)
2597 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2598 plot the fit and the residual. In this each
2599 indivual fit has to be approved, by typing 'y'
2600 or 'n'
2601 getresidual: if False, returns best-fit values instead of
2602 residual. (default is True)
2603 showprogress: show progress status for large data.
2604 default is True.
2605 minnrow: minimum number of input spectra to show.
2606 default is 1000.
2607 outlog: Output the coefficients of the best-fit
2608 function to logger (default is False)
2609 blfile: Name of a text file in which the best-fit
2610 parameter values to be written
2611 (default is "": no file/logger output)
2612
2613 Example:
2614 # return a scan baselined by a cubic spline consisting of 2
2615 # pieces (i.e., 1 internal knot),
2616 # also with 3-sigma clipping, iteration up to 4 times
2617 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2618
2619 Note:
2620 The best-fit parameter values output in logger and/or blfile are now
2621 based on specunit of 'channel'.
2622 """
2623
2624 try:
2625 varlist = vars()
2626
2627 if insitu is None: insitu = rcParams['insitu']
2628 if insitu:
2629 workscan = self
2630 else:
2631 workscan = self.copy()
2632
2633 if mask is None: mask = \
2634 [True for i in xrange(workscan.nchan())]
2635 if npiece is None: npiece = 2
2636 if clipthresh is None: clipthresh = 3.0
2637 if clipniter is None: clipniter = 0
2638 if plot is None: plot = False
2639 if getresidual is None: getresidual = True
2640 if showprogress is None: showprogress = True
2641 if minnrow is None: minnrow = 1000
2642 if outlog is None: outlog = False
2643 if blfile is None: blfile = ''
2644
2645 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2646 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2647 getresidual,
2648 pack_progress_params(showprogress,
2649 minnrow),
2650 outlog, blfile)
2651 workscan._add_history("cspline_baseline", varlist)
2652
2653 if insitu:
2654 self._assign(workscan)
2655 else:
2656 return workscan
2657
2658 except RuntimeError, e:
2659 raise_fitting_failure_exception(e)
2660
2661 @asaplog_post_dec
    def auto_cspline_baseline(self, mask=None, insitu=None, npiece=None,
                              clipthresh=None, clipniter=None,
                              edge=None, threshold=None, chan_avg_limit=None,
                              getresidual=None, plot=None,
                              showprogress=None, minnrow=None, outlog=None,
                              blfile=None):
        """\
        Return a scan which has been baselined (all rows) by cubic spline
        function (piecewise cubic polynomial).
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            npiece:         Number of pieces. (default is 2)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are
            now based on specunit of 'channel'.
        """

        try:
            # snapshot the caller's arguments (before defaults are filled
            # in) for the history entry written below
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in defaults for every unspecified parameter
            if mask is None: mask = \
                [True for i in xrange(workscan.nchan())]
            if npiece is None: npiece = 2
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is
            # implemented as a fitter method.
            # delegate to the C++ layer, which also runs the line finder
            # (threshold/chan_avg_limit/edge) before fitting
            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
                                            clipniter,
                                            normalise_edge_param(edge),
                                            threshold,
                                            chan_avg_limit, getresidual,
                                            pack_progress_params(showprogress,
                                                                 minnrow),
                                            outlog, blfile)
            workscan._add_history("auto_cspline_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2770
2771 @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # snapshot the caller's arguments (before defaults are filled
            # in) for the history entry written below
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = \
                [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit each row with the Python fitter and
                # let the user accept/reject per row
                # NOTE(review): output is only appended if blfile already
                # exists (os.path.exists check) -- confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ layer
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2883
2884 @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # snapshot the caller's arguments (before defaults are filled
            # in) for the history entry written below
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = \
                [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # flatten the (possibly per-IF nested) edge spec into a flat list
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run the line finder per row, fit with the
                # Python fitter, and let the user accept/reject per row
                # NOTE(review): output is only appended if blfile already
                # exists (os.path.exists check) -- confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # each IF has its own (left, right) pair in the
                    # flattened edge list
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2]) # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline
                    workscan._setspectrum((f.fitter.getresidual() \
                                               if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ layer
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3033
3034 def _init_blinfo(self):
3035 """\
3036 Initialise the following three auxiliary members:
3037 blpars : parameters of the best-fit baseline,
3038 masklists : mask data (edge positions of masked channels) and
3039 actualmask : mask data (in boolean list),
3040 to keep for use later (including output to logger/text files).
3041 Used by poly_baseline() and auto_poly_baseline() in case of
3042 'plot=True'.
3043 """
3044 self.blpars = []
3045 self.masklists = []
3046 self.actualmask = []
3047 return
3048
3049 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3050 """\
3051 Append baseline-fitting related info to blpars, masklist and
3052 actualmask.
3053 """
3054 self.blpars.append(data_blpars)
3055 self.masklists.append(data_masklists)
3056 self.actualmask.append(data_actualmask)
3057 return
3058
3059 @asaplog_post_dec
3060 def rotate_linpolphase(self, angle):
3061 """\
3062 Rotate the phase of the complex polarization O=Q+iU correlation.
3063 This is always done in situ in the raw data. So if you call this
3064 function more than once then each call rotates the phase further.
3065
3066 Parameters:
3067
3068 angle: The angle (degrees) to rotate (add) by.
3069
3070 Example::
3071
3072 scan.rotate_linpolphase(2.3)
3073
3074 """
3075 varlist = vars()
3076 self._math._rotate_linpolphase(self, angle)
3077 self._add_history("rotate_linpolphase", varlist)
3078 return
3079
3080 @asaplog_post_dec
3081 def rotate_xyphase(self, angle):
3082 """\
3083 Rotate the phase of the XY correlation. This is always done in situ
3084 in the data. So if you call this function more than once
3085 then each call rotates the phase further.
3086
3087 Parameters:
3088
3089 angle: The angle (degrees) to rotate (add) by.
3090
3091 Example::
3092
3093 scan.rotate_xyphase(2.3)
3094
3095 """
3096 varlist = vars()
3097 self._math._rotate_xyphase(self, angle)
3098 self._add_history("rotate_xyphase", varlist)
3099 return
3100
3101 @asaplog_post_dec
3102 def swap_linears(self):
3103 """\
3104 Swap the linear polarisations XX and YY, or better the first two
3105 polarisations as this also works for ciculars.
3106 """
3107 varlist = vars()
3108 self._math._swap_linears(self)
3109 self._add_history("swap_linears", varlist)
3110 return
3111
3112 @asaplog_post_dec
3113 def invert_phase(self):
3114 """\
3115 Invert the phase of the complex polarisation
3116 """
3117 varlist = vars()
3118 self._math._invert_phase(self)
3119 self._add_history("invert_phase", varlist)
3120 return
3121
3122 @asaplog_post_dec
3123 def add(self, offset, insitu=None):
3124 """\
3125 Return a scan where all spectra have the offset added
3126
3127 Parameters:
3128
3129 offset: the offset
3130
3131 insitu: if False a new scantable is returned.
3132 Otherwise, the scaling is done in-situ
3133 The default is taken from .asaprc (False)
3134
3135 """
3136 if insitu is None: insitu = rcParams['insitu']
3137 self._math._setinsitu(insitu)
3138 varlist = vars()
3139 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3140 s._add_history("add", varlist)
3141 if insitu:
3142 self._assign(s)
3143 else:
3144 return s
3145
3146 @asaplog_post_dec
3147 def scale(self, factor, tsys=True, insitu=None):
3148 """\
3149
3150 Return a scan where all spectra are scaled by the given 'factor'
3151
3152 Parameters:
3153
3154 factor: the scaling factor (float or 1D float list)
3155
3156 insitu: if False a new scantable is returned.
3157 Otherwise, the scaling is done in-situ
3158 The default is taken from .asaprc (False)
3159
3160 tsys: if True (default) then apply the operation to Tsys
3161 as well as the data
3162
3163 """
3164 if insitu is None: insitu = rcParams['insitu']
3165 self._math._setinsitu(insitu)
3166 varlist = vars()
3167 s = None
3168 import numpy
3169 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3170 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3171 from asapmath import _array2dOp
3172 s = _array2dOp( self.copy(), factor, "MUL", tsys )
3173 else:
3174 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3175 else:
3176 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
3177 s._add_history("scale", varlist)
3178 if insitu:
3179 self._assign(s)
3180 else:
3181 return s
3182
3183 def set_sourcetype(self, match, matchtype="pattern",
3184 sourcetype="reference"):
3185 """\
3186 Set the type of the source to be an source or reference scan
3187 using the provided pattern.
3188
3189 Parameters:
3190
3191 match: a Unix style pattern, regular expression or selector
3192
3193 matchtype: 'pattern' (default) UNIX style pattern or
3194 'regex' regular expression
3195
3196 sourcetype: the type of the source to use (source/reference)
3197
3198 """
3199 varlist = vars()
3200 basesel = self.get_selection()
3201 stype = -1
3202 if sourcetype.lower().startswith("r"):
3203 stype = 1
3204 elif sourcetype.lower().startswith("s"):
3205 stype = 0
3206 else:
3207 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3208 if matchtype.lower().startswith("p"):
3209 matchtype = "pattern"
3210 elif matchtype.lower().startswith("r"):
3211 matchtype = "regex"
3212 else:
3213 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3214 sel = selector()
3215 if isinstance(match, selector):
3216 sel = match
3217 else:
3218 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3219 self.set_selection(basesel+sel)
3220 self._setsourcetype(stype)
3221 self.set_selection(basesel)
3222 self._add_history("set_sourcetype", varlist)
3223
3224 @asaplog_post_dec
3225 @preserve_selection
3226 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3227 """\
3228 This function allows to build quotients automatically.
3229 It assumes the observation to have the same number of
3230 "ons" and "offs"
3231
3232 Parameters:
3233
3234 preserve: you can preserve (default) the continuum or
3235 remove it. The equations used are
3236
3237 preserve: Output = Toff * (on/off) - Toff
3238
3239 remove: Output = Toff * (on/off) - Ton
3240
3241 mode: the on/off detection mode
3242 'paired' (default)
3243 identifies 'off' scans by the
3244 trailing '_R' (Mopra/Parkes) or
3245 '_e'/'_w' (Tid) and matches
3246 on/off pairs from the observing pattern
3247 'time'
3248 finds the closest off in time
3249
3250 .. todo:: verify argument is not implemented
3251
3252 """
3253 varlist = vars()
3254 modes = ["time", "paired"]
3255 if not mode in modes:
3256 msg = "please provide valid mode. Valid modes are %s" % (modes)
3257 raise ValueError(msg)
3258 s = None
3259 if mode.lower() == "paired":
3260 sel = self.get_selection()
3261 sel.set_query("SRCTYPE==psoff")
3262 self.set_selection(sel)
3263 offs = self.copy()
3264 sel.set_query("SRCTYPE==pson")
3265 self.set_selection(sel)
3266 ons = self.copy()
3267 s = scantable(self._math._quotient(ons, offs, preserve))
3268 elif mode.lower() == "time":
3269 s = scantable(self._math._auto_quotient(self, mode, preserve))
3270 s._add_history("auto_quotient", varlist)
3271 return s
3272
3273 @asaplog_post_dec
3274 def mx_quotient(self, mask = None, weight='median', preserve=True):
3275 """\
3276 Form a quotient using "off" beams when observing in "MX" mode.
3277
3278 Parameters:
3279
3280 mask: an optional mask to be used when weight == 'stddev'
3281
3282 weight: How to average the off beams. Default is 'median'.
3283
3284 preserve: you can preserve (default) the continuum or
3285 remove it. The equations used are:
3286
3287 preserve: Output = Toff * (on/off) - Toff
3288
3289 remove: Output = Toff * (on/off) - Ton
3290
3291 """
3292 mask = mask or ()
3293 varlist = vars()
3294 on = scantable(self._math._mx_extract(self, 'on'))
3295 preoff = scantable(self._math._mx_extract(self, 'off'))
3296 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3297 from asapmath import quotient
3298 q = quotient(on, off, preserve)
3299 q._add_history("mx_quotient", varlist)
3300 return q
3301
3302 @asaplog_post_dec
3303 def freq_switch(self, insitu=None):
3304 """\
3305 Apply frequency switching to the data.
3306
3307 Parameters:
3308
3309 insitu: if False a new scantable is returned.
3310 Otherwise, the swictching is done in-situ
3311 The default is taken from .asaprc (False)
3312
3313 """
3314 if insitu is None: insitu = rcParams['insitu']
3315 self._math._setinsitu(insitu)
3316 varlist = vars()
3317 s = scantable(self._math._freqswitch(self))
3318 s._add_history("freq_switch", varlist)
3319 if insitu:
3320 self._assign(s)
3321 else:
3322 return s
3323
3324 @asaplog_post_dec
3325 def recalc_azel(self):
3326 """Recalculate the azimuth and elevation for each position."""
3327 varlist = vars()
3328 self._recalcazel()
3329 self._add_history("recalc_azel", varlist)
3330 return
3331
3332 @asaplog_post_dec
3333 def __add__(self, other):
3334 varlist = vars()
3335 s = None
3336 if isinstance(other, scantable):
3337 s = scantable(self._math._binaryop(self, other, "ADD"))
3338 elif isinstance(other, float):
3339 s = scantable(self._math._unaryop(self, other, "ADD", False))
3340 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3341 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3342 from asapmath import _array2dOp
3343 s = _array2dOp( self.copy(), other, "ADD", False )
3344 else:
3345 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
3346 else:
3347 raise TypeError("Other input is not a scantable or float value")
3348 s._add_history("operator +", varlist)
3349 return s
3350
3351 @asaplog_post_dec
3352 def __sub__(self, other):
3353 """
3354 implicit on all axes and on Tsys
3355 """
3356 varlist = vars()
3357 s = None
3358 if isinstance(other, scantable):
3359 s = scantable(self._math._binaryop(self, other, "SUB"))
3360 elif isinstance(other, float):
3361 s = scantable(self._math._unaryop(self, other, "SUB", False))
3362 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3363 if isinstance(other[0], list) or isinstance(other[0],
3364 numpy.ndarray):
3365 from asapmath import _array2dOp
3366 s = _array2dOp( self.copy(), other, "SUB", False )
3367 else:
3368 s = scantable( self._math._arrayop( self.copy(), other,
3369 "SUB", False ) )
3370 else:
3371 raise TypeError("Other input is not a scantable or float value")
3372 s._add_history("operator -", varlist)
3373 return s
3374
3375 @asaplog_post_dec
3376 def __mul__(self, other):
3377 """
3378 implicit on all axes and on Tsys
3379 """
3380 varlist = vars()
3381 s = None
3382 if isinstance(other, scantable):
3383 s = scantable(self._math._binaryop(self, other, "MUL"))
3384 elif isinstance(other, float):
3385 s = scantable(self._math._unaryop(self, other, "MUL", False))
3386 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3387 if isinstance(other[0], list) or isinstance(other[0],
3388 numpy.ndarray):
3389 from asapmath import _array2dOp
3390 s = _array2dOp( self.copy(), other, "MUL", False )
3391 else:
3392 s = scantable( self._math._arrayop( self.copy(), other,
3393 "MUL", False ) )
3394 else:
3395 raise TypeError("Other input is not a scantable or float value")
3396 s._add_history("operator *", varlist)
3397 return s
3398
3399
3400 @asaplog_post_dec
3401 def __div__(self, other):
3402 """
3403 implicit on all axes and on Tsys
3404 """
3405 varlist = vars()
3406 s = None
3407 if isinstance(other, scantable):
3408 s = scantable(self._math._binaryop(self, other, "DIV"))
3409 elif isinstance(other, float):
3410 if other == 0.0:
3411 raise ZeroDivisionError("Dividing by zero is not recommended")
3412 s = scantable(self._math._unaryop(self, other, "DIV", False))
3413 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3414 if isinstance(other[0], list) or isinstance(other[0],
3415 numpy.ndarray):
3416 from asapmath import _array2dOp
3417 s = _array2dOp( self.copy(), other, "DIV", False )
3418 else:
3419 s = scantable( self._math._arrayop( self.copy(), other,
3420 "DIV", False ) )
3421 else:
3422 raise TypeError("Other input is not a scantable or float value")
3423 s._add_history("operator /", varlist)
3424 return s
3425
3426 @asaplog_post_dec
3427 def get_fit(self, row=0):
3428 """\
3429 Print or return the stored fits for a row in the scantable
3430
3431 Parameters:
3432
3433 row: the row which the fit has been applied to.
3434
3435 """
3436 if row > self.nrow():
3437 return
3438 from asap.asapfit import asapfit
3439 fit = asapfit(self._getfit(row))
3440 asaplog.push( '%s' %(fit) )
3441 return fit.as_dict()
3442
3443 def flag_nans(self):
3444 """\
3445 Utility function to flag NaN values in the scantable.
3446 """
3447 import numpy
3448 basesel = self.get_selection()
3449 for i in range(self.nrow()):
3450 sel = self.get_row_selector(i)
3451 self.set_selection(basesel+sel)
3452 nans = numpy.isnan(self._getspectrum(0))
3453 if numpy.any(nans):
3454 bnans = [ bool(v) for v in nans]
3455 self.flag(bnans)
3456 self.set_selection(basesel)
3457
3458 def get_row_selector(self, rowno):
3459 #return selector(beams=self.getbeam(rowno),
3460 # ifs=self.getif(rowno),
3461 # pols=self.getpol(rowno),
3462 # scans=self.getscan(rowno),
3463 # cycles=self.getcycle(rowno))
3464 return selector(rows=[rowno])
3465
3466 def _add_history(self, funcname, parameters):
3467 if not rcParams['scantable.history']:
3468 return
3469 # create date
3470 sep = "##"
3471 from datetime import datetime
3472 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3473 hist = dstr+sep
3474 hist += funcname+sep#cdate+sep
3475 if parameters.has_key('self'): del parameters['self']
3476 for k, v in parameters.iteritems():
3477 if type(v) is dict:
3478 for k2, v2 in v.iteritems():
3479 hist += k2
3480 hist += "="
3481 if isinstance(v2, scantable):
3482 hist += 'scantable'
3483 elif k2 == 'mask':
3484 if isinstance(v2, list) or isinstance(v2, tuple):
3485 hist += str(self._zip_mask(v2))
3486 else:
3487 hist += str(v2)
3488 else:
3489 hist += str(v2)
3490 else:
3491 hist += k
3492 hist += "="
3493 if isinstance(v, scantable):
3494 hist += 'scantable'
3495 elif k == 'mask':
3496 if isinstance(v, list) or isinstance(v, tuple):
3497 hist += str(self._zip_mask(v))
3498 else:
3499 hist += str(v)
3500 else:
3501 hist += str(v)
3502 hist += sep
3503 hist = hist[:-2] # remove trailing '##'
3504 self._addhistory(hist)
3505
3506
3507 def _zip_mask(self, mask):
3508 mask = list(mask)
3509 i = 0
3510 segments = []
3511 while mask[i:].count(1):
3512 i += mask[i:].index(1)
3513 if mask[i:].count(0):
3514 j = i + mask[i:].index(0)
3515 else:
3516 j = len(mask)
3517 segments.append([i, j])
3518 i = j
3519 return segments
3520
3521 def _get_ordinate_label(self):
3522 fu = "("+self.get_fluxunit()+")"
3523 import re
3524 lbl = "Intensity"
3525 if re.match(".K.", fu):
3526 lbl = "Brightness Temperature "+ fu
3527 elif re.match(".Jy.", fu):
3528 lbl = "Flux density "+ fu
3529 return lbl
3530
3531 def _check_ifs(self):
3532 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3533 nchans = [self.nchan(i) for i in self.getifnos()]
3534 nchans = filter(lambda t: t > 0, nchans)
3535 return (sum(nchans)/len(nchans) == nchans[0])
3536
3537 @asaplog_post_dec
3538 #def _fill(self, names, unit, average, getpt, antenna):
3539 def _fill(self, names, unit, average, opts={}):
3540 first = True
3541 fullnames = []
3542 for name in names:
3543 name = os.path.expandvars(name)
3544 name = os.path.expanduser(name)
3545 if not os.path.exists(name):
3546 msg = "File '%s' does not exists" % (name)
3547 raise IOError(msg)
3548 fullnames.append(name)
3549 if average:
3550 asaplog.push('Auto averaging integrations')
3551 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3552 for name in fullnames:
3553 tbl = Scantable(stype)
3554 if is_ms( name ):
3555 r = msfiller( tbl )
3556 else:
3557 r = filler( tbl )
3558 rx = rcParams['scantable.reference']
3559 r.setreferenceexpr(rx)
3560 #r = filler(tbl)
3561 #rx = rcParams['scantable.reference']
3562 #r.setreferenceexpr(rx)
3563 msg = "Importing %s..." % (name)
3564 asaplog.push(msg, False)
3565 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3566 r.open(name, opts)# antenna, -1, -1, getpt)
3567 r.fill()
3568 if average:
3569 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3570 if not first:
3571 tbl = self._math._merge([self, tbl])
3572 Scantable.__init__(self, tbl)
3573 r.close()
3574 del r, tbl
3575 first = False
3576 #flush log
3577 asaplog.post()
3578 if unit is not None:
3579 self.set_fluxunit(unit)
3580 if not is_casapy():
3581 self.set_freqframe(rcParams['scantable.freqframe'])
3582
3583
3584 def __getitem__(self, key):
3585 if key < 0:
3586 key += self.nrow()
3587 if key >= self.nrow():
3588 raise IndexError("Row index out of range.")
3589 return self._getspectrum(key)
3590
3591 def __setitem__(self, key, value):
3592 if key < 0:
3593 key += self.nrow()
3594 if key >= self.nrow():
3595 raise IndexError("Row index out of range.")
3596 if not hasattr(value, "__len__") or \
3597 len(value) > self.nchan(self.getif(key)):
3598 raise ValueError("Spectrum length doesn't match.")
3599 return self._setspectrum(value, key)
3600
    def __len__(self):
        """Return the number of rows, so len(scan) == scan.nrow()."""
        return self.nrow()
3603
3604 def __iter__(self):
3605 for i in range(len(self)):
3606 yield self[i]
Note: See TracBrowser for help on using the repository browser.