source: trunk/python/scantable.py@ 2287

Last change on this file since 2287 was 2286, checked in by Kana Sugimoto, 13 years ago

New Development: No (performance tuning)

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: a parameter "filename" is added to Scantable::summary. scantable.summary doesn't return a string anymore

Test Programs: sdlist unittest/ scantable.summary("summary.txt")

Put in Release Notes: Yes

Module(s): sdlist, asap.summary

Description:

scantable.summary is very slow for large data sets (in row number) often outputted
by modern telescopes. It takes > 1.5 hours to list OTF raster scan with 350,000 rows.

This was because the method accumulated the whole text (~700,000 lines) and
returned it as a single string. Once the summary string exceeds several tens of
thousands of lines, the elapsed time increases non-linearly, possibly because the
very large output string starts to exhaust memory.

I updated scantable.summary so that it flushes the summary string more often to file/logger.
After the modification, scantable.summary could list the data mentioned above in ~ 7 minutes.
The side effect of it is that scantable.summary doesn't return summary string anymore.
(But people may not happy with sub-million lines of string anyway.)


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.1 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler, msfiller
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
def preserve_selection(func):
    """Decorator that restores the scantable's selection after 'func' runs.

    The wrapped method may change the selection freely; the selection that
    was active on entry is reinstated even if the method raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    """
    if not os.path.isdir(filename):
        return False
    if not (os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    firstline = f.readline()
    f.close()
    # Any casacore table that is NOT a Measurement Set is treated as a
    # scantable here (the type string is in the first line of table.info).
    return firstline.find('Measurement Set') == -1
57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    """
    if not os.path.isdir(filename):
        return False
    if not (os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    firstline = f.readline()
    f.close()
    # table.info's first line carries the table type string
    return firstline.find('Measurement Set') != -1
78
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                          or [[5,5],10], but can NOT be simplified to [5,10].
                          when [5,10] given, it is interpreted as
                          [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation: every element must be an int or a pair of ints ---
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                    to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
            pair of integers specified as a tuple. \
            Nested tuples are allowed \
            to make individual selection for different IFs."

    # --- canonicalise the trivial scalar/short-list shapes first ---
    if isinstance(edge, int):
        edge = [ edge, edge ]                 # e.g. 3   => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                     # e.g. []  => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]

    # 'commonedge' means a single [left,right] pair shared by all IFs;
    # any nested list/tuple element (or length > 2) forces per-IF values.
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten: single ints are duplicated, pairs are appended as-is,
        # yielding [l0, r0, l1, r1, ...] with one pair per IF
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
163
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when rcParams['verbose'] is set,
    otherwise re-raise it as a RuntimeError carrying the original error."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
171
def pack_progress_params(showprogress, minnrow):
    """Serialise progress options as the 'flag,minnrow' string expected by
    the C++ layer, e.g. (True, 1000) -> 'true,1000'."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
174
175class scantable(Scantable):
176 """\
177 The ASAP container for scans (single-dish data).
178 """
179
180 @asaplog_post_dec
181 #def __init__(self, filename, average=None, unit=None, getpt=None,
182 # antenna=None, parallactify=None):
183 def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
184 """\
185 Create a scantable from a saved one or make a reference
186
187 Parameters:
188
189 filename: the name of an asap table on disk
190 or
191 the name of a rpfits/sdfits/ms file
192 (integrations within scans are auto averaged
193 and the whole file is read) or
194 [advanced] a reference to an existing scantable
195
196 average: average all integrations withinb a scan on read.
197 The default (True) is taken from .asaprc.
198
199 unit: brightness unit; must be consistent with K or Jy.
200 Over-rides the default selected by the filler
201 (input rpfits/sdfits/ms) or replaces the value
202 in existing scantables
203
204 getpt: for MeasurementSet input data only:
205 If True, all pointing data are filled.
206 The deafult is False, which makes time to load
207 the MS data faster in some cases.
208
209 antenna: for MeasurementSet input data only:
210 Antenna selection. integer (id) or string (name or id).
211
212 parallactify: Indicate that the data had been parallatified. Default
213 is taken from rc file.
214
215 """
216 if average is None:
217 average = rcParams['scantable.autoaverage']
218 #if getpt is None:
219 # getpt = True
220 #if antenna is not None:
221 # asaplog.push("Antenna selection currently unsupported."
222 # "Using ''")
223 # asaplog.post('WARN')
224 #if antenna is None:
225 # antenna = ''
226 #elif type(antenna) == int:
227 # antenna = '%s' % antenna
228 #elif type(antenna) == list:
229 # tmpstr = ''
230 # for i in range( len(antenna) ):
231 # if type(antenna[i]) == int:
232 # tmpstr = tmpstr + ('%s,'%(antenna[i]))
233 # elif type(antenna[i]) == str:
234 # tmpstr=tmpstr+antenna[i]+','
235 # else:
236 # raise TypeError('Bad antenna selection.')
237 # antenna = tmpstr.rstrip(',')
238 parallactify = parallactify or rcParams['scantable.parallactify']
239 varlist = vars()
240 from asap._asap import stmath
241 self._math = stmath( rcParams['insitu'] )
242 if isinstance(filename, Scantable):
243 Scantable.__init__(self, filename)
244 else:
245 if isinstance(filename, str):
246 filename = os.path.expandvars(filename)
247 filename = os.path.expanduser(filename)
248 if not os.path.exists(filename):
249 s = "File '%s' not found." % (filename)
250 raise IOError(s)
251 if is_scantable(filename):
252 ondisk = rcParams['scantable.storage'] == 'disk'
253 Scantable.__init__(self, filename, ondisk)
254 if unit is not None:
255 self.set_fluxunit(unit)
256 if average:
257 self._assign( self.average_time( scanav=True ) )
258 # do not reset to the default freqframe
259 #self.set_freqframe(rcParams['scantable.freqframe'])
260 #elif os.path.isdir(filename) \
261 # and not os.path.exists(filename+'/table.f1'):
262 elif is_ms(filename):
263 # Measurement Set
264 opts={'ms': {}}
265 mskeys=['getpt','antenna']
266 for key in mskeys:
267 if key in args.keys():
268 opts['ms'][key] = args[key]
269 #self._fill([filename], unit, average, getpt, antenna)
270 self._fill([filename], unit, average, opts)
271 elif os.path.isfile(filename):
272 #self._fill([filename], unit, average, getpt, antenna)
273 self._fill([filename], unit, average)
274 else:
275 msg = "The given file '%s'is not a valid " \
276 "asap table." % (filename)
277 raise IOError(msg)
278 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
279 and isinstance(filename[-1], str):
280 #self._fill(filename, unit, average, getpt, antenna)
281 self._fill(filename, unit, average)
282 self.parallactify(parallactify)
283 self._add_history("scantable", varlist)
284
285 @asaplog_post_dec
286 def save(self, name=None, format=None, overwrite=False):
287 """\
288 Store the scantable on disk. This can be an asap (aips++) Table,
289 SDFITS or MS2 format.
290
291 Parameters:
292
293 name: the name of the outputfile. For format "ASCII"
294 this is the root file name (data in 'name'.txt
295 and header in 'name'_header.txt)
296
297 format: an optional file format. Default is ASAP.
298 Allowed are:
299
300 * 'ASAP' (save as ASAP [aips++] Table),
301 * 'SDFITS' (save as SDFITS file)
302 * 'ASCII' (saves as ascii text file)
303 * 'MS2' (saves as an casacore MeasurementSet V2)
304 * 'FITS' (save as image FITS - not readable by class)
305 * 'CLASS' (save as FITS readable by CLASS)
306
307 overwrite: If the file should be overwritten if it exists.
308 The default False is to return with warning
309 without writing the output. USE WITH CARE.
310
311 Example::
312
313 scan.save('myscan.asap')
314 scan.save('myscan.sdfits', 'SDFITS')
315
316 """
317 from os import path
318 format = format or rcParams['scantable.save']
319 suffix = '.'+format.lower()
320 if name is None or name == "":
321 name = 'scantable'+suffix
322 msg = "No filename given. Using default name %s..." % name
323 asaplog.push(msg)
324 name = path.expandvars(name)
325 if path.isfile(name) or path.isdir(name):
326 if not overwrite:
327 msg = "File %s exists." % name
328 raise IOError(msg)
329 format2 = format.upper()
330 if format2 == 'ASAP':
331 self._save(name)
332 elif format2 == 'MS2':
333 msopt = {'ms': {'overwrite': overwrite } }
334 from asap._asap import mswriter
335 writer = mswriter( self )
336 writer.write( name, msopt )
337 else:
338 from asap._asap import stwriter as stw
339 writer = stw(format2)
340 writer.write(self, name)
341 return
342
343 def copy(self):
344 """Return a copy of this scantable.
345
346 *Note*:
347
348 This makes a full (deep) copy. scan2 = scan1 makes a reference.
349
350 Example::
351
352 copiedscan = scan.copy()
353
354 """
355 sd = scantable(Scantable._copy(self))
356 return sd
357
358 def drop_scan(self, scanid=None):
359 """\
360 Return a new scantable where the specified scan number(s) has(have)
361 been dropped.
362
363 Parameters:
364
365 scanid: a (list of) scan number(s)
366
367 """
368 from asap import _is_sequence_or_number as _is_valid
369 from asap import _to_list
370 from asap import unique
371 if not _is_valid(scanid):
372 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
373 scanid = _to_list(scanid)
374 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
375 for sid in scanid: allscans.remove(sid)
376 if len(allscans) == 0:
377 raise ValueError("Can't remove all scans")
378 sel = selector(scans=allscans)
379 return self._select_copy(sel)
380
381 def _select_copy(self, selection):
382 orig = self.get_selection()
383 self.set_selection(orig+selection)
384 cp = self.copy()
385 self.set_selection(orig)
386 return cp
387
388 def get_scan(self, scanid=None):
389 """\
390 Return a specific scan (by scanno) or collection of scans (by
391 source name) in a new scantable.
392
393 *Note*:
394
395 See scantable.drop_scan() for the inverse operation.
396
397 Parameters:
398
399 scanid: a (list of) scanno or a source name, unix-style
400 patterns are accepted for source name matching, e.g.
401 '*_R' gets all 'ref scans
402
403 Example::
404
405 # get all scans containing the source '323p459'
406 newscan = scan.get_scan('323p459')
407 # get all 'off' scans
408 refscans = scan.get_scan('*_R')
409 # get a susbset of scans by scanno (as listed in scan.summary())
410 newscan = scan.get_scan([0, 2, 7, 10])
411
412 """
413 if scanid is None:
414 raise RuntimeError( 'Please specify a scan no or name to '
415 'retrieve from the scantable' )
416 try:
417 bsel = self.get_selection()
418 sel = selector()
419 if type(scanid) is str:
420 sel.set_name(scanid)
421 return self._select_copy(sel)
422 elif type(scanid) is int:
423 sel.set_scans([scanid])
424 return self._select_copy(sel)
425 elif type(scanid) is list:
426 sel.set_scans(scanid)
427 return self._select_copy(sel)
428 else:
429 msg = "Illegal scanid type, use 'int' or 'list' if ints."
430 raise TypeError(msg)
431 except RuntimeError:
432 raise
433
434 def __str__(self):
435 return Scantable._summary(self)
436
437 def summary(self, filename=None):
438 """\
439 Print a summary of the contents of this scantable.
440
441 Parameters:
442
443 filename: the name of a file to write the putput to
444 Default - no file output
445
446 """
447# info = Scantable._summary(self)
448 if filename is not None:
449 if filename is "":
450 filename = 'scantable_summary.txt'
451 from os.path import expandvars, isdir
452 filename = expandvars(filename)
453# if not isdir(filename):
454# data = open(filename, 'w')
455# data.write(info)
456# data.close()
457# else:
458 if isdir(filename):
459 msg = "Illegal file name '%s'." % (filename)
460 raise IOError(msg)
461 else:
462 filename = ""
463 Scantable._summary(self, filename)
464# return page(info)
465
466 def get_spectrum(self, rowno):
467 """Return the spectrum for the current row in the scantable as a list.
468
469 Parameters:
470
471 rowno: the row number to retrieve the spectrum from
472
473 """
474 return self._getspectrum(rowno)
475
476 def get_mask(self, rowno):
477 """Return the mask for the current row in the scantable as a list.
478
479 Parameters:
480
481 rowno: the row number to retrieve the mask from
482
483 """
484 return self._getmask(rowno)
485
486 def set_spectrum(self, spec, rowno):
487 """Set the spectrum for the current row in the scantable.
488
489 Parameters:
490
491 spec: the new spectrum
492
493 rowno: the row number to set the spectrum for
494
495 """
496 assert(len(spec) == self.nchan())
497 return self._setspectrum(spec, rowno)
498
499 def get_coordinate(self, rowno):
500 """Return the (spectral) coordinate for a a given 'rowno'.
501
502 *Note*:
503
504 * This coordinate is only valid until a scantable method modifies
505 the frequency axis.
506 * This coordinate does contain the original frequency set-up
507 NOT the new frame. The conversions however are done using the user
508 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
509 use scantable.freq_align first. Without it there is no closure,
510 i.e.::
511
512 c = myscan.get_coordinate(0)
513 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
514
515 Parameters:
516
517 rowno: the row number for the spectral coordinate
518
519 """
520 return coordinate(Scantable.get_coordinate(self, rowno))
521
522 def get_selection(self):
523 """\
524 Get the selection object currently set on this scantable.
525
526 Example::
527
528 sel = scan.get_selection()
529 sel.set_ifs(0) # select IF 0
530 scan.set_selection(sel) # apply modified selection
531
532 """
533 return selector(self._getselection())
534
535 def set_selection(self, selection=None, **kw):
536 """\
537 Select a subset of the data. All following operations on this scantable
538 are only applied to thi selection.
539
540 Parameters:
541
542 selection: a selector object (default unset the selection), or
543 any combination of "pols", "ifs", "beams", "scans",
544 "cycles", "name", "query"
545
546 Examples::
547
548 sel = selector() # create a selection object
549 self.set_scans([0, 3]) # select SCANNO 0 and 3
550 scan.set_selection(sel) # set the selection
551 scan.summary() # will only print summary of scanno 0 an 3
552 scan.set_selection() # unset the selection
553 # or the equivalent
554 scan.set_selection(scans=[0,3])
555 scan.summary() # will only print summary of scanno 0 an 3
556 scan.set_selection() # unset the selection
557
558 """
559 if selection is None:
560 # reset
561 if len(kw) == 0:
562 selection = selector()
563 else:
564 # try keywords
565 for k in kw:
566 if k not in selector.fields:
567 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
568 selection = selector(**kw)
569 self._setselection(selection)
570
571 def get_row(self, row=0, insitu=None):
572 """\
573 Select a row in the scantable.
574 Return a scantable with single row.
575
576 Parameters:
577
578 row: row no of integration, default is 0.
579 insitu: if False a new scantable is returned. Otherwise, the
580 scaling is done in-situ. The default is taken from .asaprc
581 (False)
582
583 """
584 if insitu is None: insitu = rcParams['insitu']
585 if not insitu:
586 workscan = self.copy()
587 else:
588 workscan = self
589 # Select a row
590 sel=selector()
591 sel.set_rows([row])
592 #sel.set_scans([workscan.getscan(row)])
593 #sel.set_cycles([workscan.getcycle(row)])
594 #sel.set_beams([workscan.getbeam(row)])
595 #sel.set_ifs([workscan.getif(row)])
596 #sel.set_polarisations([workscan.getpol(row)])
597 #sel.set_name(workscan._getsourcename(row))
598 workscan.set_selection(sel)
599 if not workscan.nrow() == 1:
600 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
601 raise RuntimeError(msg)
602 del sel
603 if insitu:
604 self._assign(workscan)
605 else:
606 return workscan
607
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa value at the extremum
        # instead of the statistic itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel at which the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        # NOTE(review): if 'row' is neither None nor an int, 'rows' stays
        # undefined and the loop below raises NameError.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # for '*_abc' the abscissa value IS the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                # per-row values are aligned with row index 'i'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # single-row request: _statsrow returned exactly one value
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # Route the report through a temp file so the logger receives it in
        # one push. NOTE(review): the fixed, predictable /tmp file name is
        # shared per-user and not created securely -- consider tempfile.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
715
716 def chan2data(self, rowno=0, chan=0):
717 """\
718 Returns channel/frequency/velocity and spectral value
719 at an arbitrary row and channel in the scantable.
720
721 Parameters:
722
723 rowno: a row number in the scantable. Default is the
724 first row, i.e. rowno=0
725
726 chan: a channel in the scantable. Default is the first
727 channel, i.e. pos=0
728
729 """
730 if isinstance(rowno, int) and isinstance(chan, int):
731 qx = {'unit': self.get_unit(),
732 'value': self._getabcissa(rowno)[chan]}
733 qy = {'unit': self.get_fluxunit(),
734 'value': self._getspectrum(rowno)[chan]}
735 return qx, qy
736
737 def stddev(self, mask=None):
738 """\
739 Determine the standard deviation of the current beam/if/pol
740 Takes a 'mask' as an optional parameter to specify which
741 channels should be excluded.
742
743 Parameters:
744
745 mask: an optional mask specifying where the standard
746 deviation should be determined.
747
748 Example::
749
750 scan.set_unit('channel')
751 msk = scan.create_mask([100, 200], [500, 600])
752 scan.stddev(mask=m)
753
754 """
755 return self.stats(stat='stddev', mask=mask);
756
757
758 def get_column_names(self):
759 """\
760 Return a list of column names, which can be used for selection.
761 """
762 return list(Scantable.get_column_names(self))
763
764 def get_tsys(self, row=-1):
765 """\
766 Return the System temperatures.
767
768 Parameters:
769
770 row: the rowno to get the information for. (default all rows)
771
772 Returns:
773
774 a list of Tsys values for the current selection
775
776 """
777 if row > -1:
778 return self._get_column(self._gettsys, row)
779 return self._row_callback(self._gettsys, "Tsys")
780
781
782 def get_weather(self, row=-1):
783 """\
784 Return the weather informations.
785
786 Parameters:
787
788 row: the rowno to get the information for. (default all rows)
789
790 Returns:
791
792 a dict or list of of dicts of values for the current selection
793
794 """
795
796 values = self._get_column(self._get_weather, row)
797 if row > -1:
798 return {'temperature': values[0],
799 'pressure': values[1], 'humidity' : values[2],
800 'windspeed' : values[3], 'windaz' : values[4]
801 }
802 else:
803 out = []
804 for r in values:
805
806 out.append({'temperature': r[0],
807 'pressure': r[1], 'humidity' : r[2],
808 'windspeed' : r[3], 'windaz' : r[4]
809 })
810 return out
811
812 def _row_callback(self, callback, label):
813 out = ""
814 outvec = []
815 sep = '-'*50
816 for i in range(self.nrow()):
817 tm = self._gettime(i)
818 src = self._getsourcename(i)
819 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
820 out += 'Time[%s]:\n' % (tm)
821 if self.nbeam(-1) > 1:
822 out += ' Beam[%d] ' % (self.getbeam(i))
823 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
824 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
825 outvec.append(callback(i))
826 out += '= %3.3f\n' % (outvec[i])
827 out += sep+'\n'
828
829 asaplog.push(sep)
830 asaplog.push(" %s" % (label))
831 asaplog.push(sep)
832 asaplog.push(out)
833 asaplog.post()
834 return outvec
835
836 def _get_column(self, callback, row=-1, *args):
837 """
838 """
839 if row == -1:
840 return [callback(i, *args) for i in range(self.nrow())]
841 else:
842 if 0 <= row < self.nrow():
843 return callback(row, *args)
844
845
846 def get_time(self, row=-1, asdatetime=False, prec=-1):
847 """\
848 Get a list of time stamps for the observations.
849 Return a datetime object or a string (default) for each
850 integration time stamp in the scantable.
851
852 Parameters:
853
854 row: row no of integration. Default -1 return all rows
855
856 asdatetime: return values as datetime objects rather than strings
857
858 prec: number of digits shown. Default -1 to automatic calculation.
859 Note this number is equals to the digits of MVTime,
860 i.e., 0<prec<3: dates with hh:: only,
861 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
862 and 6> : with hh:mm:ss.tt... (prec-6 t's added)
863
864 """
865 from datetime import datetime
866 if prec < 0:
867 # automagically set necessary precision +1
868 prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
869 prec = max(6, int(prec))
870 else:
871 prec = max(0, prec)
872 if asdatetime:
873 #precision can be 1 millisecond at max
874 prec = min(12, prec)
875
876 times = self._get_column(self._gettime, row, prec)
877 if not asdatetime:
878 return times
879 format = "%Y/%m/%d/%H:%M:%S.%f"
880 if prec < 7:
881 nsub = 1 + (((6-prec)/2) % 3)
882 substr = [".%f","%S","%M"]
883 for i in range(nsub):
884 format = format.replace(substr[i],"")
885 if isinstance(times, list):
886 return [datetime.strptime(i, format) for i in times]
887 else:
888 return datetime.strptime(times, format)
889
890
891 def get_inttime(self, row=-1):
892 """\
893 Get a list of integration times for the observations.
894 Return a time in seconds for each integration in the scantable.
895
896 Parameters:
897
898 row: row no of integration. Default -1 return all rows.
899
900 """
901 return self._get_column(self._getinttime, row)
902
903
904 def get_sourcename(self, row=-1):
905 """\
906 Get a list source names for the observations.
907 Return a string for each integration in the scantable.
908 Parameters:
909
910 row: row no of integration. Default -1 return all rows.
911
912 """
913 return self._get_column(self._getsourcename, row)
914
915 def get_elevation(self, row=-1):
916 """\
917 Get a list of elevations for the observations.
918 Return a float for each integration in the scantable.
919
920 Parameters:
921
922 row: row no of integration. Default -1 return all rows.
923
924 """
925 return self._get_column(self._getelevation, row)
926
927 def get_azimuth(self, row=-1):
928 """\
929 Get a list of azimuths for the observations.
930 Return a float for each integration in the scantable.
931
932 Parameters:
933 row: row no of integration. Default -1 return all rows.
934
935 """
936 return self._get_column(self._getazimuth, row)
937
938 def get_parangle(self, row=-1):
939 """\
940 Get a list of parallactic angles for the observations.
941 Return a float for each integration in the scantable.
942
943 Parameters:
944
945 row: row no of integration. Default -1 return all rows.
946
947 """
948 return self._get_column(self._getparangle, row)
949
950 def get_direction(self, row=-1):
951 """
952 Get a list of Positions on the sky (direction) for the observations.
953 Return a string for each integration in the scantable.
954
955 Parameters:
956
957 row: row no of integration. Default -1 return all rows
958
959 """
960 return self._get_column(self._getdirection, row)
961
962 def get_directionval(self, row=-1):
963 """\
964 Get a list of Positions on the sky (direction) for the observations.
965 Return a float for each integration in the scantable.
966
967 Parameters:
968
969 row: row no of integration. Default -1 return all rows
970
971 """
972 return self._get_column(self._getdirectionvec, row)
973
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit:    optional unit, default is 'channel'. Use one of '*Hz',
                     'km/s', 'channel' or equivalent ''

        """
        varlist = vars()
        # '', 'pixel' and 'channel' are all spelled '' internally
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        inf[0] = unit   # element 0 of the coordinate info holds the unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
992
    @asaplog_post_dec
    def set_instrument(self, instr):
        """\
        Set the instrument for subsequent processing.

        Parameters:

            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'

        """
        self._setInstrument(instr)
        # NOTE(review): the history label is misspelled ("set_instument");
        # left unchanged since stored history records may be read elsewhere.
        self._add_history("set_instument", vars())
1006
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:    'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
1019
    @asaplog_post_dec
    def set_doppler(self, doppler='RADIO'):
        """\
        Set the doppler for all following operations on this scantable.

        Parameters:

            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'

        """
        varlist = vars()
        inf = list(self._getcoordinfo())
        inf[2] = doppler   # element 2 of the coordinate info is the doppler
        self._setcoordinfo(inf)
        # NOTE(review): 'varlist' captured above is unused; history is
        # recorded from a fresh vars() call -- possibly unintended.
        self._add_history("set_doppler", vars())
1035
1036 @asaplog_post_dec
1037 def set_freqframe(self, frame=None):
1038 """\
1039 Set the frame type of the Spectral Axis.
1040
1041 Parameters:
1042
1043 frame: an optional frame type, default 'LSRK'. Valid frames are:
1044 'TOPO', 'LSRD', 'LSRK', 'BARY',
1045 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1046
1047 Example::
1048
1049 scan.set_freqframe('BARY')
1050
1051 """
1052 frame = frame or rcParams['scantable.freqframe']
1053 varlist = vars()
1054 # "REST" is not implemented in casacore
1055 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1056 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1057 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1058 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1059
1060 if frame in valid:
1061 inf = list(self._getcoordinfo())
1062 inf[1] = frame
1063 self._setcoordinfo(inf)
1064 self._add_history("set_freqframe", varlist)
1065 else:
1066 msg = "Please specify a valid freq type. Valid types are:\n", valid
1067 raise TypeError(msg)
1068
1069 @asaplog_post_dec
1070 def set_dirframe(self, frame=""):
1071 """\
1072 Set the frame type of the Direction on the sky.
1073
1074 Parameters:
1075
1076 frame: an optional frame type, default ''. Valid frames are:
1077 'J2000', 'B1950', 'GALACTIC'
1078
1079 Example:
1080
1081 scan.set_dirframe('GALACTIC')
1082
1083 """
1084 varlist = vars()
1085 Scantable.set_dirframe(self, frame)
1086 self._add_history("set_dirframe", varlist)
1087
1088 def get_unit(self):
1089 """\
1090 Get the default unit set in this scantable
1091
1092 Returns:
1093
1094 A unit string
1095
1096 """
1097 inf = self._getcoordinfo()
1098 unit = inf[0]
1099 if unit == '': unit = 'channel'
1100 return unit
1101
1102 @asaplog_post_dec
1103 def get_abcissa(self, rowno=0):
1104 """\
1105 Get the abcissa in the current coordinate setup for the currently
1106 selected Beam/IF/Pol
1107
1108 Parameters:
1109
1110 rowno: an optional row number in the scantable. Default is the
1111 first row, i.e. rowno=0
1112
1113 Returns:
1114
1115 The abcissa values and the format string (as a dictionary)
1116
1117 """
1118 abc = self._getabcissa(rowno)
1119 lbl = self._getabcissalabel(rowno)
1120 return abc, lbl
1121
1122 @asaplog_post_dec
1123 def flag(self, row=-1, mask=None, unflag=False):
1124 """\
1125 Flag the selected data using an optional channel mask.
1126
1127 Parameters:
1128
1129 row: an optional row number in the scantable.
1130 Default -1 flags all rows
1131
1132 mask: an optional channel mask, created with create_mask. Default
1133 (no mask) is all channels.
1134
1135 unflag: if True, unflag the data
1136
1137 """
1138 varlist = vars()
1139 mask = mask or []
1140 self._flag(row, mask, unflag)
1141 self._add_history("flag", varlist)
1142
1143 @asaplog_post_dec
1144 def flag_row(self, rows=[], unflag=False):
1145 """\
1146 Flag the selected data in row-based manner.
1147
1148 Parameters:
1149
1150 rows: list of row numbers to be flagged. Default is no row
1151 (must be explicitly specified to execute row-based flagging).
1152
1153 unflag: if True, unflag the data.
1154
1155 """
1156 varlist = vars()
1157 self._flag_row(rows, unflag)
1158 self._add_history("flag_row", varlist)
1159
1160 @asaplog_post_dec
1161 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1162 """\
1163 Flag the selected data outside a specified range (in channel-base)
1164
1165 Parameters:
1166
1167 uthres: upper threshold.
1168
1169 dthres: lower threshold
1170
1171 clipoutside: True for flagging data outside the range [dthres:uthres].
1172 False for flagging data inside the range.
1173
1174 unflag: if True, unflag the data.
1175
1176 """
1177 varlist = vars()
1178 self._clip(uthres, dthres, clipoutside, unflag)
1179 self._add_history("clip", varlist)
1180
1181 @asaplog_post_dec
1182 def lag_flag(self, start, end, unit="MHz", insitu=None):
1183 """\
1184 Flag the data in 'lag' space by providing a frequency to remove.
1185 Flagged data in the scantable get interpolated over the region.
1186 No taper is applied.
1187
1188 Parameters:
1189
1190 start: the start frequency (really a period within the
1191 bandwidth) or period to remove
1192
1193 end: the end frequency or period to remove
1194
1195 unit: the frequency unit (default "MHz") or "" for
1196 explicit lag channels
1197
1198 *Notes*:
1199
1200 It is recommended to flag edges of the band or strong
1201 signals beforehand.
1202
1203 """
1204 if insitu is None: insitu = rcParams['insitu']
1205 self._math._setinsitu(insitu)
1206 varlist = vars()
1207 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1208 if not (unit == "" or base.has_key(unit)):
1209 raise ValueError("%s is not a valid unit." % unit)
1210 if unit == "":
1211 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1212 else:
1213 s = scantable(self._math._lag_flag(self, start*base[unit],
1214 end*base[unit], "frequency"))
1215 s._add_history("lag_flag", varlist)
1216 if insitu:
1217 self._assign(s)
1218 else:
1219 return s
1220
1221 @asaplog_post_dec
1222 def fft(self, rowno=[], mask=[], getrealimag=False):
1223 """\
1224 Apply FFT to the spectra.
1225 Flagged data in the scantable get interpolated over the region.
1226
1227 Parameters:
1228
1229 rowno: The row number(s) to be processed. int, list
1230 and tuple are accepted. By default ([]), FFT
1231 is applied to the whole data.
1232
1233 mask: Auxiliary channel mask(s). Given as a boolean
1234 list, it is applied to all specified rows.
1235 A list of boolean lists can also be used to
1236 apply different masks. In the latter case, the
1237 length of 'mask' must be the same as that of
1238 'rowno'. The default is [].
1239
1240 getrealimag: If True, returns the real and imaginary part
1241 values of the complex results.
1242 If False (the default), returns the amplitude
1243 (absolute value) normalised with Ndata/2 and
1244 phase (argument, in unit of radian).
1245
1246 Returns:
1247
1248 A list of dictionaries containing the results for each spectrum.
1249 Each dictionary contains two values, the real and the imaginary
1250 parts when getrealimag = True, or the amplitude(absolute value)
1251 and the phase(argument) when getrealimag = False. The key for
1252 these values are 'real' and 'imag', or 'ampl' and 'phase',
1253 respectively.
1254 """
1255 if isinstance(rowno, int):
1256 rowno = [rowno]
1257 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1258 raise TypeError("The row number(s) must be int, list or tuple.")
1259
1260 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1261
1262 if not (isinstance(mask, list) or isinstance(mask, tuple)):
1263 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1264 if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1265 if isinstance(mask[0], bool): mask = [mask]
1266 elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1267 raise TypeError("The mask must be a boolean list or a list of boolean list.")
1268
1269 usecommonmask = (len(mask) == 1)
1270 if not usecommonmask:
1271 if len(mask) != len(rowno):
1272 raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1273 for amask in mask:
1274 if len(amask) != self.nchan():
1275 raise ValueError("The spectra and the mask have different length.")
1276
1277 res = []
1278
1279 imask = 0
1280 for whichrow in rowno:
1281 fspec = self._fft(whichrow, mask[imask], getrealimag)
1282 nspec = len(fspec)
1283
1284 i=0
1285 v1=[]
1286 v2=[]
1287 reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
1288
1289 while (i < nspec):
1290 v1.append(fspec[i])
1291 v2.append(fspec[i+1])
1292 i+=2
1293
1294 if getrealimag:
1295 reselem["real"] += v1
1296 reselem["imag"] += v2
1297 else:
1298 reselem["ampl"] += v1
1299 reselem["phase"] += v2
1300
1301 res.append(reselem)
1302
1303 if not usecommonmask: imask += 1
1304
1305 return res
1306
1307 @asaplog_post_dec
1308 def create_mask(self, *args, **kwargs):
1309 """\
1310 Compute and return a mask based on [min, max] windows.
1311 The specified windows are to be INCLUDED, when the mask is
1312 applied.
1313
1314 Parameters:
1315
1316 [min, max], [min2, max2], ...
1317 Pairs of start/end points (inclusive)specifying the regions
1318 to be masked
1319
1320 invert: optional argument. If specified as True,
1321 return an inverted mask, i.e. the regions
1322 specified are EXCLUDED
1323
1324 row: create the mask using the specified row for
1325 unit conversions, default is row=0
1326 only necessary if frequency varies over rows.
1327
1328 Examples::
1329
1330 scan.set_unit('channel')
1331 # a)
1332 msk = scan.create_mask([400, 500], [800, 900])
1333 # masks everything outside 400 and 500
1334 # and 800 and 900 in the unit 'channel'
1335
1336 # b)
1337 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1338 # masks the regions between 400 and 500
1339 # and 800 and 900 in the unit 'channel'
1340
1341 # c)
1342 #mask only channel 400
1343 msk = scan.create_mask([400])
1344
1345 """
1346 row = kwargs.get("row", 0)
1347 data = self._getabcissa(row)
1348 u = self._getcoordinfo()[0]
1349 if u == "":
1350 u = "channel"
1351 msg = "The current mask window unit is %s" % u
1352 i = self._check_ifs()
1353 if not i:
1354 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1355 asaplog.push(msg)
1356 n = self.nchan()
1357 msk = _n_bools(n, False)
1358 # test if args is a 'list' or a 'normal *args - UGLY!!!
1359
1360 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1361 and args or args[0]
1362 for window in ws:
1363 if len(window) == 1:
1364 window = [window[0], window[0]]
1365 if len(window) == 0 or len(window) > 2:
1366 raise ValueError("A window needs to be defined as [start(, end)]")
1367 if window[0] > window[1]:
1368 tmp = window[0]
1369 window[0] = window[1]
1370 window[1] = tmp
1371 for i in range(n):
1372 if data[i] >= window[0] and data[i] <= window[1]:
1373 msk[i] = True
1374 if kwargs.has_key('invert'):
1375 if kwargs.get('invert'):
1376 msk = mask_not(msk)
1377 return msk
1378
1379 def get_masklist(self, mask=None, row=0, silent=False):
1380 """\
1381 Compute and return a list of mask windows, [min, max].
1382
1383 Parameters:
1384
1385 mask: channel mask, created with create_mask.
1386
1387 row: calcutate the masklist using the specified row
1388 for unit conversions, default is row=0
1389 only necessary if frequency varies over rows.
1390
1391 Returns:
1392
1393 [min, max], [min2, max2], ...
1394 Pairs of start/end points (inclusive)specifying
1395 the masked regions
1396
1397 """
1398 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1399 raise TypeError("The mask should be list or tuple.")
1400 if len(mask) < 2:
1401 raise TypeError("The mask elements should be > 1")
1402 if self.nchan() != len(mask):
1403 msg = "Number of channels in scantable != number of mask elements"
1404 raise TypeError(msg)
1405 data = self._getabcissa(row)
1406 u = self._getcoordinfo()[0]
1407 if u == "":
1408 u = "channel"
1409 msg = "The current mask window unit is %s" % u
1410 i = self._check_ifs()
1411 if not i:
1412 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1413 if not silent:
1414 asaplog.push(msg)
1415 masklist=[]
1416 ist, ien = None, None
1417 ist, ien=self.get_mask_indices(mask)
1418 if ist is not None and ien is not None:
1419 for i in xrange(len(ist)):
1420 range=[data[ist[i]],data[ien[i]]]
1421 range.sort()
1422 masklist.append([range[0],range[1]])
1423 return masklist
1424
1425 def get_mask_indices(self, mask=None):
1426 """\
1427 Compute and Return lists of mask start indices and mask end indices.
1428
1429 Parameters:
1430
1431 mask: channel mask, created with create_mask.
1432
1433 Returns:
1434
1435 List of mask start indices and that of mask end indices,
1436 i.e., [istart1,istart2,....], [iend1,iend2,....].
1437
1438 """
1439 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1440 raise TypeError("The mask should be list or tuple.")
1441 if len(mask) < 2:
1442 raise TypeError("The mask elements should be > 1")
1443 istart=[]
1444 iend=[]
1445 if mask[0]: istart.append(0)
1446 for i in range(len(mask)-1):
1447 if not mask[i] and mask[i+1]:
1448 istart.append(i+1)
1449 elif mask[i] and not mask[i+1]:
1450 iend.append(i)
1451 if mask[len(mask)-1]: iend.append(len(mask)-1)
1452 if len(istart) != len(iend):
1453 raise RuntimeError("Numbers of mask start != mask end.")
1454 for i in range(len(istart)):
1455 if istart[i] > iend[i]:
1456 raise RuntimeError("Mask start index > mask end index")
1457 break
1458 return istart,iend
1459
1460 @asaplog_post_dec
    def parse_maskexpr(self,maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
            A dictionary of selected (valid) IF and masklist pairs,
            e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
            Returns None when the expression selects nothing valid.
        """
        # NOTE(review): on invalid input this only pushes an error to the
        # logger and falls through; presumably asaplog.post("ERROR")
        # aborts in the casapy environment -- confirm before relying on it.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                                offset=1,minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # collect only the IF numbers that actually exist in the data;
            # note 'selspws' deliberately leaks out of this loop and is
            # used below
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF so its
                        # frequency axis can be read
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # 'offset' is half a channel width, used to nudge
                        # open-ended ('<'/'>') selections off the boundary
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq == None stands for "whole spectrum of this IF"
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1586
1587 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1588 """
1589 Parameters:
1590 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1591 typestr : The type of the values in returned list
1592 ('integer' or 'float')
1593 offset : The offset value to subtract from or add to
1594 the boundary value if the selection string
1595 includes '<' or '>'
1596 minval, maxval : The minimum/maximum values to set if the
1597 selection string includes '<' or '>'.
1598 The list element is filled with None by default.
1599 Returns:
1600 A list of min/max pair of selections.
1601 Example:
1602 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1603 returns [[0,2],[5,7],[9,9]]
1604 """
1605 selgroups = selstr.split(';')
1606 sellists = []
1607 if typestr.lower().startswith('int'):
1608 formatfunc = int
1609 else:
1610 formatfunc = float
1611
1612 for currsel in selgroups:
1613 if currsel.find('~') > 0:
1614 minsel = formatfunc(currsel.split('~')[0].strip())
1615 maxsel = formatfunc(currsel.split('~')[1].strip())
1616 elif currsel.strip().startswith('<'):
1617 minsel = minval
1618 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1619 - formatfunc(offset)
1620 elif currsel.strip().startswith('>'):
1621 minsel = formatfunc(currsel.split('>')[1].strip()) \
1622 + formatfunc(offset)
1623 maxsel = maxval
1624 else:
1625 minsel = formatfunc(currsel)
1626 maxsel = formatfunc(currsel)
1627 sellists.append([minsel,maxsel])
1628 return sellists
1629
1630# def get_restfreqs(self):
1631# """
1632# Get the restfrequency(s) stored in this scantable.
1633# The return value(s) are always of unit 'Hz'
1634# Parameters:
1635# none
1636# Returns:
1637# a list of doubles
1638# """
1639# return list(self._getrestfreqs())
1640
1641 def get_restfreqs(self, ids=None):
1642 """\
1643 Get the restfrequency(s) stored in this scantable.
1644 The return value(s) are always of unit 'Hz'
1645
1646 Parameters:
1647
1648 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1649 be retrieved
1650
1651 Returns:
1652
1653 dictionary containing ids and a list of doubles for each id
1654
1655 """
1656 if ids is None:
1657 rfreqs={}
1658 idlist = self.getmolnos()
1659 for i in idlist:
1660 rfreqs[i]=list(self._getrestfreqs(i))
1661 return rfreqs
1662 else:
1663 if type(ids)==list or type(ids)==tuple:
1664 rfreqs={}
1665 for i in ids:
1666 rfreqs[i]=list(self._getrestfreqs(i))
1667 return rfreqs
1668 else:
1669 return list(self._getrestfreqs(ids))
1670 #return list(self._getrestfreqs(ids))
1671
1672 def set_restfreqs(self, freqs=None, unit='Hz'):
1673 """\
1674 Set or replace the restfrequency specified and
1675 if the 'freqs' argument holds a scalar,
1676 then that rest frequency will be applied to all the selected
1677 data. If the 'freqs' argument holds
1678 a vector, then it MUST be of equal or smaller length than
1679 the number of IFs (and the available restfrequencies will be
1680 replaced by this vector). In this case, *all* data have
1681 the restfrequency set per IF according
1682 to the corresponding value you give in the 'freqs' vector.
1683 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1684 IF 1 gets restfreq 2e9.
1685
1686 You can also specify the frequencies via a linecatalog.
1687
1688 Parameters:
1689
1690 freqs: list of rest frequency values or string idenitfiers
1691
1692 unit: unit for rest frequency (default 'Hz')
1693
1694
1695 Example::
1696
1697 # set the given restfrequency for the all currently selected IFs
1698 scan.set_restfreqs(freqs=1.4e9)
1699 # set restfrequencies for the n IFs (n > 1) in the order of the
1700 # list, i.e
1701 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1702 # len(list_of_restfreqs) == nIF
1703 # for nIF == 1 the following will set multiple restfrequency for
1704 # that IF
1705 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1706 # set multiple restfrequencies per IF. as a list of lists where
1707 # the outer list has nIF elements, the inner s arbitrary
1708 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1709
1710 *Note*:
1711
1712 To do more sophisticate Restfrequency setting, e.g. on a
1713 source and IF basis, use scantable.set_selection() before using
1714 this function::
1715
1716 # provided your scantable is called scan
1717 selection = selector()
1718 selection.set_name("ORION*")
1719 selection.set_ifs([1])
1720 scan.set_selection(selection)
1721 scan.set_restfreqs(freqs=86.6e9)
1722
1723 """
1724 varlist = vars()
1725 from asap import linecatalog
1726 # simple value
1727 if isinstance(freqs, int) or isinstance(freqs, float):
1728 self._setrestfreqs([freqs], [""], unit)
1729 # list of values
1730 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1731 # list values are scalars
1732 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1733 if len(freqs) == 1:
1734 self._setrestfreqs(freqs, [""], unit)
1735 else:
1736 # allow the 'old' mode of setting mulitple IFs
1737 sel = selector()
1738 savesel = self._getselection()
1739 iflist = self.getifnos()
1740 if len(freqs)>len(iflist):
1741 raise ValueError("number of elements in list of list "
1742 "exeeds the current IF selections")
1743 iflist = self.getifnos()
1744 for i, fval in enumerate(freqs):
1745 sel.set_ifs(iflist[i])
1746 self._setselection(sel)
1747 self._setrestfreqs([fval], [""], unit)
1748 self._setselection(savesel)
1749
1750 # list values are dict, {'value'=, 'name'=)
1751 elif isinstance(freqs[-1], dict):
1752 values = []
1753 names = []
1754 for d in freqs:
1755 values.append(d["value"])
1756 names.append(d["name"])
1757 self._setrestfreqs(values, names, unit)
1758 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1759 sel = selector()
1760 savesel = self._getselection()
1761 iflist = self.getifnos()
1762 if len(freqs)>len(iflist):
1763 raise ValueError("number of elements in list of list exeeds"
1764 " the current IF selections")
1765 for i, fval in enumerate(freqs):
1766 sel.set_ifs(iflist[i])
1767 self._setselection(sel)
1768 self._setrestfreqs(fval, [""], unit)
1769 self._setselection(savesel)
1770 # freqs are to be taken from a linecatalog
1771 elif isinstance(freqs, linecatalog):
1772 sel = selector()
1773 savesel = self._getselection()
1774 for i in xrange(freqs.nrow()):
1775 sel.set_ifs(iflist[i])
1776 self._setselection(sel)
1777 self._setrestfreqs([freqs.get_frequency(i)],
1778 [freqs.get_name(i)], "MHz")
1779 # ensure that we are not iterating past nIF
1780 if i == self.nif()-1: break
1781 self._setselection(savesel)
1782 else:
1783 return
1784 self._add_history("set_restfreqs", varlist)
1785
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # Thin delegation to the C++ base-class implementation. Note that,
        # unlike most mutators in this class, no history entry is recorded.
        Scantable.shift_refpix(self, delta)
1801
1802 @asaplog_post_dec
1803 def history(self, filename=None):
1804 """\
1805 Print the history. Optionally to a file.
1806
1807 Parameters:
1808
1809 filename: The name of the file to save the history to.
1810
1811 """
1812 hist = list(self._gethistory())
1813 out = "-"*80
1814 for h in hist:
1815 if h.startswith("---"):
1816 out = "\n".join([out, h])
1817 else:
1818 items = h.split("##")
1819 date = items[0]
1820 func = items[1]
1821 items = items[2:]
1822 out += "\n"+date+"\n"
1823 out += "Function: %s\n Parameters:" % (func)
1824 for i in items:
1825 if i == '':
1826 continue
1827 s = i.split("=")
1828 out += "\n %s = %s" % (s[0], s[1])
1829 out = "\n".join([out, "-"*80])
1830 if filename is not None:
1831 if filename is "":
1832 filename = 'scantable_history.txt'
1833 import os
1834 filename = os.path.expandvars(os.path.expanduser(filename))
1835 if not os.path.isdir(filename):
1836 data = open(filename, 'w')
1837 data.write(out)
1838 data.close()
1839 else:
1840 msg = "Illegal file name '%s'." % (filename)
1841 raise IOError(msg)
1842 return page(out)
1843 #
1844 # Maths business
1845 #
1846 @asaplog_post_dec
1847 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1848 """\
1849 Return the (time) weighted average of a scan.
1850
1851 *Note*:
1852
1853 in channels only - align if necessary
1854
1855 Parameters:
1856
1857 mask: an optional mask (only used for 'var' and 'tsys'
1858 weighting)
1859
1860 scanav: True averages each scan separately
1861 False (default) averages all scans together,
1862
1863 weight: Weighting scheme.
1864 'none' (mean no weight)
1865 'var' (1/var(spec) weighted)
1866 'tsys' (1/Tsys**2 weighted)
1867 'tint' (integration time weighted)
1868 'tintsys' (Tint/Tsys**2)
1869 'median' ( median averaging)
1870 The default is 'tint'
1871
1872 align: align the spectra in velocity before averaging. It takes
1873 the time of the first spectrum as reference time.
1874
1875 Example::
1876
1877 # time average the scantable without using a mask
1878 newscan = scan.average_time()
1879
1880 """
1881 varlist = vars()
1882 weight = weight or 'TINT'
1883 mask = mask or ()
1884 scanav = (scanav and 'SCAN') or 'NONE'
1885 scan = (self, )
1886
1887 if align:
1888 scan = (self.freq_align(insitu=False), )
1889 s = None
1890 if weight.upper() == 'MEDIAN':
1891 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1892 scanav))
1893 else:
1894 s = scantable(self._math._average(scan, mask, weight.upper(),
1895 scanav))
1896 s._add_history("average_time", varlist)
1897 return s
1898
1899 @asaplog_post_dec
1900 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1901 """\
1902 Return a scan where all spectra are converted to either
1903 Jansky or Kelvin depending upon the flux units of the scan table.
1904 By default the function tries to look the values up internally.
1905 If it can't find them (or if you want to over-ride), you must
1906 specify EITHER jyperk OR eta (and D which it will try to look up
1907 also if you don't set it). jyperk takes precedence if you set both.
1908
1909 Parameters:
1910
1911 jyperk: the Jy / K conversion factor
1912
1913 eta: the aperture efficiency
1914
1915 d: the geometric diameter (metres)
1916
1917 insitu: if False a new scantable is returned.
1918 Otherwise, the scaling is done in-situ
1919 The default is taken from .asaprc (False)
1920
1921 """
1922 if insitu is None: insitu = rcParams['insitu']
1923 self._math._setinsitu(insitu)
1924 varlist = vars()
1925 jyperk = jyperk or -1.0
1926 d = d or -1.0
1927 eta = eta or -1.0
1928 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1929 s._add_history("convert_flux", varlist)
1930 if insitu: self._assign(s)
1931 else: return s
1932
1933 @asaplog_post_dec
1934 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1935 """\
1936 Return a scan after applying a gain-elevation correction.
1937 The correction can be made via either a polynomial or a
1938 table-based interpolation (and extrapolation if necessary).
1939 You specify polynomial coefficients, an ascii table or neither.
1940 If you specify neither, then a polynomial correction will be made
1941 with built in coefficients known for certain telescopes (an error
1942 will occur if the instrument is not known).
1943 The data and Tsys are *divided* by the scaling factors.
1944
1945 Parameters:
1946
1947 poly: Polynomial coefficients (default None) to compute a
1948 gain-elevation correction as a function of
1949 elevation (in degrees).
1950
1951 filename: The name of an ascii file holding correction factors.
1952 The first row of the ascii file must give the column
1953 names and these MUST include columns
1954 "ELEVATION" (degrees) and "FACTOR" (multiply data
1955 by this) somewhere.
1956 The second row must give the data type of the
1957 column. Use 'R' for Real and 'I' for Integer.
1958 An example file would be
1959 (actual factors are arbitrary) :
1960
1961 TIME ELEVATION FACTOR
1962 R R R
1963 0.1 0 0.8
1964 0.2 20 0.85
1965 0.3 40 0.9
1966 0.4 60 0.85
1967 0.5 80 0.8
1968 0.6 90 0.75
1969
1970 method: Interpolation method when correcting from a table.
1971 Values are "nearest", "linear" (default), "cubic"
1972 and "spline"
1973
1974 insitu: if False a new scantable is returned.
1975 Otherwise, the scaling is done in-situ
1976 The default is taken from .asaprc (False)
1977
1978 """
1979
1980 if insitu is None: insitu = rcParams['insitu']
1981 self._math._setinsitu(insitu)
1982 varlist = vars()
1983 poly = poly or ()
1984 from os.path import expandvars
1985 filename = expandvars(filename)
1986 s = scantable(self._math._gainel(self, poly, filename, method))
1987 s._add_history("gain_el", varlist)
1988 if insitu:
1989 self._assign(s)
1990 else:
1991 return s
1992
1993 @asaplog_post_dec
1994 def freq_align(self, reftime=None, method='cubic', insitu=None):
1995 """\
1996 Return a scan where all rows have been aligned in frequency/velocity.
1997 The alignment frequency frame (e.g. LSRK) is that set by function
1998 set_freqframe.
1999
2000 Parameters:
2001
2002 reftime: reference time to align at. By default, the time of
2003 the first row of data is used.
2004
2005 method: Interpolation method for regridding the spectra.
2006 Choose from "nearest", "linear", "cubic" (default)
2007 and "spline"
2008
2009 insitu: if False a new scantable is returned.
2010 Otherwise, the scaling is done in-situ
2011 The default is taken from .asaprc (False)
2012
2013 """
2014 if insitu is None: insitu = rcParams["insitu"]
2015 self._math._setinsitu(insitu)
2016 varlist = vars()
2017 reftime = reftime or ""
2018 s = scantable(self._math._freq_align(self, reftime, method))
2019 s._add_history("freq_align", varlist)
2020 if insitu: self._assign(s)
2021 else: return s
2022
2023 @asaplog_post_dec
2024 def opacity(self, tau=None, insitu=None):
2025 """\
2026 Apply an opacity correction. The data
2027 and Tsys are multiplied by the correction factor.
2028
2029 Parameters:
2030
2031 tau: (list of) opacity from which the correction factor is
2032 exp(tau*ZD)
2033 where ZD is the zenith-distance.
2034 If a list is provided, it has to be of length nIF,
2035 nIF*nPol or 1 and in order of IF/POL, e.g.
2036 [opif0pol0, opif0pol1, opif1pol0 ...]
2037 if tau is `None` the opacities are determined from a
2038 model.
2039
2040 insitu: if False a new scantable is returned.
2041 Otherwise, the scaling is done in-situ
2042 The default is taken from .asaprc (False)
2043
2044 """
2045 if insitu is None: insitu = rcParams['insitu']
2046 self._math._setinsitu(insitu)
2047 varlist = vars()
2048 if not hasattr(tau, "__len__"):
2049 tau = [tau]
2050 s = scantable(self._math._opacity(self, tau))
2051 s._add_history("opacity", varlist)
2052 if insitu: self._assign(s)
2053 else: return s
2054
2055 @asaplog_post_dec
2056 def bin(self, width=5, insitu=None):
2057 """\
2058 Return a scan where all spectra have been binned up.
2059
2060 Parameters:
2061
2062 width: The bin width (default=5) in pixels
2063
2064 insitu: if False a new scantable is returned.
2065 Otherwise, the scaling is done in-situ
2066 The default is taken from .asaprc (False)
2067
2068 """
2069 if insitu is None: insitu = rcParams['insitu']
2070 self._math._setinsitu(insitu)
2071 varlist = vars()
2072 s = scantable(self._math._bin(self, width))
2073 s._add_history("bin", varlist)
2074 if insitu:
2075 self._assign(s)
2076 else:
2077 return s
2078
2079 @asaplog_post_dec
2080 def resample(self, width=5, method='cubic', insitu=None):
2081 """\
2082 Return a scan where all spectra have been binned up.
2083
2084 Parameters:
2085
2086 width: The bin width (default=5) in pixels
2087
2088 method: Interpolation method when correcting from a table.
2089 Values are "nearest", "linear", "cubic" (default)
2090 and "spline"
2091
2092 insitu: if False a new scantable is returned.
2093 Otherwise, the scaling is done in-situ
2094 The default is taken from .asaprc (False)
2095
2096 """
2097 if insitu is None: insitu = rcParams['insitu']
2098 self._math._setinsitu(insitu)
2099 varlist = vars()
2100 s = scantable(self._math._resample(self, method, width))
2101 s._add_history("resample", varlist)
2102 if insitu: self._assign(s)
2103 else: return s
2104
2105 @asaplog_post_dec
2106 def average_pol(self, mask=None, weight='none'):
2107 """\
2108 Average the Polarisations together.
2109
2110 Parameters:
2111
2112 mask: An optional mask defining the region, where the
2113 averaging will be applied. The output will have all
2114 specified points masked.
2115
2116 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2117 weighted), or 'tsys' (1/Tsys**2 weighted)
2118
2119 """
2120 varlist = vars()
2121 mask = mask or ()
2122 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2123 s._add_history("average_pol", varlist)
2124 return s
2125
2126 @asaplog_post_dec
2127 def average_beam(self, mask=None, weight='none'):
2128 """\
2129 Average the Beams together.
2130
2131 Parameters:
2132 mask: An optional mask defining the region, where the
2133 averaging will be applied. The output will have all
2134 specified points masked.
2135
2136 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2137 weighted), or 'tsys' (1/Tsys**2 weighted)
2138
2139 """
2140 varlist = vars()
2141 mask = mask or ()
2142 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2143 s._add_history("average_beam", varlist)
2144 return s
2145
2146 def parallactify(self, pflag):
2147 """\
2148 Set a flag to indicate whether this data should be treated as having
2149 been 'parallactified' (total phase == 0.0)
2150
2151 Parameters:
2152
2153 pflag: Bool indicating whether to turn this on (True) or
2154 off (False)
2155
2156 """
2157 varlist = vars()
2158 self._parallactify(pflag)
2159 self._add_history("parallactify", varlist)
2160
2161 @asaplog_post_dec
2162 def convert_pol(self, poltype=None):
2163 """\
2164 Convert the data to a different polarisation type.
2165 Note that you will need cross-polarisation terms for most conversions.
2166
2167 Parameters:
2168
2169 poltype: The new polarisation type. Valid types are:
2170 "linear", "circular", "stokes" and "linpol"
2171
2172 """
2173 varlist = vars()
2174 s = scantable(self._math._convertpol(self, poltype))
2175 s._add_history("convert_pol", varlist)
2176 return s
2177
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # Keep an unsmoothed copy so rejected rows can be restored below.
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # Interactively overlay the original and smoothed spectrum,
            # row by row, letting the user veto the smoothing per row.
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # Rejected: put the original (unsmoothed) spectrum back.
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2253
2254 @asaplog_post_dec
2255 def _parse_wn(self, wn):
2256 if isinstance(wn, list) or isinstance(wn, tuple):
2257 return wn
2258 elif isinstance(wn, int):
2259 return [ wn ]
2260 elif isinstance(wn, str):
2261 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2262 val = wn.split('-')
2263 val = [int(val[0]), int(val[1])]
2264 val.sort()
2265 res = [i for i in xrange(val[0], val[1]+1)]
2266 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2267 val = int(wn[2:])+1
2268 res = [i for i in xrange(val)]
2269 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2270 val = int(wn[:-2])+1
2271 res = [i for i in xrange(val)]
2272 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2273 val = int(wn[1:])
2274 res = [i for i in xrange(val)]
2275 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2276 val = int(wn[:-1])
2277 res = [i for i in xrange(val)]
2278 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2279 val = int(wn[2:])
2280 res = [i for i in xrange(val, self.nchan()/2+1)]
2281 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2282 val = int(wn[:-2])
2283 res = [i for i in xrange(val, self.nchan()/2+1)]
2284 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2285 val = int(wn[1:])+1
2286 res = [i for i in xrange(val, self.nchan()/2+1)]
2287 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2288 val = int(wn[:-1])+1
2289 res = [i for i in xrange(val, self.nchan()/2+1)]
2290
2291 return res
2292 else:
2293 msg = 'wrong value given for addwn/rejwn'
2294 raise RuntimeError(msg)
2295
2296
2297 @asaplog_post_dec
2298 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2299 fftmethod=None, fftthresh=None,
2300 addwn=None, rejwn=None, clipthresh=None,
2301 clipniter=None, plot=None,
2302 getresidual=None, showprogress=None,
2303 minnrow=None, outlog=None, blfile=None):
2304 """\
2305 Return a scan which has been baselined (all rows) with sinusoidal functions.
2306 Parameters:
2307 insitu: if False a new scantable is returned.
2308 Otherwise, the scaling is done in-situ
2309 The default is taken from .asaprc (False)
2310 mask: an optional mask
2311 applyfft: if True use some method, such as FFT, to find
2312 strongest sinusoidal components in the wavenumber
2313 domain to be used for baseline fitting.
2314 default is True.
2315 fftmethod: method to find the strong sinusoidal components.
2316 now only 'fft' is available and it is the default.
2317 fftthresh: the threshold to select wave numbers to be used for
2318 fitting from the distribution of amplitudes in the
2319 wavenumber domain.
2320 both float and string values accepted.
2321 given a float value, the unit is set to sigma.
2322 for string values, allowed formats include:
2323 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2324 'topx' (= the x strongest ones, e.g. 'top5').
2325 default is 3.0 (unit: sigma).
2326 addwn: the additional wave numbers to be used for fitting.
2327 list or integer value is accepted to specify every
2328 wave numbers. also string value can be used in case
2329 you need to specify wave numbers in a certain range,
2330 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2331 '<a' (= 0,1,...,a-2,a-1),
2332 '>=a' (= a, a+1, ... up to the maximum wave
2333 number corresponding to the Nyquist
2334 frequency for the case of FFT).
2335 default is [].
2336 rejwn: the wave numbers NOT to be used for fitting.
2337 can be set just as addwn but has higher priority:
2338 wave numbers which are specified both in addwn
2339 and rejwn will NOT be used. default is [].
2340 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2341 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2342 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2343 plot the fit and the residual. In this each
2344 indivual fit has to be approved, by typing 'y'
2345 or 'n'
2346 getresidual: if False, returns best-fit values instead of
2347 residual. (default is True)
2348 showprogress: show progress status for large data.
2349 default is True.
2350 minnrow: minimum number of input spectra to show.
2351 default is 1000.
2352 outlog: Output the coefficients of the best-fit
2353 function to logger (default is False)
2354 blfile: Name of a text file in which the best-fit
2355 parameter values to be written
2356 (default is '': no file/logger output)
2357
2358 Example:
2359 # return a scan baselined by a combination of sinusoidal curves having
2360 # wave numbers in spectral window up to 10,
2361 # also with 3-sigma clipping, iteration up to 4 times
2362 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2363
2364 Note:
2365 The best-fit parameter values output in logger and/or blfile are now
2366 based on specunit of 'channel'.
2367 """
2368
2369 try:
2370 varlist = vars()
2371
2372 if insitu is None: insitu = rcParams['insitu']
2373 if insitu:
2374 workscan = self
2375 else:
2376 workscan = self.copy()
2377
2378 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2379 if applyfft is None: applyfft = True
2380 if fftmethod is None: fftmethod = 'fft'
2381 if fftthresh is None: fftthresh = 3.0
2382 if addwn is None: addwn = []
2383 if rejwn is None: rejwn = []
2384 if clipthresh is None: clipthresh = 3.0
2385 if clipniter is None: clipniter = 0
2386 if plot is None: plot = False
2387 if getresidual is None: getresidual = True
2388 if showprogress is None: showprogress = True
2389 if minnrow is None: minnrow = 1000
2390 if outlog is None: outlog = False
2391 if blfile is None: blfile = ''
2392
2393 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2394 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2395 workscan._add_history('sinusoid_baseline', varlist)
2396
2397 if insitu:
2398 self._assign(workscan)
2399 else:
2400 return workscan
2401
2402 except RuntimeError, e:
2403 raise_fitting_failure_exception(e)
2404
2405
2406 @asaplog_post_dec
2407 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2408 addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2409 chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
2410 outlog=None, blfile=None):
2411 """\
2412 Return a scan which has been baselined (all rows) with sinusoidal functions.
2413 Spectral lines are detected first using linefinder and masked out
2414 to avoid them affecting the baseline solution.
2415
2416 Parameters:
2417 insitu: if False a new scantable is returned.
2418 Otherwise, the scaling is done in-situ
2419 The default is taken from .asaprc (False)
2420 mask: an optional mask retreived from scantable
2421 applyfft: if True use some method, such as FFT, to find
2422 strongest sinusoidal components in the wavenumber
2423 domain to be used for baseline fitting.
2424 default is True.
2425 fftmethod: method to find the strong sinusoidal components.
2426 now only 'fft' is available and it is the default.
2427 fftthresh: the threshold to select wave numbers to be used for
2428 fitting from the distribution of amplitudes in the
2429 wavenumber domain.
2430 both float and string values accepted.
2431 given a float value, the unit is set to sigma.
2432 for string values, allowed formats include:
2433 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2434 'topx' (= the x strongest ones, e.g. 'top5').
2435 default is 3.0 (unit: sigma).
2436 addwn: the additional wave numbers to be used for fitting.
2437 list or integer value is accepted to specify every
2438 wave numbers. also string value can be used in case
2439 you need to specify wave numbers in a certain range,
2440 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2441 '<a' (= 0,1,...,a-2,a-1),
2442 '>=a' (= a, a+1, ... up to the maximum wave
2443 number corresponding to the Nyquist
2444 frequency for the case of FFT).
2445 default is [].
2446 rejwn: the wave numbers NOT to be used for fitting.
2447 can be set just as addwn but has higher priority:
2448 wave numbers which are specified both in addwn
2449 and rejwn will NOT be used. default is [].
2450 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2451 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2452 edge: an optional number of channel to drop at
2453 the edge of spectrum. If only one value is
2454 specified, the same number will be dropped
2455 from both sides of the spectrum. Default
2456 is to keep all channels. Nested tuples
2457 represent individual edge selection for
2458 different IFs (a number of spectral channels
2459 can be different)
2460 threshold: the threshold used by line finder. It is
2461 better to keep it large as only strong lines
2462 affect the baseline solution.
2463 chan_avg_limit: a maximum number of consequtive spectral
2464 channels to average during the search of
2465 weak and broad lines. The default is no
2466 averaging (and no search for weak lines).
2467 If such lines can affect the fitted baseline
2468 (e.g. a high order polynomial is fitted),
2469 increase this parameter (usually values up
2470 to 8 are reasonable). Most users of this
2471 method should find the default value sufficient.
2472 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2473 plot the fit and the residual. In this each
2474 indivual fit has to be approved, by typing 'y'
2475 or 'n'
2476 getresidual: if False, returns best-fit values instead of
2477 residual. (default is True)
2478 showprogress: show progress status for large data.
2479 default is True.
2480 minnrow: minimum number of input spectra to show.
2481 default is 1000.
2482 outlog: Output the coefficients of the best-fit
2483 function to logger (default is False)
2484 blfile: Name of a text file in which the best-fit
2485 parameter values to be written
2486 (default is "": no file/logger output)
2487
2488 Example:
2489 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2490
2491 Note:
2492 The best-fit parameter values output in logger and/or blfile are now
2493 based on specunit of 'channel'.
2494 """
2495
2496 try:
2497 varlist = vars()
2498
2499 if insitu is None: insitu = rcParams['insitu']
2500 if insitu:
2501 workscan = self
2502 else:
2503 workscan = self.copy()
2504
2505 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2506 if applyfft is None: applyfft = True
2507 if fftmethod is None: fftmethod = 'fft'
2508 if fftthresh is None: fftthresh = 3.0
2509 if addwn is None: addwn = []
2510 if rejwn is None: rejwn = []
2511 if clipthresh is None: clipthresh = 3.0
2512 if clipniter is None: clipniter = 0
2513 if edge is None: edge = (0,0)
2514 if threshold is None: threshold = 3
2515 if chan_avg_limit is None: chan_avg_limit = 1
2516 if plot is None: plot = False
2517 if getresidual is None: getresidual = True
2518 if showprogress is None: showprogress = True
2519 if minnrow is None: minnrow = 1000
2520 if outlog is None: outlog = False
2521 if blfile is None: blfile = ''
2522
2523 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2524 workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2525 workscan._add_history("auto_sinusoid_baseline", varlist)
2526
2527 if insitu:
2528 self._assign(workscan)
2529 else:
2530 return workscan
2531
2532 except RuntimeError, e:
2533 raise_fitting_failure_exception(e)
2534
2535 @asaplog_post_dec
2536 def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2537 plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
2538 """\
2539 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
2540 Parameters:
2541 insitu: If False a new scantable is returned.
2542 Otherwise, the scaling is done in-situ
2543 The default is taken from .asaprc (False)
2544 mask: An optional mask
2545 npiece: Number of pieces. (default is 2)
2546 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2547 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2548 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2549 plot the fit and the residual. In this each
2550 indivual fit has to be approved, by typing 'y'
2551 or 'n'
2552 getresidual: if False, returns best-fit values instead of
2553 residual. (default is True)
2554 showprogress: show progress status for large data.
2555 default is True.
2556 minnrow: minimum number of input spectra to show.
2557 default is 1000.
2558 outlog: Output the coefficients of the best-fit
2559 function to logger (default is False)
2560 blfile: Name of a text file in which the best-fit
2561 parameter values to be written
2562 (default is "": no file/logger output)
2563
2564 Example:
2565 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2566 # also with 3-sigma clipping, iteration up to 4 times
2567 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2568
2569 Note:
2570 The best-fit parameter values output in logger and/or blfile are now
2571 based on specunit of 'channel'.
2572 """
2573
2574 try:
2575 varlist = vars()
2576
2577 if insitu is None: insitu = rcParams['insitu']
2578 if insitu:
2579 workscan = self
2580 else:
2581 workscan = self.copy()
2582
2583 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2584 if npiece is None: npiece = 2
2585 if clipthresh is None: clipthresh = 3.0
2586 if clipniter is None: clipniter = 0
2587 if plot is None: plot = False
2588 if getresidual is None: getresidual = True
2589 if showprogress is None: showprogress = True
2590 if minnrow is None: minnrow = 1000
2591 if outlog is None: outlog = False
2592 if blfile is None: blfile = ''
2593
2594 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2595 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
2596 workscan._add_history("cspline_baseline", varlist)
2597
2598 if insitu:
2599 self._assign(workscan)
2600 else:
2601 return workscan
2602
2603 except RuntimeError, e:
2604 raise_fitting_failure_exception(e)
2605
2606 @asaplog_post_dec
2607 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2608 edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2609 showprogress=None, minnrow=None, outlog=None, blfile=None):
2610 """\
2611 Return a scan which has been baselined (all rows) by cubic spline
2612 function (piecewise cubic polynomial).
2613 Spectral lines are detected first using linefinder and masked out
2614 to avoid them affecting the baseline solution.
2615
2616 Parameters:
2617 insitu: if False a new scantable is returned.
2618 Otherwise, the scaling is done in-situ
2619 The default is taken from .asaprc (False)
2620 mask: an optional mask retreived from scantable
2621 npiece: Number of pieces. (default is 2)
2622 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2623 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
2624 edge: an optional number of channel to drop at
2625 the edge of spectrum. If only one value is
2626 specified, the same number will be dropped
2627 from both sides of the spectrum. Default
2628 is to keep all channels. Nested tuples
2629 represent individual edge selection for
2630 different IFs (a number of spectral channels
2631 can be different)
2632 threshold: the threshold used by line finder. It is
2633 better to keep it large as only strong lines
2634 affect the baseline solution.
2635 chan_avg_limit: a maximum number of consequtive spectral
2636 channels to average during the search of
2637 weak and broad lines. The default is no
2638 averaging (and no search for weak lines).
2639 If such lines can affect the fitted baseline
2640 (e.g. a high order polynomial is fitted),
2641 increase this parameter (usually values up
2642 to 8 are reasonable). Most users of this
2643 method should find the default value sufficient.
2644 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2645 plot the fit and the residual. In this each
2646 indivual fit has to be approved, by typing 'y'
2647 or 'n'
2648 getresidual: if False, returns best-fit values instead of
2649 residual. (default is True)
2650 showprogress: show progress status for large data.
2651 default is True.
2652 minnrow: minimum number of input spectra to show.
2653 default is 1000.
2654 outlog: Output the coefficients of the best-fit
2655 function to logger (default is False)
2656 blfile: Name of a text file in which the best-fit
2657 parameter values to be written
2658 (default is "": no file/logger output)
2659
2660 Example:
2661 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2662
2663 Note:
2664 The best-fit parameter values output in logger and/or blfile are now
2665 based on specunit of 'channel'.
2666 """
2667
2668 try:
2669 varlist = vars()
2670
2671 if insitu is None: insitu = rcParams['insitu']
2672 if insitu:
2673 workscan = self
2674 else:
2675 workscan = self.copy()
2676
2677 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2678 if npiece is None: npiece = 2
2679 if clipthresh is None: clipthresh = 3.0
2680 if clipniter is None: clipniter = 0
2681 if edge is None: edge = (0, 0)
2682 if threshold is None: threshold = 3
2683 if chan_avg_limit is None: chan_avg_limit = 1
2684 if plot is None: plot = False
2685 if getresidual is None: getresidual = True
2686 if showprogress is None: showprogress = True
2687 if minnrow is None: minnrow = 1000
2688 if outlog is None: outlog = False
2689 if blfile is None: blfile = ''
2690
2691 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2692 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2693 clipniter,
2694 normalise_edge_param(edge),
2695 threshold,
2696 chan_avg_limit, getresidual,
2697 pack_progress_params(showprogress,
2698 minnrow),
2699 outlog, blfile)
2700 workscan._add_history("auto_cspline_baseline", varlist)
2701
2702 if insitu:
2703 self._assign(workscan)
2704 else:
2705 return workscan
2706
2707 except RuntimeError, e:
2708 raise_fitting_failure_exception(e)
2709
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in defaults for unspecified parameters
            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit row by row with the Python fitter
                # and let the user accept/reject each fit.
                # NOTE(review): params are appended to blfile only if the
                # file already exists — confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the per-row channel mask
                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole fit to C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2822
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in defaults for unspecified parameters
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run linefinder + fitter row by row and
                # let the user accept/reject each fit.
                # NOTE(review): params are appended to blfile only if the
                # file already exists — confirm this is intended
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # per-IF edge pairs are stored flattened: (lo, hi) at 2*IF
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # use the line-free mask computed by linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual() \
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole fit to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2971
2972 def _init_blinfo(self):
2973 """\
2974 Initialise the following three auxiliary members:
2975 blpars : parameters of the best-fit baseline,
2976 masklists : mask data (edge positions of masked channels) and
2977 actualmask : mask data (in boolean list),
2978 to keep for use later (including output to logger/text files).
2979 Used by poly_baseline() and auto_poly_baseline() in case of
2980 'plot=True'.
2981 """
2982 self.blpars = []
2983 self.masklists = []
2984 self.actualmask = []
2985 return
2986
2987 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2988 """\
2989 Append baseline-fitting related info to blpars, masklist and
2990 actualmask.
2991 """
2992 self.blpars.append(data_blpars)
2993 self.masklists.append(data_masklists)
2994 self.actualmask.append(data_actualmask)
2995 return
2996
    @asaplog_post_dec
    def rotate_linpolphase(self, angle):
        """\
        Rotate the phase of the complex polarization O=Q+iU correlation.
        This is always done in situ in the raw data. So if you call this
        function more than once then each call rotates the phase further.

        Parameters:

            angle:   The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_linpolphase(2.3)

        """
        varlist = vars()  # snapshot of the arguments for the history entry
        self._math._rotate_linpolphase(self, angle)  # in-situ, in the C++ layer
        self._add_history("rotate_linpolphase", varlist)
        return
3017
    @asaplog_post_dec
    def rotate_xyphase(self, angle):
        """\
        Rotate the phase of the XY correlation.  This is always done in situ
        in the data.  So if you call this function more than once
        then each call rotates the phase further.

        Parameters:

            angle:   The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_xyphase(2.3)

        """
        varlist = vars()  # snapshot of the arguments for the history entry
        self._math._rotate_xyphase(self, angle)  # in-situ, in the C++ layer
        self._add_history("rotate_xyphase", varlist)
        return
3038
    @asaplog_post_dec
    def swap_linears(self):
        """\
        Swap the linear polarisations XX and YY, or better the first two
        polarisations as this also works for circulars.
        """
        varlist = vars()  # snapshot of the arguments for the history entry
        self._math._swap_linears(self)  # in-situ, in the C++ layer
        self._add_history("swap_linears", varlist)
        return
3049
    @asaplog_post_dec
    def invert_phase(self):
        """\
        Invert the phase of the complex polarisation
        """
        varlist = vars()  # snapshot of the arguments for the history entry
        self._math._invert_phase(self)  # in-situ, in the C++ layer
        self._add_history("invert_phase", varlist)
        return
3059
    @asaplog_post_dec
    def add(self, offset, insitu=None):
        """\
        Return a scan where all spectra have the offset added

        Parameters:

            offset:      the offset

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()  # snapshot of the arguments for the history entry
        # tsys=False: the offset is applied to the spectra only, not to Tsys
        s = scantable(self._math._unaryop(self, offset, "ADD", False))
        s._add_history("add", varlist)
        if insitu:
            self._assign(s)
        else:
            return s  # a value is only returned when insitu is False
3083
    @asaplog_post_dec
    def scale(self, factor, tsys=True, insitu=None):
        """\

        Return a scan where all spectra are scaled by the given 'factor'

        Parameters:

            factor:      the scaling factor (float scalar, 1D float
                         list/array, or 2D float list/array)

            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)

            tsys:        if True (default) then apply the operation to Tsys
                         as well as the data

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()  # snapshot of the arguments for the history entry
        s = None
        import numpy
        # dispatch on the shape of 'factor': 2D -> per-row operation via
        # asapmath._array2dOp, 1D -> array op, scalar -> unary op
        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
                from asapmath import _array2dOp
                s = _array2dOp( self.copy(), factor, "MUL", tsys )
            else:
                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
        else:
            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
        s._add_history("scale", varlist)
        if insitu:
            self._assign(s)
        else:
            return s  # a value is only returned when insitu is False
3120
    def set_sourcetype(self, match, matchtype="pattern",
                       sourcetype="reference"):
        """\
        Set the type of the source to be an source or reference scan
        using the provided pattern.

        Parameters:

            match:          a Unix style pattern, regular expression or
                            selector

            matchtype:      'pattern' (default) UNIX style pattern or
                            'regex' regular expression

            sourcetype:     the type of the source to use (source/reference)

        Raises:

            ValueError for an unrecognised sourcetype or matchtype.
        """
        varlist = vars()  # snapshot taken before matchtype is normalised below
        basesel = self.get_selection()
        stype = -1
        # map the sourcetype string onto the internal code (1=reference, 0=source)
        if sourcetype.lower().startswith("r"):
            stype = 1
        elif sourcetype.lower().startswith("s"):
            stype = 0
        else:
            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
        if matchtype.lower().startswith("p"):
            matchtype = "pattern"
        elif matchtype.lower().startswith("r"):
            matchtype = "regex"
        else:
            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
        sel = selector()
        if isinstance(match, selector):
            sel = match
        else:
            # build a TaQL query matching source names against the pattern
            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
        # temporarily narrow the selection, tag the selected rows, restore
        self.set_selection(basesel+sel)
        self._setsourcetype(stype)
        self.set_selection(basesel)
        self._add_history("set_sourcetype", varlist)
3161
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve:       you can preserve (default) the continuum or
                            remove it.  The equations used are

                            preserve: Output = Toff * (on/off) - Toff

                            remove:   Output = Toff * (on/off) - Ton

            mode:           the on/off detection mode
                            'paired' (default)
                            identifies 'off' scans by the
                            trailing '_R' (Mopra/Parkes) or
                            '_e'/'_w' (Tid) and matches
                            on/off pairs from the observing pattern
                            'time'
                            finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()  # snapshot of the arguments for the history entry
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # split the data into 'off' and 'on' copies by source type;
            # the original selection is restored by @preserve_selection
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
3210
    @asaplog_post_dec
    def mx_quotient(self, mask = None, weight='median', preserve=True):
        """\
        Form a quotient using "off" beams when observing in "MX" mode.

        Parameters:

            mask:           an optional mask to be used when weight == 'stddev'

            weight:         How to average the off beams.  Default is 'median'.

            preserve:       you can preserve (default) the continuum or
                            remove it.  The equations used are:

                            preserve: Output = Toff * (on/off) - Toff

                            remove:   Output = Toff * (on/off) - Ton

        """
        mask = mask or ()  # None (or an empty mask) means 'no mask'
        varlist = vars()  # snapshot of the arguments for the history entry
        on = scantable(self._math._mx_extract(self, 'on'))
        preoff = scantable(self._math._mx_extract(self, 'off'))
        # average all 'off' beams into a single reference
        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
        from asapmath import quotient
        q = quotient(on, off, preserve)
        q._add_history("mx_quotient", varlist)
        return q
3239
    @asaplog_post_dec
    def freq_switch(self, insitu=None):
        """\
        Apply frequency switching to the data.

        Parameters:

            insitu:      if False a new scantable is returned.
                         Otherwise, the switching is done in-situ
                         The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()  # snapshot of the arguments for the history entry
        s = scantable(self._math._freqswitch(self))
        s._add_history("freq_switch", varlist)
        if insitu:
            self._assign(s)
        else:
            return s  # a value is only returned when insitu is False
3261
    @asaplog_post_dec
    def recalc_azel(self):
        """Recalculate the azimuth and elevation for each position."""
        varlist = vars()  # snapshot of the arguments for the history entry
        self._recalcazel()  # in-situ, in the C++ layer
        self._add_history("recalc_azel", varlist)
        return
3269
    @asaplog_post_dec
    def __add__(self, other):
        """\
        Add a scantable, a float scalar, or a 1D/2D list/array to the
        spectra and return the result as a new scantable.
        Note that only *float* scalars are accepted; an int falls through
        to the TypeError below.
        """
        varlist = vars()  # snapshot of the arguments for the history entry
        s = None
        if isinstance(other, scantable):
            s = scantable(self._math._binaryop(self, other, "ADD"))
        elif isinstance(other, float):
            s = scantable(self._math._unaryop(self, other, "ADD", False))
        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
            # 2D input -> per-row operation via asapmath._array2dOp
            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
                from asapmath import _array2dOp
                s = _array2dOp( self.copy(), other, "ADD", False )
            else:
                s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
        else:
            raise TypeError("Other input is not a scantable or float value")
        s._add_history("operator +", varlist)
        return s
3288
    @asaplog_post_dec
    def __sub__(self, other):
        """
        Subtract a scantable, a float scalar, or a 1D/2D list/array from
        the spectra; implicit on all axes and on Tsys.
        Note that only *float* scalars are accepted; an int falls through
        to the TypeError below.
        """
        varlist = vars()  # snapshot of the arguments for the history entry
        s = None
        if isinstance(other, scantable):
            s = scantable(self._math._binaryop(self, other, "SUB"))
        elif isinstance(other, float):
            s = scantable(self._math._unaryop(self, other, "SUB", False))
        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
            # 2D input -> per-row operation via asapmath._array2dOp
            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
                from asapmath import _array2dOp
                s = _array2dOp( self.copy(), other, "SUB", False )
            else:
                s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
        else:
            raise TypeError("Other input is not a scantable or float value")
        s._add_history("operator -", varlist)
        return s
3310
    @asaplog_post_dec
    def __mul__(self, other):
        """
        Multiply the spectra by a scantable, a float scalar, or a 1D/2D
        list/array; implicit on all axes and on Tsys.
        Note that only *float* scalars are accepted; an int falls through
        to the TypeError below.
        """
        varlist = vars()  # snapshot of the arguments for the history entry
        s = None
        if isinstance(other, scantable):
            s = scantable(self._math._binaryop(self, other, "MUL"))
        elif isinstance(other, float):
            s = scantable(self._math._unaryop(self, other, "MUL", False))
        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
            # 2D input -> per-row operation via asapmath._array2dOp
            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
                from asapmath import _array2dOp
                s = _array2dOp( self.copy(), other, "MUL", False )
            else:
                s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
        else:
            raise TypeError("Other input is not a scantable or float value")
        s._add_history("operator *", varlist)
        return s
3332
3333
    @asaplog_post_dec
    def __div__(self, other):
        """
        Divide the spectra by a scantable, a float scalar, or a 1D/2D
        list/array; implicit on all axes and on Tsys.
        Note that only *float* scalars are accepted; an int falls through
        to the TypeError below.
        """
        varlist = vars()  # snapshot of the arguments for the history entry
        s = None
        if isinstance(other, scantable):
            s = scantable(self._math._binaryop(self, other, "DIV"))
        elif isinstance(other, float):
            # refuse an exact-zero divisor up front
            if other == 0.0:
                raise ZeroDivisionError("Dividing by zero is not recommended")
            s = scantable(self._math._unaryop(self, other, "DIV", False))
        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
            # 2D input -> per-row operation via asapmath._array2dOp
            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
                from asapmath import _array2dOp
                s = _array2dOp( self.copy(), other, "DIV", False )
            else:
                s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
        else:
            raise TypeError("Other input is not a scantable or float value")
        s._add_history("operator /", varlist)
        return s
3357
3358 @asaplog_post_dec
3359 def get_fit(self, row=0):
3360 """\
3361 Print or return the stored fits for a row in the scantable
3362
3363 Parameters:
3364
3365 row: the row which the fit has been applied to.
3366
3367 """
3368 if row > self.nrow():
3369 return
3370 from asap.asapfit import asapfit
3371 fit = asapfit(self._getfit(row))
3372 asaplog.push( '%s' %(fit) )
3373 return fit.as_dict()
3374
3375 def flag_nans(self):
3376 """\
3377 Utility function to flag NaN values in the scantable.
3378 """
3379 import numpy
3380 basesel = self.get_selection()
3381 for i in range(self.nrow()):
3382 sel = self.get_row_selector(i)
3383 self.set_selection(basesel+sel)
3384 nans = numpy.isnan(self._getspectrum(0))
3385 if numpy.any(nans):
3386 bnans = [ bool(v) for v in nans]
3387 self.flag(bnans)
3388 self.set_selection(basesel)
3389
3390 def get_row_selector(self, rowno):
3391 #return selector(beams=self.getbeam(rowno),
3392 # ifs=self.getif(rowno),
3393 # pols=self.getpol(rowno),
3394 # scans=self.getscan(rowno),
3395 # cycles=self.getcycle(rowno))
3396 return selector(rows=[rowno])
3397
    def _add_history(self, funcname, parameters):
        """\
        Append an entry to the scantable's history table.

        The entry is a single '##'-separated string of the form
        ``<timestamp>##<funcname>##<par>=<value>##...``; *parameters* is
        typically the ``vars()`` snapshot taken at the top of the calling
        method.  No-op when rcParams['scantable.history'] is disabled.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep
        # NOTE: mutates the caller's dict to drop the uninteresting 'self'
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of dict values is flattened into k2=v2 pairs
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # masks are recorded as [start, end) segments
                        # rather than the full boolean list
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3437
3438
3439 def _zip_mask(self, mask):
3440 mask = list(mask)
3441 i = 0
3442 segments = []
3443 while mask[i:].count(1):
3444 i += mask[i:].index(1)
3445 if mask[i:].count(0):
3446 j = i + mask[i:].index(0)
3447 else:
3448 j = len(mask)
3449 segments.append([i, j])
3450 i = j
3451 return segments
3452
3453 def _get_ordinate_label(self):
3454 fu = "("+self.get_fluxunit()+")"
3455 import re
3456 lbl = "Intensity"
3457 if re.match(".K.", fu):
3458 lbl = "Brightness Temperature "+ fu
3459 elif re.match(".Jy.", fu):
3460 lbl = "Flux density "+ fu
3461 return lbl
3462
3463 def _check_ifs(self):
3464 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3465 nchans = [self.nchan(i) for i in self.getifnos()]
3466 nchans = filter(lambda t: t > 0, nchans)
3467 return (sum(nchans)/len(nchans) == nchans[0])
3468
3469 @asaplog_post_dec
3470 #def _fill(self, names, unit, average, getpt, antenna):
3471 def _fill(self, names, unit, average, opts={}):
3472 first = True
3473 fullnames = []
3474 for name in names:
3475 name = os.path.expandvars(name)
3476 name = os.path.expanduser(name)
3477 if not os.path.exists(name):
3478 msg = "File '%s' does not exists" % (name)
3479 raise IOError(msg)
3480 fullnames.append(name)
3481 if average:
3482 asaplog.push('Auto averaging integrations')
3483 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3484 for name in fullnames:
3485 tbl = Scantable(stype)
3486 if is_ms( name ):
3487 r = msfiller( tbl )
3488 else:
3489 r = filler( tbl )
3490 rx = rcParams['scantable.reference']
3491 r.setreferenceexpr(rx)
3492 #r = filler(tbl)
3493 #rx = rcParams['scantable.reference']
3494 #r.setreferenceexpr(rx)
3495 msg = "Importing %s..." % (name)
3496 asaplog.push(msg, False)
3497 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3498 r.open(name, opts)# antenna, -1, -1, getpt)
3499 r.fill()
3500 if average:
3501 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3502 if not first:
3503 tbl = self._math._merge([self, tbl])
3504 Scantable.__init__(self, tbl)
3505 r.close()
3506 del r, tbl
3507 first = False
3508 #flush log
3509 asaplog.post()
3510 if unit is not None:
3511 self.set_fluxunit(unit)
3512 if not is_casapy():
3513 self.set_freqframe(rcParams['scantable.freqframe'])
3514
3515
3516 def __getitem__(self, key):
3517 if key < 0:
3518 key += self.nrow()
3519 if key >= self.nrow():
3520 raise IndexError("Row index out of range.")
3521 return self._getspectrum(key)
3522
3523 def __setitem__(self, key, value):
3524 if key < 0:
3525 key += self.nrow()
3526 if key >= self.nrow():
3527 raise IndexError("Row index out of range.")
3528 if not hasattr(value, "__len__") or \
3529 len(value) > self.nchan(self.getif(key)):
3530 raise ValueError("Spectrum length doesn't match.")
3531 return self._setspectrum(value, key)
3532
    def __len__(self):
        # len(scan) is the number of rows in the table
        return self.nrow()
3535
3536 def __iter__(self):
3537 for i in range(len(self)):
3538 yield self[i]
Note: See TracBrowser for help on using the repository browser.