source: trunk/python/scantable.py@ 2410

Last change on this file since 2410 was 2410, checked in by Takeshi Nakazato, 13 years ago

New Development: No

JIRA Issue: Yes CAS-3606/CAS-3757

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: sdbaseline unit test

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Fixed a bug where the baseline functions did not work when multiple IFs with
different nchan were processed at once.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 137.3 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even if *func* raises."""
    @wraps_dec(func)
    def _wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(saved)
        return result
    return _wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # The table type is determined from the first line of table.info;
        # anything that is not a Measurement Set counts as a scantable.
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # close even if readline raises (original leaked the handle)
            f.close()
        return l.find('Measurement Set') == -1
    return False
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # MS is identified by the first line of table.info
        f = open(filename+'/table.info')
        try:
            l = f.readline()
        finally:
            # close even if readline raises (original leaked the handle)
            f.close()
        return l.find('Measurement Set') != -1
    return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

    Note that when len(edge) == 2, ONLY ONE of the two elements may be
    abbreviated to a single integer (e.g. [5,[10,10]]); a plain [5,10]
    is interpreted as one pair common to all IFs.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation ----------------------------------------------------
    # NOTE: the raise statements use the parenthesized form, which is
    # valid in both Python 2 and 3 (the original 'raise E, "msg"' syntax
    # is Python-2-only, and its messages contained line-continuation
    # whitespace embedded inside the string literal).
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed to make individual "
                             "selection for different IFs.")

    # --- canonicalise trivial shapes -----------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # 'commonedge' means one [left,right] pair shared by all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: expand bare ints to [n,n] pairs and flatten
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Handle a fitting failure: log it when rc 'verbose' is set,
    otherwise re-raise it as a RuntimeError with an explanatory note."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Serialise the progress options into the 'lowercasebool,int'
    string form expected by the C++ layer."""
    return "%s,%s" % (str(showprogress).lower(), str(minnrow))
175
176class scantable(Scantable):
177 """\
178 The ASAP container for scans (single-dish data).
179 """
180
181 @asaplog_post_dec
182 def __init__(self, filename, average=None, unit=None, parallactify=None,
183 **args):
184 """\
185 Create a scantable from a saved one or make a reference
186
187 Parameters:
188
189 filename: the name of an asap table on disk
190 or
191 the name of a rpfits/sdfits/ms file
192 (integrations within scans are auto averaged
193 and the whole file is read) or
194 [advanced] a reference to an existing scantable
195
196 average: average all integrations withinb a scan on read.
197 The default (True) is taken from .asaprc.
198
199 unit: brightness unit; must be consistent with K or Jy.
200 Over-rides the default selected by the filler
201 (input rpfits/sdfits/ms) or replaces the value
202 in existing scantables
203
204 antenna: for MeasurementSet input data only:
205 Antenna selection. integer (id) or string
206 (name or id).
207
208 parallactify: Indicate that the data had been parallactified.
209 Default (false) is taken from rc file.
210
211 """
212 if average is None:
213 average = rcParams['scantable.autoaverage']
214 parallactify = parallactify or rcParams['scantable.parallactify']
215 varlist = vars()
216 from asap._asap import stmath
217 self._math = stmath( rcParams['insitu'] )
218 if isinstance(filename, Scantable):
219 Scantable.__init__(self, filename)
220 else:
221 if isinstance(filename, str):
222 filename = os.path.expandvars(filename)
223 filename = os.path.expanduser(filename)
224 if not os.path.exists(filename):
225 s = "File '%s' not found." % (filename)
226 raise IOError(s)
227 if is_scantable(filename):
228 ondisk = rcParams['scantable.storage'] == 'disk'
229 Scantable.__init__(self, filename, ondisk)
230 if unit is not None:
231 self.set_fluxunit(unit)
232 if average:
233 self._assign( self.average_time( scanav=True ) )
234 # do not reset to the default freqframe
235 #self.set_freqframe(rcParams['scantable.freqframe'])
236 elif is_ms(filename):
237 # Measurement Set
238 opts={'ms': {}}
239 mskeys=['getpt','antenna']
240 for key in mskeys:
241 if key in args.keys():
242 opts['ms'][key] = args[key]
243 self._fill([filename], unit, average, opts)
244 elif os.path.isfile(filename):
245 self._fill([filename], unit, average)
246 # only apply to new data not "copy constructor"
247 self.parallactify(parallactify)
248 else:
249 msg = "The given file '%s'is not a valid " \
250 "asap table." % (filename)
251 raise IOError(msg)
252 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
253 and isinstance(filename[-1], str):
254 self._fill(filename, unit, average)
255 self.parallactify(parallactify)
256 self._add_history("scantable", varlist)
257
258 @asaplog_post_dec
259 def save(self, name=None, format=None, overwrite=False):
260 """\
261 Store the scantable on disk. This can be an asap (aips++) Table,
262 SDFITS or MS2 format.
263
264 Parameters:
265
266 name: the name of the outputfile. For format "ASCII"
267 this is the root file name (data in 'name'.txt
268 and header in 'name'_header.txt)
269
270 format: an optional file format. Default is ASAP.
271 Allowed are:
272
273 * 'ASAP' (save as ASAP [aips++] Table),
274 * 'SDFITS' (save as SDFITS file)
275 * 'ASCII' (saves as ascii text file)
276 * 'MS2' (saves as an casacore MeasurementSet V2)
277 * 'FITS' (save as image FITS - not readable by
278 class)
279 * 'CLASS' (save as FITS readable by CLASS)
280
281 overwrite: If the file should be overwritten if it exists.
282 The default False is to return with warning
283 without writing the output. USE WITH CARE.
284
285 Example::
286
287 scan.save('myscan.asap')
288 scan.save('myscan.sdfits', 'SDFITS')
289
290 """
291 from os import path
292 format = format or rcParams['scantable.save']
293 suffix = '.'+format.lower()
294 if name is None or name == "":
295 name = 'scantable'+suffix
296 msg = "No filename given. Using default name %s..." % name
297 asaplog.push(msg)
298 name = path.expandvars(name)
299 if path.isfile(name) or path.isdir(name):
300 if not overwrite:
301 msg = "File %s exists." % name
302 raise IOError(msg)
303 format2 = format.upper()
304 if format2 == 'ASAP':
305 self._save(name)
306 elif format2 == 'MS2':
307 msopt = {'ms': {'overwrite': overwrite } }
308 from asap._asap import mswriter
309 writer = mswriter( self )
310 writer.write( name, msopt )
311 else:
312 from asap._asap import stwriter as stw
313 writer = stw(format2)
314 writer.write(self, name)
315 return
316
317 def copy(self):
318 """Return a copy of this scantable.
319
320 *Note*:
321
322 This makes a full (deep) copy. scan2 = scan1 makes a reference.
323
324 Example::
325
326 copiedscan = scan.copy()
327
328 """
329 sd = scantable(Scantable._copy(self))
330 return sd
331
332 def drop_scan(self, scanid=None):
333 """\
334 Return a new scantable where the specified scan number(s) has(have)
335 been dropped.
336
337 Parameters:
338
339 scanid: a (list of) scan number(s)
340
341 """
342 from asap import _is_sequence_or_number as _is_valid
343 from asap import _to_list
344 from asap import unique
345 if not _is_valid(scanid):
346 raise RuntimeError( 'Please specify a scanno to drop from the'
347 ' scantable' )
348 scanid = _to_list(scanid)
349 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
350 for sid in scanid: allscans.remove(sid)
351 if len(allscans) == 0:
352 raise ValueError("Can't remove all scans")
353 sel = selector(scans=allscans)
354 return self._select_copy(sel)
355
356 def _select_copy(self, selection):
357 orig = self.get_selection()
358 self.set_selection(orig+selection)
359 cp = self.copy()
360 self.set_selection(orig)
361 return cp
362
363 def get_scan(self, scanid=None):
364 """\
365 Return a specific scan (by scanno) or collection of scans (by
366 source name) in a new scantable.
367
368 *Note*:
369
370 See scantable.drop_scan() for the inverse operation.
371
372 Parameters:
373
374 scanid: a (list of) scanno or a source name, unix-style
375 patterns are accepted for source name matching, e.g.
376 '*_R' gets all 'ref scans
377
378 Example::
379
380 # get all scans containing the source '323p459'
381 newscan = scan.get_scan('323p459')
382 # get all 'off' scans
383 refscans = scan.get_scan('*_R')
384 # get a susbset of scans by scanno (as listed in scan.summary())
385 newscan = scan.get_scan([0, 2, 7, 10])
386
387 """
388 if scanid is None:
389 raise RuntimeError( 'Please specify a scan no or name to '
390 'retrieve from the scantable' )
391 try:
392 bsel = self.get_selection()
393 sel = selector()
394 if type(scanid) is str:
395 sel.set_name(scanid)
396 return self._select_copy(sel)
397 elif type(scanid) is int:
398 sel.set_scans([scanid])
399 return self._select_copy(sel)
400 elif type(scanid) is list:
401 sel.set_scans(scanid)
402 return self._select_copy(sel)
403 else:
404 msg = "Illegal scanid type, use 'int' or 'list' if ints."
405 raise TypeError(msg)
406 except RuntimeError:
407 raise
408
409 def __str__(self):
410 tempFile = tempfile.NamedTemporaryFile()
411 Scantable._summary(self, tempFile.name)
412 tempFile.seek(0)
413 asaplog.clear()
414 return tempFile.file.read()
415
416 @asaplog_post_dec
417 def summary(self, filename=None):
418 """\
419 Print a summary of the contents of this scantable.
420
421 Parameters:
422
423 filename: the name of a file to write the putput to
424 Default - no file output
425
426 """
427# info = Scantable._summary(self)
428 if filename is not None:
429 if filename is "":
430 filename = 'scantable_summary.txt'
431 from os.path import expandvars, isdir
432 filename = expandvars(filename)
433# if not isdir(filename):
434# data = open(filename, 'w')
435# data.write(info)
436# data.close()
437# else:
438 if isdir(filename):
439 msg = "Illegal file name '%s'." % (filename)
440 raise IOError(msg)
441 else:
442 filename = ""
443 Scantable._summary(self, filename)
444# info = Scantable._summary(self, filename)
445# return page(info)
446
447 def get_spectrum(self, rowno):
448 """Return the spectrum for the current row in the scantable as a list.
449
450 Parameters:
451
452 rowno: the row number to retrieve the spectrum from
453
454 """
455 return self._getspectrum(rowno)
456
457 def get_mask(self, rowno):
458 """Return the mask for the current row in the scantable as a list.
459
460 Parameters:
461
462 rowno: the row number to retrieve the mask from
463
464 """
465 return self._getmask(rowno)
466
467 def set_spectrum(self, spec, rowno):
468 """Set the spectrum for the current row in the scantable.
469
470 Parameters:
471
472 spec: the new spectrum
473
474 rowno: the row number to set the spectrum for
475
476 """
477 assert(len(spec) == self.nchan(self.getif(rowno)))
478 return self._setspectrum(spec, rowno)
479
480 def get_coordinate(self, rowno):
481 """Return the (spectral) coordinate for a a given 'rowno'.
482
483 *Note*:
484
485 * This coordinate is only valid until a scantable method modifies
486 the frequency axis.
487 * This coordinate does contain the original frequency set-up
488 NOT the new frame. The conversions however are done using the user
489 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
490 use scantable.freq_align first. Without it there is no closure,
491 i.e.::
492
493 c = myscan.get_coordinate(0)
494 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
495
496 Parameters:
497
498 rowno: the row number for the spectral coordinate
499
500 """
501 return coordinate(Scantable.get_coordinate(self, rowno))
502
503 def get_selection(self):
504 """\
505 Get the selection object currently set on this scantable.
506
507 Example::
508
509 sel = scan.get_selection()
510 sel.set_ifs(0) # select IF 0
511 scan.set_selection(sel) # apply modified selection
512
513 """
514 return selector(self._getselection())
515
516 def set_selection(self, selection=None, **kw):
517 """\
518 Select a subset of the data. All following operations on this scantable
519 are only applied to thi selection.
520
521 Parameters:
522
523 selection: a selector object (default unset the selection), or
524 any combination of "pols", "ifs", "beams", "scans",
525 "cycles", "name", "query"
526
527 Examples::
528
529 sel = selector() # create a selection object
530 self.set_scans([0, 3]) # select SCANNO 0 and 3
531 scan.set_selection(sel) # set the selection
532 scan.summary() # will only print summary of scanno 0 an 3
533 scan.set_selection() # unset the selection
534 # or the equivalent
535 scan.set_selection(scans=[0,3])
536 scan.summary() # will only print summary of scanno 0 an 3
537 scan.set_selection() # unset the selection
538
539 """
540 if selection is None:
541 # reset
542 if len(kw) == 0:
543 selection = selector()
544 else:
545 # try keywords
546 for k in kw:
547 if k not in selector.fields:
548 raise KeyError("Invalid selection key '%s', "
549 "valid keys are %s" % (k,
550 selector.fields))
551 selection = selector(**kw)
552 self._setselection(selection)
553
554 def get_row(self, row=0, insitu=None):
555 """\
556 Select a row in the scantable.
557 Return a scantable with single row.
558
559 Parameters:
560
561 row: row no of integration, default is 0.
562 insitu: if False a new scantable is returned. Otherwise, the
563 scaling is done in-situ. The default is taken from .asaprc
564 (False)
565
566 """
567 if insitu is None:
568 insitu = rcParams['insitu']
569 if not insitu:
570 workscan = self.copy()
571 else:
572 workscan = self
573 # Select a row
574 sel = selector()
575 sel.set_rows([row])
576 workscan.set_selection(sel)
577 if not workscan.nrow() == 1:
578 msg = "Could not identify single row. %d rows selected." \
579 % (workscan.nrow())
580 raise RuntimeError(msg)
581 if insitu:
582 self._assign(workscan)
583 else:
584 return workscan
585
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'.
                     The '_abc' variants return the abcissa value at
                     which the extremum occurs instead of the extremum.

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Returns:

            a list of statistic values (one per processed row).

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        # the mask is channel-based, so all selected IFs must share nchan
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # rtnabc: caller asked for the abcissa of the extremum ('*_abc')
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # getchan: min/max stats also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        # for the '_abc' variants the values are collected per-row below
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report line per row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # report the abcissa value; annotate with the spectral value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that are not degenerate
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        import os
        # route the report through a temp file so asaplog gets it verbatim
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
693
694 def chan2data(self, rowno=0, chan=0):
695 """\
696 Returns channel/frequency/velocity and spectral value
697 at an arbitrary row and channel in the scantable.
698
699 Parameters:
700
701 rowno: a row number in the scantable. Default is the
702 first row, i.e. rowno=0
703
704 chan: a channel in the scantable. Default is the first
705 channel, i.e. pos=0
706
707 """
708 if isinstance(rowno, int) and isinstance(chan, int):
709 qx = {'unit': self.get_unit(),
710 'value': self._getabcissa(rowno)[chan]}
711 qy = {'unit': self.get_fluxunit(),
712 'value': self._getspectrum(rowno)[chan]}
713 return qx, qy
714
715 def stddev(self, mask=None):
716 """\
717 Determine the standard deviation of the current beam/if/pol
718 Takes a 'mask' as an optional parameter to specify which
719 channels should be excluded.
720
721 Parameters:
722
723 mask: an optional mask specifying where the standard
724 deviation should be determined.
725
726 Example::
727
728 scan.set_unit('channel')
729 msk = scan.create_mask([100, 200], [500, 600])
730 scan.stddev(mask=m)
731
732 """
733 return self.stats(stat='stddev', mask=mask);
734
735
736 def get_column_names(self):
737 """\
738 Return a list of column names, which can be used for selection.
739 """
740 return list(Scantable.get_column_names(self))
741
742 def get_tsys(self, row=-1):
743 """\
744 Return the System temperatures.
745
746 Parameters:
747
748 row: the rowno to get the information for. (default all rows)
749
750 Returns:
751
752 a list of Tsys values for the current selection
753
754 """
755 if row > -1:
756 return self._get_column(self._gettsys, row)
757 return self._row_callback(self._gettsys, "Tsys")
758
759 def get_tsysspectrum(self, row=-1):
760 """\
761 Return the channel dependent system temperatures.
762
763 Parameters:
764
765 row: the rowno to get the information for. (default all rows)
766
767 Returns:
768
769 a list of Tsys values for the current selection
770
771 """
772 return self._get_column( self._gettsysspectrum, row )
773
774 def get_weather(self, row=-1):
775 """\
776 Return the weather informations.
777
778 Parameters:
779
780 row: the rowno to get the information for. (default all rows)
781
782 Returns:
783
784 a dict or list of of dicts of values for the current selection
785
786 """
787
788 values = self._get_column(self._get_weather, row)
789 if row > -1:
790 return {'temperature': values[0],
791 'pressure': values[1], 'humidity' : values[2],
792 'windspeed' : values[3], 'windaz' : values[4]
793 }
794 else:
795 out = []
796 for r in values:
797
798 out.append({'temperature': r[0],
799 'pressure': r[1], 'humidity' : r[2],
800 'windspeed' : r[3], 'windaz' : r[4]
801 })
802 return out
803
804 def _row_callback(self, callback, label):
805 out = ""
806 outvec = []
807 sep = '-'*50
808 for i in range(self.nrow()):
809 tm = self._gettime(i)
810 src = self._getsourcename(i)
811 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
812 out += 'Time[%s]:\n' % (tm)
813 if self.nbeam(-1) > 1:
814 out += ' Beam[%d] ' % (self.getbeam(i))
815 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
816 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
817 outvec.append(callback(i))
818 out += '= %3.3f\n' % (outvec[i])
819 out += sep+'\n'
820
821 asaplog.push(sep)
822 asaplog.push(" %s" % (label))
823 asaplog.push(sep)
824 asaplog.push(out)
825 asaplog.post()
826 return outvec
827
828 def _get_column(self, callback, row=-1, *args):
829 """
830 """
831 if row == -1:
832 return [callback(i, *args) for i in range(self.nrow())]
833 else:
834 if 0 <= row < self.nrow():
835 return callback(row, *args)
836
837
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of
                          MVTime, i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1 from the shortest
            # integration time; never less than seconds resolution (6)
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            # precision can be 1 millisecond at max for datetime parsing
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip trailing time fields the string does not carry;
            # NOTE: relies on Python 2 integer division in (6-prec)/2
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
883
884
885 def get_inttime(self, row=-1):
886 """\
887 Get a list of integration times for the observations.
888 Return a time in seconds for each integration in the scantable.
889
890 Parameters:
891
892 row: row no of integration. Default -1 return all rows.
893
894 """
895 return self._get_column(self._getinttime, row)
896
897
898 def get_sourcename(self, row=-1):
899 """\
900 Get a list source names for the observations.
901 Return a string for each integration in the scantable.
902 Parameters:
903
904 row: row no of integration. Default -1 return all rows.
905
906 """
907 return self._get_column(self._getsourcename, row)
908
909 def get_elevation(self, row=-1):
910 """\
911 Get a list of elevations for the observations.
912 Return a float for each integration in the scantable.
913
914 Parameters:
915
916 row: row no of integration. Default -1 return all rows.
917
918 """
919 return self._get_column(self._getelevation, row)
920
921 def get_azimuth(self, row=-1):
922 """\
923 Get a list of azimuths for the observations.
924 Return a float for each integration in the scantable.
925
926 Parameters:
927 row: row no of integration. Default -1 return all rows.
928
929 """
930 return self._get_column(self._getazimuth, row)
931
932 def get_parangle(self, row=-1):
933 """\
934 Get a list of parallactic angles for the observations.
935 Return a float for each integration in the scantable.
936
937 Parameters:
938
939 row: row no of integration. Default -1 return all rows.
940
941 """
942 return self._get_column(self._getparangle, row)
943
944 def get_direction(self, row=-1):
945 """
946 Get a list of Positions on the sky (direction) for the observations.
947 Return a string for each integration in the scantable.
948
949 Parameters:
950
951 row: row no of integration. Default -1 return all rows
952
953 """
954 return self._get_column(self._getdirection, row)
955
956 def get_directionval(self, row=-1):
957 """\
958 Get a list of Positions on the sky (direction) for the observations.
959 Return a float for each integration in the scantable.
960
961 Parameters:
962
963 row: row no of integration. Default -1 return all rows
964
965 """
966 return self._get_column(self._getdirectionvec, row)
967
968 @asaplog_post_dec
969 def set_unit(self, unit='channel'):
970 """\
971 Set the unit for all following operations on this scantable
972
973 Parameters:
974
975 unit: optional unit, default is 'channel'. Use one of '*Hz',
976 'km/s', 'channel' or equivalent ''
977
978 """
979 varlist = vars()
980 if unit in ['', 'pixel', 'channel']:
981 unit = ''
982 inf = list(self._getcoordinfo())
983 inf[0] = unit
984 self._setcoordinfo(inf)
985 self._add_history("set_unit", varlist)
986
987 @asaplog_post_dec
988 def set_instrument(self, instr):
989 """\
990 Set the instrument for subsequent processing.
991
992 Parameters:
993
994 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
995 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
996
997 """
998 self._setInstrument(instr)
999 self._add_history("set_instument", vars())
1000
1001 @asaplog_post_dec
1002 def set_feedtype(self, feedtype):
1003 """\
1004 Overwrite the feed type, which might not be set correctly.
1005
1006 Parameters:
1007
1008 feedtype: 'linear' or 'circular'
1009
1010 """
1011 self._setfeedtype(feedtype)
1012 self._add_history("set_feedtype", vars())
1013
1014 @asaplog_post_dec
1015 def set_doppler(self, doppler='RADIO'):
1016 """\
1017 Set the doppler for all following operations on this scantable.
1018
1019 Parameters:
1020
1021 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1022
1023 """
1024 varlist = vars()
1025 inf = list(self._getcoordinfo())
1026 inf[2] = doppler
1027 self._setcoordinfo(inf)
1028 self._add_history("set_doppler", vars())
1029
1030 @asaplog_post_dec
1031 def set_freqframe(self, frame=None):
1032 """\
1033 Set the frame type of the Spectral Axis.
1034
1035 Parameters:
1036
1037 frame: an optional frame type, default 'LSRK'. Valid frames are:
1038 'TOPO', 'LSRD', 'LSRK', 'BARY',
1039 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1040
1041 Example::
1042
1043 scan.set_freqframe('BARY')
1044
1045 """
1046 frame = frame or rcParams['scantable.freqframe']
1047 varlist = vars()
1048 # "REST" is not implemented in casacore
1049 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1050 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1051 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1052 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1053
1054 if frame in valid:
1055 inf = list(self._getcoordinfo())
1056 inf[1] = frame
1057 self._setcoordinfo(inf)
1058 self._add_history("set_freqframe", varlist)
1059 else:
1060 msg = "Please specify a valid freq type. Valid types are:\n", valid
1061 raise TypeError(msg)
1062
1063 @asaplog_post_dec
1064 def set_dirframe(self, frame=""):
1065 """\
1066 Set the frame type of the Direction on the sky.
1067
1068 Parameters:
1069
1070 frame: an optional frame type, default ''. Valid frames are:
1071 'J2000', 'B1950', 'GALACTIC'
1072
1073 Example:
1074
1075 scan.set_dirframe('GALACTIC')
1076
1077 """
1078 varlist = vars()
1079 Scantable.set_dirframe(self, frame)
1080 self._add_history("set_dirframe", varlist)
1081
1082 def get_unit(self):
1083 """\
1084 Get the default unit set in this scantable
1085
1086 Returns:
1087
1088 A unit string
1089
1090 """
1091 inf = self._getcoordinfo()
1092 unit = inf[0]
1093 if unit == '': unit = 'channel'
1094 return unit
1095
1096 @asaplog_post_dec
1097 def get_abcissa(self, rowno=0):
1098 """\
1099 Get the abcissa in the current coordinate setup for the currently
1100 selected Beam/IF/Pol
1101
1102 Parameters:
1103
1104 rowno: an optional row number in the scantable. Default is the
1105 first row, i.e. rowno=0
1106
1107 Returns:
1108
1109 The abcissa values and the format string (as a dictionary)
1110
1111 """
1112 abc = self._getabcissa(rowno)
1113 lbl = self._getabcissalabel(rowno)
1114 return abc, lbl
1115
1116 @asaplog_post_dec
1117 def flag(self, mask=None, unflag=False, row=-1):
1118 """\
1119 Flag the selected data using an optional channel mask.
1120
1121 Parameters:
1122
1123 mask: an optional channel mask, created with create_mask. Default
1124 (no mask) is all channels.
1125
1126 unflag: if True, unflag the data
1127
1128 row: an optional row number in the scantable.
1129 Default -1 flags all rows
1130
1131 """
1132 varlist = vars()
1133 mask = mask or []
1134 self._flag(row, mask, unflag)
1135 self._add_history("flag", varlist)
1136
1137 @asaplog_post_dec
1138 def flag_row(self, rows=None, unflag=False):
1139 """\
1140 Flag the selected data in row-based manner.
1141
1142 Parameters:
1143
1144 rows: list of row numbers to be flagged. Default is no row
1145 (must be explicitly specified to execute row-based
1146 flagging).
1147
1148 unflag: if True, unflag the data.
1149
1150 """
1151 varlist = vars()
1152 if rows is None:
1153 rows = []
1154 self._flag_row(rows, unflag)
1155 self._add_history("flag_row", varlist)
1156
1157 @asaplog_post_dec
1158 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1159 """\
1160 Flag the selected data outside a specified range (in channel-base)
1161
1162 Parameters:
1163
1164 uthres: upper threshold.
1165
1166 dthres: lower threshold
1167
1168 clipoutside: True for flagging data outside the range
1169 [dthres:uthres].
1170 False for flagging data inside the range.
1171
1172 unflag: if True, unflag the data.
1173
1174 """
1175 varlist = vars()
1176 self._clip(uthres, dthres, clipoutside, unflag)
1177 self._add_history("clip", varlist)
1178
1179 @asaplog_post_dec
1180 def lag_flag(self, start, end, unit="MHz", insitu=None):
1181 """\
1182 Flag the data in 'lag' space by providing a frequency to remove.
1183 Flagged data in the scantable get interpolated over the region.
1184 No taper is applied.
1185
1186 Parameters:
1187
1188 start: the start frequency (really a period within the
1189 bandwidth) or period to remove
1190
1191 end: the end frequency or period to remove
1192
1193 unit: the frequency unit (default "MHz") or "" for
1194 explicit lag channels
1195
1196 *Notes*:
1197
1198 It is recommended to flag edges of the band or strong
1199 signals beforehand.
1200
1201 """
1202 if insitu is None: insitu = rcParams['insitu']
1203 self._math._setinsitu(insitu)
1204 varlist = vars()
1205 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1206 if not (unit == "" or base.has_key(unit)):
1207 raise ValueError("%s is not a valid unit." % unit)
1208 if unit == "":
1209 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1210 else:
1211 s = scantable(self._math._lag_flag(self, start*base[unit],
1212 end*base[unit], "frequency"))
1213 s._add_history("lag_flag", varlist)
1214 if insitu:
1215 self._assign(s)
1216 else:
1217 return s
1218
    @asaplog_post_dec
    def fft(self, rowno=None, mask=None, getrealimag=False):
        """\
        Apply FFT to the spectra.
        Flagged data in the scantable get interpolated over the region.

        Parameters:

            rowno:          The row number(s) to be processed. int, list
                            and tuple are accepted. By default (None), FFT
                            is applied to the whole data.

            mask:           Auxiliary channel mask(s). Given as a boolean
                            list, it is applied to all specified rows.
                            A list of boolean lists can also be used to
                            apply different masks. In the latter case, the
                            length of 'mask' must be the same as that of
                            'rowno'. The default is None.

            getrealimag:    If True, returns the real and imaginary part
                            values of the complex results.
                            If False (the default), returns the amplitude
                            (absolute value) normalised with Ndata/2 and
                            phase (argument, in unit of radian).

        Returns:

            A list of dictionaries containing the results for each spectrum.
            Each dictionary contains two values, the real and the imaginary
            parts when getrealimag = True, or the amplitude(absolute value)
            and the phase(argument) when getrealimag = False. The key for
            these values are 'real' and 'imag', or 'ampl' and 'phase',
            respectively.
        """
        # --- normalise 'rowno' to a list of row indices ---
        if rowno is None:
            rowno = []
        if mask is None:
            mask = []
        if isinstance(rowno, int):
            rowno = [rowno]
        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
            raise TypeError("The row number(s) must be int, list or tuple.")

        # an empty row list means "every row in the table"
        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]

        # --- normalise 'mask' to a list of boolean lists ---
        if not (isinstance(mask, list) or isinstance(mask, tuple)):
            raise TypeError("The mask must be a boolean list or a list of "
                            "boolean list.")
        # no mask given: select all channels
        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
        # a flat boolean list is a single mask shared by all rows
        if isinstance(mask[0], bool): mask = [mask]
        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
            raise TypeError("The mask must be a boolean list or a list of "
                            "boolean list.")

        # with per-row masks there must be exactly one mask per row and
        # each mask must span all channels
        usecommonmask = (len(mask) == 1)
        if not usecommonmask:
            if len(mask) != len(rowno):
                raise ValueError("When specifying masks for each spectrum, "
                                 "the numbers of them must be identical.")
            for amask in mask:
                if len(amask) != self.nchan():
                    raise ValueError("The spectra and the mask have different "
                                     "length.")

        res = []

        imask = 0
        for whichrow in rowno:
            # _fft returns the two result components interleaved:
            # [c1[0], c2[0], c1[1], c2[1], ...] where (c1, c2) is either
            # (real, imag) or (ampl, phase) depending on getrealimag
            fspec = self._fft(whichrow, mask[imask], getrealimag)
            nspec = len(fspec)

            i = 0
            v1 = []
            v2 = []
            reselem = {"real":[],"imag":[]} if getrealimag \
                      else {"ampl":[],"phase":[]}

            # de-interleave the flat result into the two component lists
            while (i < nspec):
                v1.append(fspec[i])
                v2.append(fspec[i+1])
                i += 2

            if getrealimag:
                reselem["real"] += v1
                reselem["imag"] += v2
            else:
                reselem["ampl"] += v1
                reselem["phase"] += v2

            res.append(reselem)

            # advance to the next mask only when masks are per-row
            if not usecommonmask:
                imask += 1

        return res
1314
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk =  scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abcissa values in the current unit; windows are compared
        # against these values, not channel indices
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        # warn when the table holds IFs with differing channel setups:
        # the mask is then only valid for one IF
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value selects just that one point
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or  len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            # accept windows given in either order
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1387
1388 def get_masklist(self, mask=None, row=0, silent=False):
1389 """\
1390 Compute and return a list of mask windows, [min, max].
1391
1392 Parameters:
1393
1394 mask: channel mask, created with create_mask.
1395
1396 row: calcutate the masklist using the specified row
1397 for unit conversions, default is row=0
1398 only necessary if frequency varies over rows.
1399
1400 Returns:
1401
1402 [min, max], [min2, max2], ...
1403 Pairs of start/end points (inclusive)specifying
1404 the masked regions
1405
1406 """
1407 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1408 raise TypeError("The mask should be list or tuple.")
1409 if len(mask) < 2:
1410 raise TypeError("The mask elements should be > 1")
1411 data = self._getabcissa(row)
1412 if len(data) != len(mask):
1413 msg = "Number of channels in scantable != number of mask elements"
1414 raise TypeError(msg)
1415 u = self._getcoordinfo()[0]
1416 if u == "":
1417 u = "channel"
1418 msg = "The current mask window unit is %s" % u
1419 i = self._check_ifs()
1420 if not i:
1421 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1422 if not silent:
1423 asaplog.push(msg)
1424 masklist = []
1425 ist, ien = None, None
1426 ist, ien=self.get_mask_indices(mask)
1427 if ist is not None and ien is not None:
1428 for i in xrange(len(ist)):
1429 range=[data[ist[i]],data[ien[i]]]
1430 range.sort()
1431 masklist.append([range[0],range[1]])
1432 return masklist
1433
1434 def get_mask_indices(self, mask=None):
1435 """\
1436 Compute and Return lists of mask start indices and mask end indices.
1437
1438 Parameters:
1439
1440 mask: channel mask, created with create_mask.
1441
1442 Returns:
1443
1444 List of mask start indices and that of mask end indices,
1445 i.e., [istart1,istart2,....], [iend1,iend2,....].
1446
1447 """
1448 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1449 raise TypeError("The mask should be list or tuple.")
1450 if len(mask) < 2:
1451 raise TypeError("The mask elements should be > 1")
1452 istart = []
1453 iend = []
1454 if mask[0]:
1455 istart.append(0)
1456 for i in range(len(mask)-1):
1457 if not mask[i] and mask[i+1]:
1458 istart.append(i+1)
1459 elif mask[i] and not mask[i+1]:
1460 iend.append(i)
1461 if mask[len(mask)-1]:
1462 iend.append(len(mask)-1)
1463 if len(istart) != len(iend):
1464 raise RuntimeError("Numbers of mask start != mask end.")
1465 for i in range(len(istart)):
1466 if istart[i] > iend[i]:
1467 raise RuntimeError("Mask start index > mask end index")
1468 break
1469 return istart,iend
1470
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels);
        # str(list)[1:-1] turns [0, 1, 2] into "0, 1, 2"
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to obtain its
                        # frequency coordinate and abcissa values
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "   IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1604
1605 def _parse_selection(self, selstr, typestr='float', offset=0.,
1606 minval=None, maxval=None):
1607 """
1608 Parameters:
1609 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1610 typestr : The type of the values in returned list
1611 ('integer' or 'float')
1612 offset : The offset value to subtract from or add to
1613 the boundary value if the selection string
1614 includes '<' or '>'
1615 minval, maxval : The minimum/maximum values to set if the
1616 selection string includes '<' or '>'.
1617 The list element is filled with None by default.
1618 Returns:
1619 A list of min/max pair of selections.
1620 Example:
1621 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1622 returns [[0,2],[5,7],[9,9]]
1623 """
1624 selgroups = selstr.split(';')
1625 sellists = []
1626 if typestr.lower().startswith('int'):
1627 formatfunc = int
1628 else:
1629 formatfunc = float
1630
1631 for currsel in selgroups:
1632 if currsel.find('~') > 0:
1633 minsel = formatfunc(currsel.split('~')[0].strip())
1634 maxsel = formatfunc(currsel.split('~')[1].strip())
1635 elif currsel.strip().startswith('<'):
1636 minsel = minval
1637 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1638 - formatfunc(offset)
1639 elif currsel.strip().startswith('>'):
1640 minsel = formatfunc(currsel.split('>')[1].strip()) \
1641 + formatfunc(offset)
1642 maxsel = maxval
1643 else:
1644 minsel = formatfunc(currsel)
1645 maxsel = formatfunc(currsel)
1646 sellists.append([minsel,maxsel])
1647 return sellists
1648
1649# def get_restfreqs(self):
1650# """
1651# Get the restfrequency(s) stored in this scantable.
1652# The return value(s) are always of unit 'Hz'
1653# Parameters:
1654# none
1655# Returns:
1656# a list of doubles
1657# """
1658# return list(self._getrestfreqs())
1659
1660 def get_restfreqs(self, ids=None):
1661 """\
1662 Get the restfrequency(s) stored in this scantable.
1663 The return value(s) are always of unit 'Hz'
1664
1665 Parameters:
1666
1667 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1668 be retrieved
1669
1670 Returns:
1671
1672 dictionary containing ids and a list of doubles for each id
1673
1674 """
1675 if ids is None:
1676 rfreqs = {}
1677 idlist = self.getmolnos()
1678 for i in idlist:
1679 rfreqs[i] = list(self._getrestfreqs(i))
1680 return rfreqs
1681 else:
1682 if type(ids) == list or type(ids) == tuple:
1683 rfreqs = {}
1684 for i in ids:
1685 rfreqs[i] = list(self._getrestfreqs(i))
1686 return rfreqs
1687 else:
1688 return list(self._getrestfreqs(ids))
1689
1690 @asaplog_post_dec
1691 def set_restfreqs(self, freqs=None, unit='Hz'):
1692 """\
1693 Set or replace the restfrequency specified and
1694 if the 'freqs' argument holds a scalar,
1695 then that rest frequency will be applied to all the selected
1696 data. If the 'freqs' argument holds
1697 a vector, then it MUST be of equal or smaller length than
1698 the number of IFs (and the available restfrequencies will be
1699 replaced by this vector). In this case, *all* data have
1700 the restfrequency set per IF according
1701 to the corresponding value you give in the 'freqs' vector.
1702 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1703 IF 1 gets restfreq 2e9.
1704
1705 You can also specify the frequencies via a linecatalog.
1706
1707 Parameters:
1708
1709 freqs: list of rest frequency values or string idenitfiers
1710
1711 unit: unit for rest frequency (default 'Hz')
1712
1713
1714 Example::
1715
1716 # set the given restfrequency for the all currently selected IFs
1717 scan.set_restfreqs(freqs=1.4e9)
1718 # set restfrequencies for the n IFs (n > 1) in the order of the
1719 # list, i.e
1720 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1721 # len(list_of_restfreqs) == nIF
1722 # for nIF == 1 the following will set multiple restfrequency for
1723 # that IF
1724 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1725 # set multiple restfrequencies per IF. as a list of lists where
1726 # the outer list has nIF elements, the inner s arbitrary
1727 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1728
1729 *Note*:
1730
1731 To do more sophisticate Restfrequency setting, e.g. on a
1732 source and IF basis, use scantable.set_selection() before using
1733 this function::
1734
1735 # provided your scantable is called scan
1736 selection = selector()
1737 selection.set_name("ORION*")
1738 selection.set_ifs([1])
1739 scan.set_selection(selection)
1740 scan.set_restfreqs(freqs=86.6e9)
1741
1742 """
1743 varlist = vars()
1744 from asap import linecatalog
1745 # simple value
1746 if isinstance(freqs, int) or isinstance(freqs, float):
1747 self._setrestfreqs([freqs], [""], unit)
1748 # list of values
1749 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1750 # list values are scalars
1751 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1752 if len(freqs) == 1:
1753 self._setrestfreqs(freqs, [""], unit)
1754 else:
1755 # allow the 'old' mode of setting mulitple IFs
1756 sel = selector()
1757 savesel = self._getselection()
1758 iflist = self.getifnos()
1759 if len(freqs)>len(iflist):
1760 raise ValueError("number of elements in list of list "
1761 "exeeds the current IF selections")
1762 iflist = self.getifnos()
1763 for i, fval in enumerate(freqs):
1764 sel.set_ifs(iflist[i])
1765 self._setselection(sel)
1766 self._setrestfreqs([fval], [""], unit)
1767 self._setselection(savesel)
1768
1769 # list values are dict, {'value'=, 'name'=)
1770 elif isinstance(freqs[-1], dict):
1771 values = []
1772 names = []
1773 for d in freqs:
1774 values.append(d["value"])
1775 names.append(d["name"])
1776 self._setrestfreqs(values, names, unit)
1777 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1778 sel = selector()
1779 savesel = self._getselection()
1780 iflist = self.getifnos()
1781 if len(freqs)>len(iflist):
1782 raise ValueError("number of elements in list of list exeeds"
1783 " the current IF selections")
1784 for i, fval in enumerate(freqs):
1785 sel.set_ifs(iflist[i])
1786 self._setselection(sel)
1787 self._setrestfreqs(fval, [""], unit)
1788 self._setselection(savesel)
1789 # freqs are to be taken from a linecatalog
1790 elif isinstance(freqs, linecatalog):
1791 sel = selector()
1792 savesel = self._getselection()
1793 for i in xrange(freqs.nrow()):
1794 sel.set_ifs(iflist[i])
1795 self._setselection(sel)
1796 self._setrestfreqs([freqs.get_frequency(i)],
1797 [freqs.get_name(i)], "MHz")
1798 # ensure that we are not iterating past nIF
1799 if i == self.nif()-1: break
1800 self._setselection(savesel)
1801 else:
1802 return
1803 self._add_history("set_restfreqs", varlist)
1804
1805 @asaplog_post_dec
1806 def shift_refpix(self, delta):
1807 """\
1808 Shift the reference pixel of the Spectra Coordinate by an
1809 integer amount.
1810
1811 Parameters:
1812
1813 delta: the amount to shift by
1814
1815 *Note*:
1816
1817 Be careful using this with broadband data.
1818
1819 """
1820 varlist = vars()
1821 Scantable.shift_refpix(self, delta)
1822 s._add_history("shift_refpix", varlist)
1823
1824 @asaplog_post_dec
1825 def history(self, filename=None):
1826 """\
1827 Print the history. Optionally to a file.
1828
1829 Parameters:
1830
1831 filename: The name of the file to save the history to.
1832
1833 """
1834 hist = list(self._gethistory())
1835 out = "-"*80
1836 for h in hist:
1837 if h.startswith("---"):
1838 out = "\n".join([out, h])
1839 else:
1840 items = h.split("##")
1841 date = items[0]
1842 func = items[1]
1843 items = items[2:]
1844 out += "\n"+date+"\n"
1845 out += "Function: %s\n Parameters:" % (func)
1846 for i in items:
1847 if i == '':
1848 continue
1849 s = i.split("=")
1850 out += "\n %s = %s" % (s[0], s[1])
1851 out = "\n".join([out, "-"*80])
1852 if filename is not None:
1853 if filename is "":
1854 filename = 'scantable_history.txt'
1855 import os
1856 filename = os.path.expandvars(os.path.expanduser(filename))
1857 if not os.path.isdir(filename):
1858 data = open(filename, 'w')
1859 data.write(out)
1860 data.close()
1861 else:
1862 msg = "Illegal file name '%s'." % (filename)
1863 raise IOError(msg)
1864 return page(out)
1865
1866 #
1867 # Maths business
1868 #
1869 @asaplog_post_dec
1870 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1871 """\
1872 Return the (time) weighted average of a scan. Scans will be averaged
1873 only if the source direction (RA/DEC) is within 1' otherwise
1874
1875 *Note*:
1876
1877 in channels only - align if necessary
1878
1879 Parameters:
1880
1881 mask: an optional mask (only used for 'var' and 'tsys'
1882 weighting)
1883
1884 scanav: True averages each scan separately
1885 False (default) averages all scans together,
1886
1887 weight: Weighting scheme.
1888 'none' (mean no weight)
1889 'var' (1/var(spec) weighted)
1890 'tsys' (1/Tsys**2 weighted)
1891 'tint' (integration time weighted)
1892 'tintsys' (Tint/Tsys**2)
1893 'median' ( median averaging)
1894 The default is 'tint'
1895
1896 align: align the spectra in velocity before averaging. It takes
1897 the time of the first spectrum as reference time.
1898
1899 Example::
1900
1901 # time average the scantable without using a mask
1902 newscan = scan.average_time()
1903
1904 """
1905 varlist = vars()
1906 weight = weight or 'TINT'
1907 mask = mask or ()
1908 scanav = (scanav and 'SCAN') or 'NONE'
1909 scan = (self, )
1910
1911 if align:
1912 scan = (self.freq_align(insitu=False), )
1913 s = None
1914 if weight.upper() == 'MEDIAN':
1915 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1916 scanav))
1917 else:
1918 s = scantable(self._math._average(scan, mask, weight.upper(),
1919 scanav))
1920 s._add_history("average_time", varlist)
1921 return s
1922
1923 @asaplog_post_dec
1924 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1925 """\
1926 Return a scan where all spectra are converted to either
1927 Jansky or Kelvin depending upon the flux units of the scan table.
1928 By default the function tries to look the values up internally.
1929 If it can't find them (or if you want to over-ride), you must
1930 specify EITHER jyperk OR eta (and D which it will try to look up
1931 also if you don't set it). jyperk takes precedence if you set both.
1932
1933 Parameters:
1934
1935 jyperk: the Jy / K conversion factor
1936
1937 eta: the aperture efficiency
1938
1939 d: the geometric diameter (metres)
1940
1941 insitu: if False a new scantable is returned.
1942 Otherwise, the scaling is done in-situ
1943 The default is taken from .asaprc (False)
1944
1945 """
1946 if insitu is None: insitu = rcParams['insitu']
1947 self._math._setinsitu(insitu)
1948 varlist = vars()
1949 jyperk = jyperk or -1.0
1950 d = d or -1.0
1951 eta = eta or -1.0
1952 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1953 s._add_history("convert_flux", varlist)
1954 if insitu: self._assign(s)
1955 else: return s
1956
1957 @asaplog_post_dec
1958 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1959 """\
1960 Return a scan after applying a gain-elevation correction.
1961 The correction can be made via either a polynomial or a
1962 table-based interpolation (and extrapolation if necessary).
1963 You specify polynomial coefficients, an ascii table or neither.
1964 If you specify neither, then a polynomial correction will be made
1965 with built in coefficients known for certain telescopes (an error
1966 will occur if the instrument is not known).
1967 The data and Tsys are *divided* by the scaling factors.
1968
1969 Parameters:
1970
1971 poly: Polynomial coefficients (default None) to compute a
1972 gain-elevation correction as a function of
1973 elevation (in degrees).
1974
1975 filename: The name of an ascii file holding correction factors.
1976 The first row of the ascii file must give the column
1977 names and these MUST include columns
1978 "ELEVATION" (degrees) and "FACTOR" (multiply data
1979 by this) somewhere.
1980 The second row must give the data type of the
1981 column. Use 'R' for Real and 'I' for Integer.
1982 An example file would be
1983 (actual factors are arbitrary) :
1984
1985 TIME ELEVATION FACTOR
1986 R R R
1987 0.1 0 0.8
1988 0.2 20 0.85
1989 0.3 40 0.9
1990 0.4 60 0.85
1991 0.5 80 0.8
1992 0.6 90 0.75
1993
1994 method: Interpolation method when correcting from a table.
1995 Values are "nearest", "linear" (default), "cubic"
1996 and "spline"
1997
1998 insitu: if False a new scantable is returned.
1999 Otherwise, the scaling is done in-situ
2000 The default is taken from .asaprc (False)
2001
2002 """
2003
2004 if insitu is None: insitu = rcParams['insitu']
2005 self._math._setinsitu(insitu)
2006 varlist = vars()
2007 poly = poly or ()
2008 from os.path import expandvars
2009 filename = expandvars(filename)
2010 s = scantable(self._math._gainel(self, poly, filename, method))
2011 s._add_history("gain_el", varlist)
2012 if insitu:
2013 self._assign(s)
2014 else:
2015 return s
2016
2017 @asaplog_post_dec
2018 def freq_align(self, reftime=None, method='cubic', insitu=None):
2019 """\
2020 Return a scan where all rows have been aligned in frequency/velocity.
2021 The alignment frequency frame (e.g. LSRK) is that set by function
2022 set_freqframe.
2023
2024 Parameters:
2025
2026 reftime: reference time to align at. By default, the time of
2027 the first row of data is used.
2028
2029 method: Interpolation method for regridding the spectra.
2030 Choose from "nearest", "linear", "cubic" (default)
2031 and "spline"
2032
2033 insitu: if False a new scantable is returned.
2034 Otherwise, the scaling is done in-situ
2035 The default is taken from .asaprc (False)
2036
2037 """
2038 if insitu is None: insitu = rcParams["insitu"]
2039 self._math._setinsitu(insitu)
2040 varlist = vars()
2041 reftime = reftime or ""
2042 s = scantable(self._math._freq_align(self, reftime, method))
2043 s._add_history("freq_align", varlist)
2044 if insitu:
2045 self._assign(s)
2046 else:
2047 return s
2048
2049 @asaplog_post_dec
2050 def opacity(self, tau=None, insitu=None):
2051 """\
2052 Apply an opacity correction. The data
2053 and Tsys are multiplied by the correction factor.
2054
2055 Parameters:
2056
2057 tau: (list of) opacity from which the correction factor is
2058 exp(tau*ZD)
2059 where ZD is the zenith-distance.
2060 If a list is provided, it has to be of length nIF,
2061 nIF*nPol or 1 and in order of IF/POL, e.g.
2062 [opif0pol0, opif0pol1, opif1pol0 ...]
2063 if tau is `None` the opacities are determined from a
2064 model.
2065
2066 insitu: if False a new scantable is returned.
2067 Otherwise, the scaling is done in-situ
2068 The default is taken from .asaprc (False)
2069
2070 """
2071 if insitu is None:
2072 insitu = rcParams['insitu']
2073 self._math._setinsitu(insitu)
2074 varlist = vars()
2075 if not hasattr(tau, "__len__"):
2076 tau = [tau]
2077 s = scantable(self._math._opacity(self, tau))
2078 s._add_history("opacity", varlist)
2079 if insitu:
2080 self._assign(s)
2081 else:
2082 return s
2083
2084 @asaplog_post_dec
2085 def bin(self, width=5, insitu=None):
2086 """\
2087 Return a scan where all spectra have been binned up.
2088
2089 Parameters:
2090
2091 width: The bin width (default=5) in pixels
2092
2093 insitu: if False a new scantable is returned.
2094 Otherwise, the scaling is done in-situ
2095 The default is taken from .asaprc (False)
2096
2097 """
2098 if insitu is None:
2099 insitu = rcParams['insitu']
2100 self._math._setinsitu(insitu)
2101 varlist = vars()
2102 s = scantable(self._math._bin(self, width))
2103 s._add_history("bin", varlist)
2104 if insitu:
2105 self._assign(s)
2106 else:
2107 return s
2108
2109 @asaplog_post_dec
2110 def resample(self, width=5, method='cubic', insitu=None):
2111 """\
2112 Return a scan where all spectra have been binned up.
2113
2114 Parameters:
2115
2116 width: The bin width (default=5) in pixels
2117
2118 method: Interpolation method when correcting from a table.
2119 Values are "nearest", "linear", "cubic" (default)
2120 and "spline"
2121
2122 insitu: if False a new scantable is returned.
2123 Otherwise, the scaling is done in-situ
2124 The default is taken from .asaprc (False)
2125
2126 """
2127 if insitu is None:
2128 insitu = rcParams['insitu']
2129 self._math._setinsitu(insitu)
2130 varlist = vars()
2131 s = scantable(self._math._resample(self, method, width))
2132 s._add_history("resample", varlist)
2133 if insitu:
2134 self._assign(s)
2135 else:
2136 return s
2137
2138 @asaplog_post_dec
2139 def average_pol(self, mask=None, weight='none'):
2140 """\
2141 Average the Polarisations together.
2142
2143 Parameters:
2144
2145 mask: An optional mask defining the region, where the
2146 averaging will be applied. The output will have all
2147 specified points masked.
2148
2149 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2150 weighted), or 'tsys' (1/Tsys**2 weighted)
2151
2152 """
2153 varlist = vars()
2154 mask = mask or ()
2155 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2156 s._add_history("average_pol", varlist)
2157 return s
2158
2159 @asaplog_post_dec
2160 def average_beam(self, mask=None, weight='none'):
2161 """\
2162 Average the Beams together.
2163
2164 Parameters:
2165 mask: An optional mask defining the region, where the
2166 averaging will be applied. The output will have all
2167 specified points masked.
2168
2169 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2170 weighted), or 'tsys' (1/Tsys**2 weighted)
2171
2172 """
2173 varlist = vars()
2174 mask = mask or ()
2175 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2176 s._add_history("average_beam", varlist)
2177 return s
2178
2179 def parallactify(self, pflag):
2180 """\
2181 Set a flag to indicate whether this data should be treated as having
2182 been 'parallactified' (total phase == 0.0)
2183
2184 Parameters:
2185
2186 pflag: Bool indicating whether to turn this on (True) or
2187 off (False)
2188
2189 """
2190 varlist = vars()
2191 self._parallactify(pflag)
2192 self._add_history("parallactify", varlist)
2193
2194 @asaplog_post_dec
2195 def convert_pol(self, poltype=None):
2196 """\
2197 Convert the data to a different polarisation type.
2198 Note that you will need cross-polarisation terms for most conversions.
2199
2200 Parameters:
2201
2202 poltype: The new polarisation type. Valid types are:
2203 "linear", "circular", "stokes" and "linpol"
2204
2205 """
2206 varlist = vars()
2207 s = scantable(self._math._convertpol(self, poltype))
2208 s._add_history("convert_pol", varlist)
2209 return s
2210
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a pristine copy so each row can be compared against -- and,
        # if the user rejects the fit, restored from -- the original data
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive review: overlay original and smoothed spectra
            # row by row and let the user accept or reject each result
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                # register the two line handles so the legend picks them up
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                        )
                theplot.release()
                ### Ugly part for legend
                # clear the handles again so the next row starts clean
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the unsmoothed spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2288
2289 @asaplog_post_dec
2290 def _parse_wn(self, wn):
2291 if isinstance(wn, list) or isinstance(wn, tuple):
2292 return wn
2293 elif isinstance(wn, int):
2294 return [ wn ]
2295 elif isinstance(wn, str):
2296 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2297 val = wn.split('-')
2298 val = [int(val[0]), int(val[1])]
2299 val.sort()
2300 res = [i for i in xrange(val[0], val[1]+1)]
2301 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2302 val = int(wn[2:])+1
2303 res = [i for i in xrange(val)]
2304 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2305 val = int(wn[:-2])+1
2306 res = [i for i in xrange(val)]
2307 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2308 val = int(wn[1:])
2309 res = [i for i in xrange(val)]
2310 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2311 val = int(wn[:-1])
2312 res = [i for i in xrange(val)]
2313 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2314 val = int(wn[2:])
2315 res = [i for i in xrange(val, self.nchan()/2+1)]
2316 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2317 val = int(wn[:-2])
2318 res = [i for i in xrange(val, self.nchan()/2+1)]
2319 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2320 val = int(wn[1:])+1
2321 res = [i for i in xrange(val, self.nchan()/2+1)]
2322 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2323 val = int(wn[:-1])+1
2324 res = [i for i in xrange(val, self.nchan()/2+1)]
2325
2326 return res
2327 else:
2328 msg = 'wrong value given for addwn/rejwn'
2329 raise RuntimeError(msg)
2330
2331
2332 @asaplog_post_dec
2333 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2334 fftmethod=None, fftthresh=None,
2335 addwn=None, rejwn=None, clipthresh=None,
2336 clipniter=None, plot=None,
2337 getresidual=None, showprogress=None,
2338 minnrow=None, outlog=None, blfile=None):
2339 """\
2340 Return a scan which has been baselined (all rows) with sinusoidal
2341 functions.
2342
2343 Parameters:
2344 insitu: if False a new scantable is returned.
2345 Otherwise, the scaling is done in-situ
2346 The default is taken from .asaprc (False)
2347 mask: an optional mask
2348 applyfft: if True use some method, such as FFT, to find
2349 strongest sinusoidal components in the wavenumber
2350 domain to be used for baseline fitting.
2351 default is True.
2352 fftmethod: method to find the strong sinusoidal components.
2353 now only 'fft' is available and it is the default.
2354 fftthresh: the threshold to select wave numbers to be used for
2355 fitting from the distribution of amplitudes in the
2356 wavenumber domain.
2357 both float and string values accepted.
2358 given a float value, the unit is set to sigma.
2359 for string values, allowed formats include:
2360 'xsigma' or 'x' (= x-sigma level. e.g.,
2361 '3sigma'), or
2362 'topx' (= the x strongest ones, e.g. 'top5').
2363 default is 3.0 (unit: sigma).
2364 addwn: the additional wave numbers to be used for fitting.
2365 list or integer value is accepted to specify every
2366 wave numbers. also string value can be used in case
2367 you need to specify wave numbers in a certain range,
2368 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2369 '<a' (= 0,1,...,a-2,a-1),
2370 '>=a' (= a, a+1, ... up to the maximum wave
2371 number corresponding to the Nyquist
2372 frequency for the case of FFT).
2373 default is [].
2374 rejwn: the wave numbers NOT to be used for fitting.
2375 can be set just as addwn but has higher priority:
2376 wave numbers which are specified both in addwn
2377 and rejwn will NOT be used. default is [].
2378 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2379 clipniter: maximum number of iteration of 'clipthresh'-sigma
2380 clipping (default is 0)
2381 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2382 plot the fit and the residual. In this each
2383 indivual fit has to be approved, by typing 'y'
2384 or 'n'
2385 getresidual: if False, returns best-fit values instead of
2386 residual. (default is True)
2387 showprogress: show progress status for large data.
2388 default is True.
2389 minnrow: minimum number of input spectra to show.
2390 default is 1000.
2391 outlog: Output the coefficients of the best-fit
2392 function to logger (default is False)
2393 blfile: Name of a text file in which the best-fit
2394 parameter values to be written
2395 (default is '': no file/logger output)
2396
2397 Example:
2398 # return a scan baselined by a combination of sinusoidal curves
2399 # having wave numbers in spectral window up to 10,
2400 # also with 3-sigma clipping, iteration up to 4 times
2401 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2402
2403 Note:
2404 The best-fit parameter values output in logger and/or blfile are now
2405 based on specunit of 'channel'.
2406 """
2407
2408 try:
2409 varlist = vars()
2410
2411 if insitu is None: insitu = rcParams['insitu']
2412 if insitu:
2413 workscan = self
2414 else:
2415 workscan = self.copy()
2416
2417 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2418 if mask is None: mask = []
2419 if applyfft is None: applyfft = True
2420 if fftmethod is None: fftmethod = 'fft'
2421 if fftthresh is None: fftthresh = 3.0
2422 if addwn is None: addwn = []
2423 if rejwn is None: rejwn = []
2424 if clipthresh is None: clipthresh = 3.0
2425 if clipniter is None: clipniter = 0
2426 if plot is None: plot = False
2427 if getresidual is None: getresidual = True
2428 if showprogress is None: showprogress = True
2429 if minnrow is None: minnrow = 1000
2430 if outlog is None: outlog = False
2431 if blfile is None: blfile = ''
2432
2433 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2434 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2435 str(fftthresh).lower(),
2436 workscan._parse_wn(addwn),
2437 workscan._parse_wn(rejwn), clipthresh,
2438 clipniter, getresidual,
2439 pack_progress_params(showprogress,
2440 minnrow), outlog,
2441 blfile)
2442 workscan._add_history('sinusoid_baseline', varlist)
2443
2444 if insitu:
2445 self._assign(workscan)
2446 else:
2447 return workscan
2448
2449 except RuntimeError, e:
2450 raise_fitting_failure_exception(e)
2451
2452
2453 @asaplog_post_dec
2454 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2455 fftmethod=None, fftthresh=None,
2456 addwn=None, rejwn=None, clipthresh=None,
2457 clipniter=None, edge=None, threshold=None,
2458 chan_avg_limit=None, plot=None,
2459 getresidual=None, showprogress=None,
2460 minnrow=None, outlog=None, blfile=None):
2461 """\
2462 Return a scan which has been baselined (all rows) with sinusoidal
2463 functions.
2464 Spectral lines are detected first using linefinder and masked out
2465 to avoid them affecting the baseline solution.
2466
2467 Parameters:
2468 insitu: if False a new scantable is returned.
2469 Otherwise, the scaling is done in-situ
2470 The default is taken from .asaprc (False)
2471 mask: an optional mask retreived from scantable
2472 applyfft: if True use some method, such as FFT, to find
2473 strongest sinusoidal components in the wavenumber
2474 domain to be used for baseline fitting.
2475 default is True.
2476 fftmethod: method to find the strong sinusoidal components.
2477 now only 'fft' is available and it is the default.
2478 fftthresh: the threshold to select wave numbers to be used for
2479 fitting from the distribution of amplitudes in the
2480 wavenumber domain.
2481 both float and string values accepted.
2482 given a float value, the unit is set to sigma.
2483 for string values, allowed formats include:
2484 'xsigma' or 'x' (= x-sigma level. e.g.,
2485 '3sigma'), or
2486 'topx' (= the x strongest ones, e.g. 'top5').
2487 default is 3.0 (unit: sigma).
2488 addwn: the additional wave numbers to be used for fitting.
2489 list or integer value is accepted to specify every
2490 wave numbers. also string value can be used in case
2491 you need to specify wave numbers in a certain range,
2492 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2493 '<a' (= 0,1,...,a-2,a-1),
2494 '>=a' (= a, a+1, ... up to the maximum wave
2495 number corresponding to the Nyquist
2496 frequency for the case of FFT).
2497 default is [].
2498 rejwn: the wave numbers NOT to be used for fitting.
2499 can be set just as addwn but has higher priority:
2500 wave numbers which are specified both in addwn
2501 and rejwn will NOT be used. default is [].
2502 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2503 clipniter: maximum number of iteration of 'clipthresh'-sigma
2504 clipping (default is 0)
2505 edge: an optional number of channel to drop at
2506 the edge of spectrum. If only one value is
2507 specified, the same number will be dropped
2508 from both sides of the spectrum. Default
2509 is to keep all channels. Nested tuples
2510 represent individual edge selection for
2511 different IFs (a number of spectral channels
2512 can be different)
2513 threshold: the threshold used by line finder. It is
2514 better to keep it large as only strong lines
2515 affect the baseline solution.
2516 chan_avg_limit: a maximum number of consequtive spectral
2517 channels to average during the search of
2518 weak and broad lines. The default is no
2519 averaging (and no search for weak lines).
2520 If such lines can affect the fitted baseline
2521 (e.g. a high order polynomial is fitted),
2522 increase this parameter (usually values up
2523 to 8 are reasonable). Most users of this
2524 method should find the default value sufficient.
2525 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2526 plot the fit and the residual. In this each
2527 indivual fit has to be approved, by typing 'y'
2528 or 'n'
2529 getresidual: if False, returns best-fit values instead of
2530 residual. (default is True)
2531 showprogress: show progress status for large data.
2532 default is True.
2533 minnrow: minimum number of input spectra to show.
2534 default is 1000.
2535 outlog: Output the coefficients of the best-fit
2536 function to logger (default is False)
2537 blfile: Name of a text file in which the best-fit
2538 parameter values to be written
2539 (default is "": no file/logger output)
2540
2541 Example:
2542 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2543
2544 Note:
2545 The best-fit parameter values output in logger and/or blfile are now
2546 based on specunit of 'channel'.
2547 """
2548
2549 try:
2550 varlist = vars()
2551
2552 if insitu is None: insitu = rcParams['insitu']
2553 if insitu:
2554 workscan = self
2555 else:
2556 workscan = self.copy()
2557
2558 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2559 if mask is None: mask = []
2560 if applyfft is None: applyfft = True
2561 if fftmethod is None: fftmethod = 'fft'
2562 if fftthresh is None: fftthresh = 3.0
2563 if addwn is None: addwn = []
2564 if rejwn is None: rejwn = []
2565 if clipthresh is None: clipthresh = 3.0
2566 if clipniter is None: clipniter = 0
2567 if edge is None: edge = (0,0)
2568 if threshold is None: threshold = 3
2569 if chan_avg_limit is None: chan_avg_limit = 1
2570 if plot is None: plot = False
2571 if getresidual is None: getresidual = True
2572 if showprogress is None: showprogress = True
2573 if minnrow is None: minnrow = 1000
2574 if outlog is None: outlog = False
2575 if blfile is None: blfile = ''
2576
2577 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2578 workscan._auto_sinusoid_baseline(mask, applyfft,
2579 fftmethod.lower(),
2580 str(fftthresh).lower(),
2581 workscan._parse_wn(addwn),
2582 workscan._parse_wn(rejwn),
2583 clipthresh, clipniter,
2584 normalise_edge_param(edge),
2585 threshold, chan_avg_limit,
2586 getresidual,
2587 pack_progress_params(showprogress,
2588 minnrow),
2589 outlog, blfile)
2590 workscan._add_history("auto_sinusoid_baseline", varlist)
2591
2592 if insitu:
2593 self._assign(workscan)
2594 else:
2595 return workscan
2596
2597 except RuntimeError, e:
2598 raise_fitting_failure_exception(e)
2599
2600 @asaplog_post_dec
2601 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2602 clipthresh=None, clipniter=None, plot=None,
2603 getresidual=None, showprogress=None, minnrow=None,
2604 outlog=None, blfile=None):
2605 """\
2606 Return a scan which has been baselined (all rows) by cubic spline
2607 function (piecewise cubic polynomial).
2608
2609 Parameters:
2610 insitu: If False a new scantable is returned.
2611 Otherwise, the scaling is done in-situ
2612 The default is taken from .asaprc (False)
2613 mask: An optional mask
2614 npiece: Number of pieces. (default is 2)
2615 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2616 clipniter: maximum number of iteration of 'clipthresh'-sigma
2617 clipping (default is 0)
2618 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2619 plot the fit and the residual. In this each
2620 indivual fit has to be approved, by typing 'y'
2621 or 'n'
2622 getresidual: if False, returns best-fit values instead of
2623 residual. (default is True)
2624 showprogress: show progress status for large data.
2625 default is True.
2626 minnrow: minimum number of input spectra to show.
2627 default is 1000.
2628 outlog: Output the coefficients of the best-fit
2629 function to logger (default is False)
2630 blfile: Name of a text file in which the best-fit
2631 parameter values to be written
2632 (default is "": no file/logger output)
2633
2634 Example:
2635 # return a scan baselined by a cubic spline consisting of 2 pieces
2636 # (i.e., 1 internal knot),
2637 # also with 3-sigma clipping, iteration up to 4 times
2638 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2639
2640 Note:
2641 The best-fit parameter values output in logger and/or blfile are now
2642 based on specunit of 'channel'.
2643 """
2644
2645 try:
2646 varlist = vars()
2647
2648 if insitu is None: insitu = rcParams['insitu']
2649 if insitu:
2650 workscan = self
2651 else:
2652 workscan = self.copy()
2653
2654 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2655 if mask is None: mask = []
2656 if npiece is None: npiece = 2
2657 if clipthresh is None: clipthresh = 3.0
2658 if clipniter is None: clipniter = 0
2659 if plot is None: plot = False
2660 if getresidual is None: getresidual = True
2661 if showprogress is None: showprogress = True
2662 if minnrow is None: minnrow = 1000
2663 if outlog is None: outlog = False
2664 if blfile is None: blfile = ''
2665
2666 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2667 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2668 getresidual,
2669 pack_progress_params(showprogress,
2670 minnrow), outlog,
2671 blfile)
2672 workscan._add_history("cspline_baseline", varlist)
2673
2674 if insitu:
2675 self._assign(workscan)
2676 else:
2677 return workscan
2678
2679 except RuntimeError, e:
2680 raise_fitting_failure_exception(e)
2681
2682 @asaplog_post_dec
2683 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2684 clipthresh=None, clipniter=None,
2685 edge=None, threshold=None, chan_avg_limit=None,
2686 getresidual=None, plot=None,
2687 showprogress=None, minnrow=None, outlog=None,
2688 blfile=None):
2689 """\
2690 Return a scan which has been baselined (all rows) by cubic spline
2691 function (piecewise cubic polynomial).
2692 Spectral lines are detected first using linefinder and masked out
2693 to avoid them affecting the baseline solution.
2694
2695 Parameters:
2696 insitu: if False a new scantable is returned.
2697 Otherwise, the scaling is done in-situ
2698 The default is taken from .asaprc (False)
2699 mask: an optional mask retreived from scantable
2700 npiece: Number of pieces. (default is 2)
2701 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2702 clipniter: maximum number of iteration of 'clipthresh'-sigma
2703 clipping (default is 0)
2704 edge: an optional number of channel to drop at
2705 the edge of spectrum. If only one value is
2706 specified, the same number will be dropped
2707 from both sides of the spectrum. Default
2708 is to keep all channels. Nested tuples
2709 represent individual edge selection for
2710 different IFs (a number of spectral channels
2711 can be different)
2712 threshold: the threshold used by line finder. It is
2713 better to keep it large as only strong lines
2714 affect the baseline solution.
2715 chan_avg_limit: a maximum number of consequtive spectral
2716 channels to average during the search of
2717 weak and broad lines. The default is no
2718 averaging (and no search for weak lines).
2719 If such lines can affect the fitted baseline
2720 (e.g. a high order polynomial is fitted),
2721 increase this parameter (usually values up
2722 to 8 are reasonable). Most users of this
2723 method should find the default value sufficient.
2724 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2725 plot the fit and the residual. In this each
2726 indivual fit has to be approved, by typing 'y'
2727 or 'n'
2728 getresidual: if False, returns best-fit values instead of
2729 residual. (default is True)
2730 showprogress: show progress status for large data.
2731 default is True.
2732 minnrow: minimum number of input spectra to show.
2733 default is 1000.
2734 outlog: Output the coefficients of the best-fit
2735 function to logger (default is False)
2736 blfile: Name of a text file in which the best-fit
2737 parameter values to be written
2738 (default is "": no file/logger output)
2739
2740 Example:
2741 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2742
2743 Note:
2744 The best-fit parameter values output in logger and/or blfile are now
2745 based on specunit of 'channel'.
2746 """
2747
2748 try:
2749 varlist = vars()
2750
2751 if insitu is None: insitu = rcParams['insitu']
2752 if insitu:
2753 workscan = self
2754 else:
2755 workscan = self.copy()
2756
2757 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2758 if mask is None: mask = []
2759 if npiece is None: npiece = 2
2760 if clipthresh is None: clipthresh = 3.0
2761 if clipniter is None: clipniter = 0
2762 if edge is None: edge = (0, 0)
2763 if threshold is None: threshold = 3
2764 if chan_avg_limit is None: chan_avg_limit = 1
2765 if plot is None: plot = False
2766 if getresidual is None: getresidual = True
2767 if showprogress is None: showprogress = True
2768 if minnrow is None: minnrow = 1000
2769 if outlog is None: outlog = False
2770 if blfile is None: blfile = ''
2771
2772 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2773 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2774 clipniter,
2775 normalise_edge_param(edge),
2776 threshold,
2777 chan_avg_limit, getresidual,
2778 pack_progress_params(showprogress,
2779 minnrow),
2780 outlog, blfile)
2781 workscan._add_history("auto_cspline_baseline", varlist)
2782
2783 if insitu:
2784 self._assign(workscan)
2785 else:
2786 return workscan
2787
2788 except RuntimeError, e:
2789 raise_fitting_failure_exception(e)
2790
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fall back to the documented defaults for unset parameters
            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit row by row with the Python fitter
                # and let the user approve each result.
                # NOTE(review): output is appended to blfile only if the
                # file already exists -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own mask
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources once all rows are done
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
2907
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value
                            sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing
                            'y' or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fall back to the documented defaults for unset parameters
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # flatten the edge spec into one (left, right) pair per IF
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run the line finder and the Python
                # fitter row by row, letting the user approve each fit.
                # NOTE(review): output is appended to blfile only if the
                # file already exists -- confirm this is intended.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # pick this row's (left, right) edge pair from the
                    # flattened per-IF edge list
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2]) # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit outside the channels the line finder masked out
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources once all rows are done
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3059
3060 def _init_blinfo(self):
3061 """\
3062 Initialise the following three auxiliary members:
3063 blpars : parameters of the best-fit baseline,
3064 masklists : mask data (edge positions of masked channels) and
3065 actualmask : mask data (in boolean list),
3066 to keep for use later (including output to logger/text files).
3067 Used by poly_baseline() and auto_poly_baseline() in case of
3068 'plot=True'.
3069 """
3070 self.blpars = []
3071 self.masklists = []
3072 self.actualmask = []
3073 return
3074
3075 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3076 """\
3077 Append baseline-fitting related info to blpars, masklist and
3078 actualmask.
3079 """
3080 self.blpars.append(data_blpars)
3081 self.masklists.append(data_masklists)
3082 self.actualmask.append(data_actualmask)
3083 return
3084
3085 @asaplog_post_dec
3086 def rotate_linpolphase(self, angle):
3087 """\
3088 Rotate the phase of the complex polarization O=Q+iU correlation.
3089 This is always done in situ in the raw data. So if you call this
3090 function more than once then each call rotates the phase further.
3091
3092 Parameters:
3093
3094 angle: The angle (degrees) to rotate (add) by.
3095
3096 Example::
3097
3098 scan.rotate_linpolphase(2.3)
3099
3100 """
3101 varlist = vars()
3102 self._math._rotate_linpolphase(self, angle)
3103 self._add_history("rotate_linpolphase", varlist)
3104 return
3105
3106 @asaplog_post_dec
3107 def rotate_xyphase(self, angle):
3108 """\
3109 Rotate the phase of the XY correlation. This is always done in situ
3110 in the data. So if you call this function more than once
3111 then each call rotates the phase further.
3112
3113 Parameters:
3114
3115 angle: The angle (degrees) to rotate (add) by.
3116
3117 Example::
3118
3119 scan.rotate_xyphase(2.3)
3120
3121 """
3122 varlist = vars()
3123 self._math._rotate_xyphase(self, angle)
3124 self._add_history("rotate_xyphase", varlist)
3125 return
3126
3127 @asaplog_post_dec
3128 def swap_linears(self):
3129 """\
3130 Swap the linear polarisations XX and YY, or better the first two
3131 polarisations as this also works for ciculars.
3132 """
3133 varlist = vars()
3134 self._math._swap_linears(self)
3135 self._add_history("swap_linears", varlist)
3136 return
3137
3138 @asaplog_post_dec
3139 def invert_phase(self):
3140 """\
3141 Invert the phase of the complex polarisation
3142 """
3143 varlist = vars()
3144 self._math._invert_phase(self)
3145 self._add_history("invert_phase", varlist)
3146 return
3147
3148 @asaplog_post_dec
3149 def add(self, offset, insitu=None):
3150 """\
3151 Return a scan where all spectra have the offset added
3152
3153 Parameters:
3154
3155 offset: the offset
3156
3157 insitu: if False a new scantable is returned.
3158 Otherwise, the scaling is done in-situ
3159 The default is taken from .asaprc (False)
3160
3161 """
3162 if insitu is None: insitu = rcParams['insitu']
3163 self._math._setinsitu(insitu)
3164 varlist = vars()
3165 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3166 s._add_history("add", varlist)
3167 if insitu:
3168 self._assign(s)
3169 else:
3170 return s
3171
3172 @asaplog_post_dec
3173 def scale(self, factor, tsys=True, insitu=None):
3174 """\
3175
3176 Return a scan where all spectra are scaled by the given 'factor'
3177
3178 Parameters:
3179
3180 factor: the scaling factor (float or 1D float list)
3181
3182 insitu: if False a new scantable is returned.
3183 Otherwise, the scaling is done in-situ
3184 The default is taken from .asaprc (False)
3185
3186 tsys: if True (default) then apply the operation to Tsys
3187 as well as the data
3188
3189 """
3190 if insitu is None: insitu = rcParams['insitu']
3191 self._math._setinsitu(insitu)
3192 varlist = vars()
3193 s = None
3194 import numpy
3195 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3196 if isinstance(factor[0], list) or isinstance(factor[0],
3197 numpy.ndarray):
3198 from asapmath import _array2dOp
3199 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3200 else:
3201 s = scantable( self._math._arrayop( self, factor,
3202 "MUL", tsys ) )
3203 else:
3204 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3205 s._add_history("scale", varlist)
3206 if insitu:
3207 self._assign(s)
3208 else:
3209 return s
3210
3211 @preserve_selection
3212 def set_sourcetype(self, match, matchtype="pattern",
3213 sourcetype="reference"):
3214 """\
3215 Set the type of the source to be an source or reference scan
3216 using the provided pattern.
3217
3218 Parameters:
3219
3220 match: a Unix style pattern, regular expression or selector
3221
3222 matchtype: 'pattern' (default) UNIX style pattern or
3223 'regex' regular expression
3224
3225 sourcetype: the type of the source to use (source/reference)
3226
3227 """
3228 varlist = vars()
3229 basesel = self.get_selection()
3230 stype = -1
3231 if sourcetype.lower().startswith("r"):
3232 stype = 1
3233 elif sourcetype.lower().startswith("s"):
3234 stype = 0
3235 else:
3236 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3237 if matchtype.lower().startswith("p"):
3238 matchtype = "pattern"
3239 elif matchtype.lower().startswith("r"):
3240 matchtype = "regex"
3241 else:
3242 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3243 sel = selector()
3244 if isinstance(match, selector):
3245 sel = match
3246 else:
3247 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3248 self._setsourcetype(stype)
3249 self._add_history("set_sourcetype", varlist)
3250
3251 @asaplog_post_dec
3252 @preserve_selection
3253 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3254 """\
3255 This function allows to build quotients automatically.
3256 It assumes the observation to have the same number of
3257 "ons" and "offs"
3258
3259 Parameters:
3260
3261 preserve: you can preserve (default) the continuum or
3262 remove it. The equations used are
3263
3264 preserve: Output = Toff * (on/off) - Toff
3265
3266 remove: Output = Toff * (on/off) - Ton
3267
3268 mode: the on/off detection mode
3269 'paired' (default)
3270 identifies 'off' scans by the
3271 trailing '_R' (Mopra/Parkes) or
3272 '_e'/'_w' (Tid) and matches
3273 on/off pairs from the observing pattern
3274 'time'
3275 finds the closest off in time
3276
3277 .. todo:: verify argument is not implemented
3278
3279 """
3280 varlist = vars()
3281 modes = ["time", "paired"]
3282 if not mode in modes:
3283 msg = "please provide valid mode. Valid modes are %s" % (modes)
3284 raise ValueError(msg)
3285 s = None
3286 if mode.lower() == "paired":
3287 sel = self.get_selection()
3288 sel.set_query("SRCTYPE==psoff")
3289 self.set_selection(sel)
3290 offs = self.copy()
3291 sel.set_query("SRCTYPE==pson")
3292 self.set_selection(sel)
3293 ons = self.copy()
3294 s = scantable(self._math._quotient(ons, offs, preserve))
3295 elif mode.lower() == "time":
3296 s = scantable(self._math._auto_quotient(self, mode, preserve))
3297 s._add_history("auto_quotient", varlist)
3298 return s
3299
3300 @asaplog_post_dec
3301 def mx_quotient(self, mask = None, weight='median', preserve=True):
3302 """\
3303 Form a quotient using "off" beams when observing in "MX" mode.
3304
3305 Parameters:
3306
3307 mask: an optional mask to be used when weight == 'stddev'
3308
3309 weight: How to average the off beams. Default is 'median'.
3310
3311 preserve: you can preserve (default) the continuum or
3312 remove it. The equations used are:
3313
3314 preserve: Output = Toff * (on/off) - Toff
3315
3316 remove: Output = Toff * (on/off) - Ton
3317
3318 """
3319 mask = mask or ()
3320 varlist = vars()
3321 on = scantable(self._math._mx_extract(self, 'on'))
3322 preoff = scantable(self._math._mx_extract(self, 'off'))
3323 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3324 from asapmath import quotient
3325 q = quotient(on, off, preserve)
3326 q._add_history("mx_quotient", varlist)
3327 return q
3328
3329 @asaplog_post_dec
3330 def freq_switch(self, insitu=None):
3331 """\
3332 Apply frequency switching to the data.
3333
3334 Parameters:
3335
3336 insitu: if False a new scantable is returned.
3337 Otherwise, the swictching is done in-situ
3338 The default is taken from .asaprc (False)
3339
3340 """
3341 if insitu is None: insitu = rcParams['insitu']
3342 self._math._setinsitu(insitu)
3343 varlist = vars()
3344 s = scantable(self._math._freqswitch(self))
3345 s._add_history("freq_switch", varlist)
3346 if insitu:
3347 self._assign(s)
3348 else:
3349 return s
3350
3351 @asaplog_post_dec
3352 def recalc_azel(self):
3353 """Recalculate the azimuth and elevation for each position."""
3354 varlist = vars()
3355 self._recalcazel()
3356 self._add_history("recalc_azel", varlist)
3357 return
3358
3359 @asaplog_post_dec
3360 def __add__(self, other):
3361 varlist = vars()
3362 s = None
3363 if isinstance(other, scantable):
3364 s = scantable(self._math._binaryop(self, other, "ADD"))
3365 elif isinstance(other, float):
3366 s = scantable(self._math._unaryop(self, other, "ADD", False))
3367 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3368 if isinstance(other[0], list) \
3369 or isinstance(other[0], numpy.ndarray):
3370 from asapmath import _array2dOp
3371 s = _array2dOp( self.copy(), other, "ADD", False )
3372 else:
3373 s = scantable( self._math._arrayop( self.copy(), other,
3374 "ADD", False ) )
3375 else:
3376 raise TypeError("Other input is not a scantable or float value")
3377 s._add_history("operator +", varlist)
3378 return s
3379
3380 @asaplog_post_dec
3381 def __sub__(self, other):
3382 """
3383 implicit on all axes and on Tsys
3384 """
3385 varlist = vars()
3386 s = None
3387 if isinstance(other, scantable):
3388 s = scantable(self._math._binaryop(self, other, "SUB"))
3389 elif isinstance(other, float):
3390 s = scantable(self._math._unaryop(self, other, "SUB", False))
3391 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3392 if isinstance(other[0], list) \
3393 or isinstance(other[0], numpy.ndarray):
3394 from asapmath import _array2dOp
3395 s = _array2dOp( self.copy(), other, "SUB", False )
3396 else:
3397 s = scantable( self._math._arrayop( self.copy(), other,
3398 "SUB", False ) )
3399 else:
3400 raise TypeError("Other input is not a scantable or float value")
3401 s._add_history("operator -", varlist)
3402 return s
3403
3404 @asaplog_post_dec
3405 def __mul__(self, other):
3406 """
3407 implicit on all axes and on Tsys
3408 """
3409 varlist = vars()
3410 s = None
3411 if isinstance(other, scantable):
3412 s = scantable(self._math._binaryop(self, other, "MUL"))
3413 elif isinstance(other, float):
3414 s = scantable(self._math._unaryop(self, other, "MUL", False))
3415 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3416 if isinstance(other[0], list) \
3417 or isinstance(other[0], numpy.ndarray):
3418 from asapmath import _array2dOp
3419 s = _array2dOp( self.copy(), other, "MUL", False )
3420 else:
3421 s = scantable( self._math._arrayop( self.copy(), other,
3422 "MUL", False ) )
3423 else:
3424 raise TypeError("Other input is not a scantable or float value")
3425 s._add_history("operator *", varlist)
3426 return s
3427
3428
3429 @asaplog_post_dec
3430 def __div__(self, other):
3431 """
3432 implicit on all axes and on Tsys
3433 """
3434 varlist = vars()
3435 s = None
3436 if isinstance(other, scantable):
3437 s = scantable(self._math._binaryop(self, other, "DIV"))
3438 elif isinstance(other, float):
3439 if other == 0.0:
3440 raise ZeroDivisionError("Dividing by zero is not recommended")
3441 s = scantable(self._math._unaryop(self, other, "DIV", False))
3442 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3443 if isinstance(other[0], list) \
3444 or isinstance(other[0], numpy.ndarray):
3445 from asapmath import _array2dOp
3446 s = _array2dOp( self.copy(), other, "DIV", False )
3447 else:
3448 s = scantable( self._math._arrayop( self.copy(), other,
3449 "DIV", False ) )
3450 else:
3451 raise TypeError("Other input is not a scantable or float value")
3452 s._add_history("operator /", varlist)
3453 return s
3454
3455 @asaplog_post_dec
3456 def get_fit(self, row=0):
3457 """\
3458 Print or return the stored fits for a row in the scantable
3459
3460 Parameters:
3461
3462 row: the row which the fit has been applied to.
3463
3464 """
3465 if row > self.nrow():
3466 return
3467 from asap.asapfit import asapfit
3468 fit = asapfit(self._getfit(row))
3469 asaplog.push( '%s' %(fit) )
3470 return fit.as_dict()
3471
3472 @preserve_selection
3473 def flag_nans(self):
3474 """\
3475 Utility function to flag NaN values in the scantable.
3476 """
3477 import numpy
3478 basesel = self.get_selection()
3479 for i in range(self.nrow()):
3480 sel = self.get_row_selector(i)
3481 self.set_selection(basesel+sel)
3482 nans = numpy.isnan(self._getspectrum(0))
3483 if numpy.any(nans):
3484 bnans = [ bool(v) for v in nans]
3485 self.flag(bnans)
3486
3487 def get_row_selector(self, rowno):
3488 return selector(rows=[rowno])
3489
3490 def _add_history(self, funcname, parameters):
3491 if not rcParams['scantable.history']:
3492 return
3493 # create date
3494 sep = "##"
3495 from datetime import datetime
3496 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3497 hist = dstr+sep
3498 hist += funcname+sep#cdate+sep
3499 if parameters.has_key('self'):
3500 del parameters['self']
3501 for k, v in parameters.iteritems():
3502 if type(v) is dict:
3503 for k2, v2 in v.iteritems():
3504 hist += k2
3505 hist += "="
3506 if isinstance(v2, scantable):
3507 hist += 'scantable'
3508 elif k2 == 'mask':
3509 if isinstance(v2, list) or isinstance(v2, tuple):
3510 hist += str(self._zip_mask(v2))
3511 else:
3512 hist += str(v2)
3513 else:
3514 hist += str(v2)
3515 else:
3516 hist += k
3517 hist += "="
3518 if isinstance(v, scantable):
3519 hist += 'scantable'
3520 elif k == 'mask':
3521 if isinstance(v, list) or isinstance(v, tuple):
3522 hist += str(self._zip_mask(v))
3523 else:
3524 hist += str(v)
3525 else:
3526 hist += str(v)
3527 hist += sep
3528 hist = hist[:-2] # remove trailing '##'
3529 self._addhistory(hist)
3530
3531
3532 def _zip_mask(self, mask):
3533 mask = list(mask)
3534 i = 0
3535 segments = []
3536 while mask[i:].count(1):
3537 i += mask[i:].index(1)
3538 if mask[i:].count(0):
3539 j = i + mask[i:].index(0)
3540 else:
3541 j = len(mask)
3542 segments.append([i, j])
3543 i = j
3544 return segments
3545
3546 def _get_ordinate_label(self):
3547 fu = "("+self.get_fluxunit()+")"
3548 import re
3549 lbl = "Intensity"
3550 if re.match(".K.", fu):
3551 lbl = "Brightness Temperature "+ fu
3552 elif re.match(".Jy.", fu):
3553 lbl = "Flux density "+ fu
3554 return lbl
3555
3556 def _check_ifs(self):
3557# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
3558 nchans = [self.nchan(i) for i in self.getifnos()]
3559 nchans = filter(lambda t: t > 0, nchans)
3560 return (sum(nchans)/len(nchans) == nchans[0])
3561
3562 @asaplog_post_dec
3563 def _fill(self, names, unit, average, opts={}):
3564 first = True
3565 fullnames = []
3566 for name in names:
3567 name = os.path.expandvars(name)
3568 name = os.path.expanduser(name)
3569 if not os.path.exists(name):
3570 msg = "File '%s' does not exists" % (name)
3571 raise IOError(msg)
3572 fullnames.append(name)
3573 if average:
3574 asaplog.push('Auto averaging integrations')
3575 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3576 for name in fullnames:
3577 tbl = Scantable(stype)
3578 if is_ms( name ):
3579 r = msfiller( tbl )
3580 else:
3581 r = filler( tbl )
3582 rx = rcParams['scantable.reference']
3583 r.setreferenceexpr(rx)
3584 msg = "Importing %s..." % (name)
3585 asaplog.push(msg, False)
3586 r.open(name, opts)
3587 r.fill()
3588 if average:
3589 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3590 if not first:
3591 tbl = self._math._merge([self, tbl])
3592 Scantable.__init__(self, tbl)
3593 r.close()
3594 del r, tbl
3595 first = False
3596 #flush log
3597 asaplog.post()
3598 if unit is not None:
3599 self.set_fluxunit(unit)
3600 if not is_casapy():
3601 self.set_freqframe(rcParams['scantable.freqframe'])
3602
3603
3604 def __getitem__(self, key):
3605 if key < 0:
3606 key += self.nrow()
3607 if key >= self.nrow():
3608 raise IndexError("Row index out of range.")
3609 return self._getspectrum(key)
3610
3611 def __setitem__(self, key, value):
3612 if key < 0:
3613 key += self.nrow()
3614 if key >= self.nrow():
3615 raise IndexError("Row index out of range.")
3616 if not hasattr(value, "__len__") or \
3617 len(value) > self.nchan(self.getif(key)):
3618 raise ValueError("Spectrum length doesn't match.")
3619 return self._setspectrum(value, key)
3620
3621 def __len__(self):
3622 return self.nrow()
3623
3624 def __iter__(self):
3625 for i in range(len(self)):
3626 yield self[i]
Note: See TracBrowser for help on using the repository browser.