source: branches/alma/python/scantable.py@ 1614

Last change on this file since 1614 was 1614, checked in by Takeshi Nakazato, 16 years ago

New Development: No

JIRA Issue: Yes CAS-729, CAS-1147

Ready to Release: Yes

Interface Changes: No

Put in Release Notes: No

Description:

  1. Added level parameter to print_log()
  2. Replaced casalog.post() with asaplog.push() + print_log().
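
A minimal sketch of the new logging idiom as it appears throughout this file (the message text is illustrative only):

    asaplog.push("File 'data.rpf' not found.")  # queue a message
    print_log('ERROR')                          # flush it at the ERROR level
    print_log()                                 # or flush at the default level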


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 80.9 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None, getpt=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
34 The default is False, which makes loading
35 the MS data faster in some cases.
36 """
37 if average is None:
38 average = rcParams['scantable.autoaverage']
39 if getpt is None:
40 getpt = False
41 varlist = vars()
42 from asap._asap import stmath
43 self._math = stmath()
44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
46 else:
47 if isinstance(filename, str):# or \
48# (isinstance(filename, list) or isinstance(filename, tuple)) \
49# and isinstance(filename[-1], str):
50 import os.path
51 filename = os.path.expandvars(filename)
52 filename = os.path.expanduser(filename)
53 if not os.path.exists(filename):
54 s = "File '%s' not found." % (filename)
55 if rcParams['verbose']:
56 asaplog.push(s)
57 #print asaplog.pop().strip()
58 print_log('ERROR')
59 return
60 raise IOError(s)
61 if os.path.isdir(filename) \
62 and not os.path.exists(filename+'/table.f1'):
63 # crude check if asap table
64 if os.path.exists(filename+'/table.info'):
65 ondisk = rcParams['scantable.storage'] == 'disk'
66 Scantable.__init__(self, filename, ondisk)
67 if unit is not None:
68 self.set_fluxunit(unit)
69 # do not reset to the default freqframe
70 #self.set_freqframe(rcParams['scantable.freqframe'])
71 else:
72 msg = "The given file '%s' is not a valid " \
73 "asap table." % (filename)
74 if rcParams['verbose']:
75 #print msg
76 asaplog.push( msg )
77 print_log( 'ERROR' )
78 return
79 else:
80 raise IOError(msg)
81 else:
82 self._fill([filename], unit, average, getpt)
83 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
84 and isinstance(filename[-1], str):
85 self._fill(filename, unit, average, getpt)
86 self._add_history("scantable", varlist)
87 print_log()
88
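# Usage sketch for the constructor above (file names are hypothetical and only
# illustrate the rpfits/sdfits/ms and ASAP-table cases):
#
#   from asap import scantable
#   s1 = scantable('2008-03-12_0932.rpf', average=True)  # read + auto-average
#   s2 = scantable('orion.asap', unit='Jy')               # reopen a saved ASAP table
#   s3 = scantable('uid___X1.ms', getpt=True)             # MS input, fill pointing data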
89 def save(self, name=None, format=None, overwrite=False):
90 """
91 Store the scantable on disk. This can be an asap (aips++) Table,
92 SDFITS or MS2 format.
93 Parameters:
94 name: the name of the output file. For format "ASCII"
95 this is the root file name (data in 'name'.txt
96 and header in 'name'_header.txt)
97 format: an optional file format. Default is ASAP.
98 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
99 'SDFITS' (save as SDFITS file)
100 'ASCII' (saves as ascii text file)
101 'MS2' (saves as an aips++
102 MeasurementSet V2)
103 'FITS' (save as image FITS - not
104 readable by class)
105 'CLASS' (save as FITS readable by CLASS)
106 overwrite: If the file should be overwritten if it exists.
107 The default False is to return with warning
108 without writing the output. USE WITH CARE.
109 Example:
110 scan.save('myscan.asap')
111 scan.save('myscan.sdfits', 'SDFITS')
112 """
113 from os import path
114 if format is None: format = rcParams['scantable.save']
115 suffix = '.'+format.lower()
116 if name is None or name == "":
117 name = 'scantable'+suffix
118 msg = "No filename given. Using default name %s..." % name
119 asaplog.push(msg)
120 name = path.expandvars(name)
121 if path.isfile(name) or path.isdir(name):
122 if not overwrite:
123 msg = "File %s exists." % name
124 if rcParams['verbose']:
125 #print msg
126 asaplog.push( msg )
127 print_log( 'ERROR' )
128 return
129 else:
130 raise IOError(msg)
131 format2 = format.upper()
132 if format2 == 'ASAP':
133 self._save(name)
134 else:
135 from asap._asap import stwriter as stw
136 writer = stw(format2)
137 writer.write(self, name)
138 print_log()
139 return
140
141 def copy(self):
142 """
143 Return a copy of this scantable.
144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
146 Parameters:
147 none
148 Example:
149 copiedscan = scan.copy()
150 """
151 sd = scantable(Scantable._copy(self))
152 return sd
153
154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable where the specified scan number(s) has(have)
157 been dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
166 #print "Please specify a scanno to drop from the scantable"
167 asaplog.push( 'Please specify a scanno to drop from the scantable' )
168 print_log( 'ERROR' )
169 return
170 else:
171 raise RuntimeError("No scan given")
172 try:
173 scanid = _to_list(scanid)
174 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
175 for sid in scanid: allscans.remove(sid)
176 if len(allscans) == 0:
177 raise ValueError("Can't remove all scans")
178 except ValueError:
179 if rcParams['verbose']:
180 #print "Couldn't find any match."
181 print_log()
182 asaplog.push( "Couldn't find any match." )
183 print_log( 'ERROR' )
184 return
185 else: raise
186 try:
187 bsel = self.get_selection()
188 sel = selector()
189 sel.set_scans(allscans)
190 self.set_selection(bsel+sel)
191 scopy = self._copy()
192 self.set_selection(bsel)
193 return scantable(scopy)
194 except RuntimeError:
195 if rcParams['verbose']:
196 #print "Couldn't find any match."
197 print_log()
198 asaplog.push( "Couldn't find any match." )
199 print_log( 'ERROR' )
200 else:
201 raise
202
203
204 def get_scan(self, scanid=None):
205 """
206 Return a specific scan (by scanno) or collection of scans (by
207 source name) in a new scantable.
208 Note:
209 See scantable.drop_scan() for the inverse operation.
210 Parameters:
211 scanid: a (list of) scanno or a source name, unix-style
212 patterns are accepted for source name matching, e.g.
213 '*_R' gets all 'ref' scans
214 Example:
215 # get all scans containing the source '323p459'
216 newscan = scan.get_scan('323p459')
217 # get all 'off' scans
218 refscans = scan.get_scan('*_R')
219 # get a subset of scans by scanno (as listed in scan.summary())
220 newscan = scan.get_scan([0, 2, 7, 10])
221 """
222 if scanid is None:
223 if rcParams['verbose']:
224 #print "Please specify a scan no or name to " \
225 # "retrieve from the scantable"
226 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
227 print_log( 'ERROR' )
228 return
229 else:
230 raise RuntimeError("No scan given")
231
232 try:
233 bsel = self.get_selection()
234 sel = selector()
235 if type(scanid) is str:
236 sel.set_name(scanid)
237 self.set_selection(bsel+sel)
238 scopy = self._copy()
239 self.set_selection(bsel)
240 return scantable(scopy)
241 elif type(scanid) is int:
242 sel.set_scans([scanid])
243 self.set_selection(bsel+sel)
244 scopy = self._copy()
245 self.set_selection(bsel)
246 return scantable(scopy)
247 elif type(scanid) is list:
248 sel.set_scans(scanid)
249 self.set_selection(sel)
250 scopy = self._copy()
251 self.set_selection(bsel)
252 return scantable(scopy)
253 else:
254 msg = "Illegal scanid type, use 'int' or 'list' of ints."
255 if rcParams['verbose']:
256 #print msg
257 asaplog.push( msg )
258 print_log( 'ERROR' )
259 else:
260 raise TypeError(msg)
261 except RuntimeError:
262 if rcParams['verbose']:
263 #print "Couldn't find any match."
264 print_log()
265 asaplog.push( "Couldn't find any match." )
266 print_log( 'ERROR' )
267 else: raise
268
269 def __str__(self):
270 return Scantable._summary(self, True)
271
272 def summary(self, filename=None):
273 """
274 Print a summary of the contents of this scantable.
275 Parameters:
276 filename: the name of a file to write the output to
277 Default - no file output
278 verbose: print extra info such as the frequency table
279 The default (False) is taken from .asaprc
280 """
281 info = Scantable._summary(self, True)
282 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
283 if filename is not None:
284 if filename == "":
285 filename = 'scantable_summary.txt'
286 from os.path import expandvars, isdir
287 filename = expandvars(filename)
288 if not isdir(filename):
289 data = open(filename, 'w')
290 data.write(info)
291 data.close()
292 else:
293 msg = "Illegal file name '%s'." % (filename)
294 if rcParams['verbose']:
295 #print msg
296 asaplog.push( msg )
297 print_log( 'ERROR' )
298 else:
299 raise IOError(msg)
300 if rcParams['verbose']:
301 try:
302 from IPython.genutils import page as pager
303 except ImportError:
304 from pydoc import pager
305 pager(info)
306 else:
307 return info
308
309 def get_spectrum(self, rowno):
310 """Return the spectrum for the current row in the scantable as a list.
311 Parameters:
312 rowno: the row number to retrieve the spectrum from
313 """
314 return self._getspectrum(rowno)
315
316 def get_mask(self, rowno):
317 """Return the mask for the current row in the scantable as a list.
318 Parameters:
319 rowno: the row number to retrieve the mask from
320 """
321 return self._getmask(rowno)
322
323 def set_spectrum(self, spec, rowno):
324 """Set the spectrum for the specified row in the scantable.
325 Parameters:
326 spec: the spectrum
327 rowno: the row number to set the spectrum for
328 """
329 assert(len(spec) == self.nchan())
330 return self._setspectrum(spec, rowno)
331
332 def get_selection(self):
333 """
334 Get the selection object currently set on this scantable.
335 Parameters:
336 none
337 Example:
338 sel = scan.get_selection()
339 sel.set_ifs(0) # select IF 0
340 scan.set_selection(sel) # apply modified selection
341 """
342 return selector(self._getselection())
343
344 def set_selection(self, selection=selector()):
345 """
346 Select a subset of the data. All following operations on this scantable
347 are only applied to this selection.
348 Parameters:
349 selection: a selector object (default unset the selection)
350 Examples:
351 sel = selector() # create a selection object
352 sel.set_scans([0, 3]) # select SCANNO 0 and 3
353 scan.set_selection(sel) # set the selection
354 scan.summary() # will only print summary of scanno 0 and 3
355 scan.set_selection() # unset the selection
356 """
357 self._setselection(selection)
358
359 def get_row(self, row=0, insitu=None):
360 """
361 Select a row in the scantable.
362 Return a scantable with single row.
363 Parameters:
364 row: row no of integration, default is 0.
365 insitu: if False a new scantable is returned.
366 Otherwise, the scaling is done in-situ
367 The default is taken from .asaprc (False)
368 """
369 if insitu is None: insitu = rcParams['insitu']
370 if not insitu:
371 workscan = self.copy()
372 else:
373 workscan = self
374 # Select a row
375 sel=selector()
376 sel.set_scans([workscan.getscan(row)])
377 sel.set_cycles([workscan.getcycle(row)])
378 sel.set_beams([workscan.getbeam(row)])
379 sel.set_ifs([workscan.getif(row)])
380 sel.set_polarisations([workscan.getpol(row)])
381 sel.set_name(workscan._getsourcename(row))
382 workscan.set_selection(sel)
383 if not workscan.nrow() == 1:
384 msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
385 raise RuntimeError(msg)
386 del sel
387 if insitu:
388 self._assign(workscan)
389 else:
390 return workscan
391
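# Usage sketch for get_row() (assumes an existing scantable 'scan'; the row
# number is arbitrary):
#
#   row5 = scan.get_row(5, insitu=False)   # new scantable holding only row 5
#   print row5.nrow()                      # -> 1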
392 def stats(self, stat='stddev', mask=None):
393 """
394 Determine the specified statistic of the current beam/if/pol
395 Takes a 'mask' as an optional parameter to specify which
396 channels should be excluded.
397 Parameters:
398 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
399 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
400 mask: an optional mask specifying where the statistic
401 should be determined.
402 Example:
403 scan.set_unit('channel')
404 msk = scan.create_mask([100, 200], [500, 600])
405 scan.stats(stat='mean', mask=msk)
406 """
407 if mask == None:
408 mask = []
409 axes = ['Beam', 'IF', 'Pol', 'Time']
410 if not self._check_ifs():
411 raise ValueError("Cannot apply mask as the IFs have different "
412 "number of channels. Please use setselection() "
413 "to select individual IFs")
414 rtnabc = False
415 if stat.lower().endswith('_abc'): rtnabc = True
416 getchan = False
417 if stat.lower().startswith('min') or stat.lower().startswith('max'):
418 chan = self._math._minmaxchan(self, mask, stat)
419 getchan = True
420 statvals = []
421 if not rtnabc: statvals = self._math._stats(self, mask, stat)
422
423 out = ''
424 axes = []
425 for i in range(self.nrow()):
426 axis = []
427 axis.append(self.getscan(i))
428 axis.append(self.getbeam(i))
429 axis.append(self.getif(i))
430 axis.append(self.getpol(i))
431 axis.append(self.getcycle(i))
432 axes.append(axis)
433 tm = self._gettime(i)
434 src = self._getsourcename(i)
435 refstr = ''
436 statunit= ''
437 if getchan:
438 qx, qy = self.chan2data(rowno=i, chan=chan[i])
439 if rtnabc:
440 statvals.append(qx['value'])
441 refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
442 statunit= '['+qx['unit']+']'
443 else:
444 refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
445 #statunit= ' ['+qy['unit']+']'
446 out += 'Scan[%d] (%s) ' % (axis[0], src)
447 out += 'Time[%s]:\n' % (tm)
448 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
449 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
450 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
451 out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
452 out += "--------------------------------------------------\n"
453
454 if rcParams['verbose']:
455 import os; usr = os.environ['USER']
456 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
457 f=open(tmpfile,'w')
458 print >> f, "--------------------------------------------------"
459 print >> f, " ", stat, statunit
460 print >> f, "--------------------------------------------------"
461 print >> f, out
462 f.close()
463 f=open(tmpfile,'r')
464 x=f.readlines()
465 f.close()
466 for xx in x:
467 asaplog.push( xx )
468 print_log()
469 #else:
470 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
471 # 'axes' : axes,
472 # 'data': statvals}
473 return statvals
474
475 def chan2data(self, rowno=0, chan=0):
476 """
477 Returns channel/frequency/velocity and spectral value
478 at an arbitrary row and channel in the scantable.
479 Parameters:
480 rowno: a row number in the scantable. Default is the
481 first row, i.e. rowno=0
482 chan: a channel in the scantable. Default is the first
483 channel, i.e. chan=0
484 """
485 if isinstance(rowno, int) and isinstance(chan, int):
486 qx = {'unit': self.get_unit(),
487 'value': self._getabcissa(rowno)[chan]}
488 qy = {'unit': self.get_fluxunit(),
489 'value': self._getspectrum(rowno)[chan]}
490 return qx, qy
491
492 def stddev(self, mask=None):
493 """
494 Determine the standard deviation of the current beam/if/pol
495 Takes a 'mask' as an optional parameter to specify which
496 channels should be excluded.
497 Parameters:
498 mask: an optional mask specifying where the standard
499 deviation should be determined.
500
501 Example:
502 scan.set_unit('channel')
503 msk = scan.create_mask([100, 200], [500, 600])
504 scan.stddev(mask=msk)
505 """
506 return self.stats(stat='stddev', mask=mask)
507
508
509 def get_column_names(self):
510 """
511 Return a list of column names, which can be used for selection.
512 """
513 return list(Scantable.get_column_names(self))
514
515 def get_tsys(self):
516 """
517 Return the System temperatures.
518 Returns:
519 a list of Tsys values for the current selection
520 """
521
522 return self._row_callback(self._gettsys, "Tsys")
523
524 def _row_callback(self, callback, label):
525 axes = []
526 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
527 out = ""
528 outvec = []
529 for i in range(self.nrow()):
530 axis = []
531 axis.append(self.getscan(i))
532 axis.append(self.getbeam(i))
533 axis.append(self.getif(i))
534 axis.append(self.getpol(i))
535 axis.append(self.getcycle(i))
536 axes.append(axis)
537 tm = self._gettime(i)
538 src = self._getsourcename(i)
539 out += 'Scan[%d] (%s) ' % (axis[0], src)
540 out += 'Time[%s]:\n' % (tm)
541 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
542 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
543 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
544 outvec.append(callback(i))
545 out += '= %3.3f\n' % (outvec[i])
546 out += "--------------------------------------------------\n"
547 if rcParams['verbose']:
548 import os; usr = os.environ['USER']
549 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_row_callback'
550 f=open(tmpfile,'w')
551 print >> f, "--------------------------------------------------"
552 print >> f, " %s" % (label)
553 print >> f, "--------------------------------------------------"
554 print >> f, out
555 f.close()
556 f=open(tmpfile,'r')
557 x=f.readlines()
558 f.close()
559 for xx in x:
560 asaplog.push( xx )
561 print_log()
562 # disabled because the vector seems more useful
563 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
564 return outvec
565
566 def _get_column(self, callback, row=-1):
567 """
568 """
569 if row == -1:
570 return [callback(i) for i in range(self.nrow())]
571 else:
572 if 0 <= row < self.nrow():
573 return callback(row)
574
575
576 def get_time(self, row=-1, asdatetime=False):
577 """
578 Get a list of time stamps for the observations.
579 Return a datetime object for each integration time stamp in the scantable.
580 Parameters:
581 row: row no of integration. Default -1 return all rows
582 asdatetime: return values as datetime objects rather than strings
583 Example:
584 none
585 """
586 from time import strptime
587 from datetime import datetime
588 times = self._get_column(self._gettime, row)
589 if not asdatetime:
590 return times
591 format = "%Y/%m/%d/%H:%M:%S"
592 if isinstance(times, list):
593 return [datetime(*strptime(i, format)[:6]) for i in times]
594 else:
595 return datetime(*strptime(times, format)[:6])
596
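# Usage sketch for get_time() (assumes an existing scantable 'scan'):
#
#   stamps = scan.get_time()                     # list of time strings, all rows
#   dt = scan.get_time(row=0, asdatetime=True)   # datetime object for the first row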
597
598 def get_inttime(self, row=-1):
599 """
600 Get a list of integration times for the observations.
601 Return a time in seconds for each integration in the scantable.
602 Parameters:
603 row: row no of integration. Default -1 return all rows.
604 Example:
605 none
606 """
607 return self._get_column(self._getinttime, row)
608
609
610 def get_sourcename(self, row=-1):
611 """
612 Get a list of source names for the observations.
613 Return a string for each integration in the scantable.
614 Parameters:
615 row: row no of integration. Default -1 return all rows.
616 Example:
617 none
618 """
619 return self._get_column(self._getsourcename, row)
620
621 def get_elevation(self, row=-1):
622 """
623 Get a list of elevations for the observations.
624 Return a float for each integration in the scantable.
625 Parameters:
626 row: row no of integration. Default -1 return all rows.
627 Example:
628 none
629 """
630 return self._get_column(self._getelevation, row)
631
632 def get_azimuth(self, row=-1):
633 """
634 Get a list of azimuths for the observations.
635 Return a float for each integration in the scantable.
636 Parameters:
637 row: row no of integration. Default -1 return all rows.
638 Example:
639 none
640 """
641 return self._get_column(self._getazimuth, row)
642
643 def get_parangle(self, row=-1):
644 """
645 Get a list of parallactic angles for the observations.
646 Return a float for each integration in the scantable.
647 Parameters:
648 row: row no of integration. Default -1 return all rows.
649 Example:
650 none
651 """
652 return self._get_column(self._getparangle, row)
653
654 def get_direction(self, row=-1):
655 """
656 Get a list of Positions on the sky (direction) for the observations.
657 Return a float for each integration in the scantable.
658 Parameters:
659 row: row no of integration. Default -1 return all rows
660 Example:
661 none
662 """
663 return self._get_column(self._getdirection, row)
664
665 def get_directionval(self, row=-1):
666 """
667 Get a list of Positions on the sky (direction) for the observations.
668 Return a float for each integration in the scantable.
669 Parameters:
670 row: row no of integration. Default -1 return all rows
671 Example:
672 none
673 """
674 return self._get_column(self._getdirectionvec, row)
675
676 def set_unit(self, unit='channel'):
677 """
678 Set the unit for all following operations on this scantable
679 Parameters:
680 unit: optional unit, default is 'channel'
681 one of '*Hz', 'km/s', 'channel', ''
682 """
683 varlist = vars()
684 if unit in ['', 'pixel', 'channel']:
685 unit = ''
686 inf = list(self._getcoordinfo())
687 inf[0] = unit
688 self._setcoordinfo(inf)
689 self._add_history("set_unit", varlist)
690
691 def set_instrument(self, instr):
692 """
693 Set the instrument for subsequent processing.
694 Parameters:
695 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
696 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
697 """
698 self._setInstrument(instr)
699 self._add_history("set_instrument", vars())
700 print_log()
701
702 def set_feedtype(self, feedtype):
703 """
704 Overwrite the feed type, which might not be set correctly.
705 Parameters:
706 feedtype: 'linear' or 'circular'
707 """
708 self._setfeedtype(feedtype)
709 self._add_history("set_feedtype", vars())
710 print_log()
711
712 def set_doppler(self, doppler='RADIO'):
713 """
714 Set the doppler for all following operations on this scantable.
715 Parameters:
716 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
717 """
718 varlist = vars()
719 inf = list(self._getcoordinfo())
720 inf[2] = doppler
721 self._setcoordinfo(inf)
722 self._add_history("set_doppler", vars())
723 print_log()
724
725 def set_freqframe(self, frame=None):
726 """
727 Set the frame type of the Spectral Axis.
728 Parameters:
729 frame: an optional frame type, default 'LSRK'. Valid frames are:
730 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
731 'GEO', 'GALACTO', 'LGROUP', 'CMB'
732 Examples:
733 scan.set_freqframe('BARY')
734 """
735 if frame is None: frame = rcParams['scantable.freqframe']
736 varlist = vars()
737 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
738 'GEO', 'GALACTO', 'LGROUP', 'CMB']
739
740 if frame in valid:
741 inf = list(self._getcoordinfo())
742 inf[1] = frame
743 self._setcoordinfo(inf)
744 self._add_history("set_freqframe", varlist)
745 else:
746 msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
747 if rcParams['verbose']:
748 #print msg
749 asaplog.push( msg )
750 print_log( 'ERROR' )
751 else:
752 raise TypeError(msg)
753 print_log()
754
755 def set_dirframe(self, frame=""):
756 """
757 Set the frame type of the Direction on the sky.
758 Parameters:
759 frame: an optional frame type, default ''. Valid frames are:
760 'J2000', 'B1950', 'GALACTIC'
761 Examples:
762 scan.set_dirframe('GALACTIC')
763 """
764 varlist = vars()
765 try:
766 Scantable.set_dirframe(self, frame)
767 except RuntimeError, msg:
768 if rcParams['verbose']:
769 #print msg
770 print_log()
771 asaplog.push( msg )
772 print_log( 'ERROR' )
773 else:
774 raise
775 self._add_history("set_dirframe", varlist)
776
777 def get_unit(self):
778 """
779 Get the default unit set in this scantable
780 Returns:
781 A unit string
782 """
783 inf = self._getcoordinfo()
784 unit = inf[0]
785 if unit == '': unit = 'channel'
786 return unit
787
788 def get_abcissa(self, rowno=0):
789 """
790 Get the abcissa in the current coordinate setup for the currently
791 selected Beam/IF/Pol
792 Parameters:
793 rowno: an optional row number in the scantable. Default is the
794 first row, i.e. rowno=0
795 Returns:
796 The abcissa values and the format string (as a dictionary)
797 """
798 abc = self._getabcissa(rowno)
799 lbl = self._getabcissalabel(rowno)
800 print_log()
801 return abc, lbl
802
803 def flag(self, mask=None, unflag=False):
804 """
805 Flag the selected data using an optional channel mask.
806 Parameters:
807 mask: an optional channel mask, created with create_mask. Default
808 (no mask) is all channels.
809 unflag: if True, unflag the data
810 """
811 varlist = vars()
812 if mask is None:
813 mask = []
814 try:
815 self._flag(mask, unflag)
816 except RuntimeError, msg:
817 if rcParams['verbose']:
818 #print msg
819 print_log()
820 asaplog.push( msg )
821 print_log( 'ERROR' )
822 return
823 else: raise
824 self._add_history("flag", varlist)
825
826 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
827 """
828 Flag the data in 'lag' space by providing a frequency to remove.
829 Flagged data in the scantable gets set to 0.0 before the fft.
830 No taper is applied.
831 Parameters:
832 frequency: the frequency (really a period within the bandwidth)
833 to remove
834 width: the width of the frequency to remove, to remove a
835 range of frequencies around the centre.
836 unit: the frequency unit (default "GHz")
837 Notes:
838 It is recommended to flag edges of the band or strong
839 signals beforehand.
840 """
841 if insitu is None: insitu = rcParams['insitu']
842 self._math._setinsitu(insitu)
843 varlist = vars()
844 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
845 if not base.has_key(unit):
846 raise ValueError("%s is not a valid unit." % unit)
847 try:
848 s = scantable(self._math._lag_flag(self, frequency*base[unit],
849 width*base[unit]))
850 except RuntimeError, msg:
851 if rcParams['verbose']:
852 #print msg
853 print_log()
854 asaplog.push( msg )
855 print_log( 'ERROR' )
856 return
857 else: raise
858 s._add_history("lag_flag", varlist)
859 print_log()
860 if insitu:
861 self._assign(s)
862 else:
863 return s
864
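# Usage sketch for lag_flag() (assumes an existing scantable 'scan'; the
# frequency, width and unit values are illustrative only):
#
#   cleaned = scan.lag_flag(frequency=2.5, width=0.1, unit='MHz', insitu=False)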
865
866 def create_mask(self, *args, **kwargs):
867 """
868 Compute and return a mask based on [min, max] windows.
869 The specified windows are to be INCLUDED, when the mask is
870 applied.
871 Parameters:
872 [min, max], [min2, max2], ...
873 Pairs of start/end points (inclusive) specifying the regions
874 to be masked
875 invert: optional argument. If specified as True,
876 return an inverted mask, i.e. the regions
877 specified are EXCLUDED
878 row: create the mask using the specified row for
879 unit conversions, default is row=0
880 only necessary if frequency varies over rows.
881 Example:
882 scan.set_unit('channel')
883 a)
884 msk = scan.create_mask([400, 500], [800, 900])
885 # masks everything outside 400 and 500
886 # and 800 and 900 in the unit 'channel'
887
888 b)
889 msk = scan.create_mask([400, 500], [800, 900], invert=True)
890 # masks the regions between 400 and 500
891 # and 800 and 900 in the unit 'channel'
892 c)
893 mask only channel 400
894 msk = scan.create_mask([400, 400])
895 """
896 row = 0
897 if kwargs.has_key("row"):
898 row = kwargs.get("row")
899 data = self._getabcissa(row)
900 u = self._getcoordinfo()[0]
901 if rcParams['verbose']:
902 if u == "": u = "channel"
903 msg = "The current mask window unit is %s" % u
904 i = self._check_ifs()
905 if not i:
906 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
907 asaplog.push(msg)
908 n = self.nchan()
909 msk = _n_bools(n, False)
910 # test if args is a 'list' or a 'normal *args - UGLY!!!
911
912 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
913 and args or args[0]
914 for window in ws:
915 if (len(window) != 2 or window[0] > window[1] ):
916 raise TypeError("A window needs to be defined as [min, max]")
917 for i in range(n):
918 if data[i] >= window[0] and data[i] <= window[1]:
919 msk[i] = True
920 if kwargs.has_key('invert'):
921 if kwargs.get('invert'):
922 msk = mask_not(msk)
923 print_log()
924 return msk
925
926 def get_masklist(self, mask=None, row=0):
927 """
928 Compute and return a list of mask windows, [min, max].
929 Parameters:
930 mask: channel mask, created with create_mask.
931 row: calculate the masklist using the specified row
932 for unit conversions, default is row=0
933 only necessary if frequency varies over rows.
934 Returns:
935 [min, max], [min2, max2], ...
936 Pairs of start/end points (inclusive) specifying
937 the masked regions
938 """
939 if not (isinstance(mask,list) or isinstance(mask, tuple)):
940 raise TypeError("The mask should be list or tuple.")
941 if len(mask) < 2:
942 raise TypeError("The mask elements should be > 1")
943 if self.nchan() != len(mask):
944 msg = "Number of channels in scantable != number of mask elements"
945 raise TypeError(msg)
946 data = self._getabcissa(row)
947 u = self._getcoordinfo()[0]
948 if rcParams['verbose']:
949 if u == "": u = "channel"
950 msg = "The current mask window unit is %s" % u
951 i = self._check_ifs()
952 if not i:
953 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
954 asaplog.push(msg)
955 masklist=[]
956 ist, ien = None, None
957 ist, ien=self.get_mask_indices(mask)
958 if ist is not None and ien is not None:
959 for i in xrange(len(ist)):
960 range=[data[ist[i]],data[ien[i]]]
961 range.sort()
962 masklist.append([range[0],range[1]])
963 return masklist
964
965 def get_mask_indices(self, mask=None):
966 """
967 Compute and Return lists of mask start indices and mask end indices.
968 Parameters:
969 mask: channel mask, created with create_mask.
970 Returns:
971 List of mask start indices and that of mask end indices,
972 i.e., [istart1,istart2,....], [iend1,iend2,....].
973 """
974 if not (isinstance(mask,list) or isinstance(mask, tuple)):
975 raise TypeError("The mask should be list or tuple.")
976 if len(mask) < 2:
977 raise TypeError("The mask elements should be > 1")
978 istart=[]
979 iend=[]
980 if mask[0]: istart.append(0)
981 for i in range(len(mask)-1):
982 if not mask[i] and mask[i+1]:
983 istart.append(i+1)
984 elif mask[i] and not mask[i+1]:
985 iend.append(i)
986 if mask[len(mask)-1]: iend.append(len(mask)-1)
987 if len(istart) != len(iend):
988 raise RuntimeError("Numbers of mask start != mask end.")
989 for i in range(len(istart)):
990 if istart[i] > iend[i]:
991 raise RuntimeError("Mask start index > mask end index")
992 break
993 return istart,iend
994
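# Usage sketch for get_masklist()/get_mask_indices() (assumes an existing
# scantable 'scan'; the channel ranges are illustrative):
#
#   msk = scan.create_mask([100, 200], [400, 500])
#   print scan.get_masklist(msk)       # e.g. [[100.0, 200.0], [400.0, 500.0]]
#   print scan.get_mask_indices(msk)   # e.g. ([100, 400], [200, 500])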
995# def get_restfreqs(self):
996# """
997# Get the restfrequency(s) stored in this scantable.
998# The return value(s) are always of unit 'Hz'
999# Parameters:
1000# none
1001# Returns:
1002# a list of doubles
1003# """
1004# return list(self._getrestfreqs())
1005
1006 def get_restfreqs(self, ids=None):
1007 """
1008 Get the restfrequency(s) stored in this scantable.
1009 The return value(s) are always of unit 'Hz'
1010 Parameters:
1011 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1012 be retrieved
1013 Returns:
1014 dictionary containing ids and a list of doubles for each id
1015 """
1016 if ids is None:
1017 rfreqs={}
1018 idlist = self.getmolnos()
1019 for i in idlist:
1020 rfreqs[i]=list(self._getrestfreqs(i))
1021 return rfreqs
1022 else:
1023 if type(ids)==list or type(ids)==tuple:
1024 rfreqs={}
1025 for i in ids:
1026 rfreqs[i]=list(self._getrestfreqs(i))
1027 return rfreqs
1028 else:
1029 return list(self._getrestfreqs(ids))
1030 #return list(self._getrestfreqs(ids))
1031
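# Usage sketch for get_restfreqs() (assumes an existing scantable 'scan'):
#
#   allfreqs = scan.get_restfreqs()    # {molecule_id: [freq_in_Hz, ...], ...}
#   one = scan.get_restfreqs(ids=0)    # list of rest frequencies for MOLECULE_ID 0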
1032 def set_restfreqs(self, freqs=None, unit='Hz'):
1033 """
1034 ********NEED TO BE UPDATED begin************
1035 Set or replace the restfrequency specified and
1036 If the 'freqs' argument holds a scalar,
1037 then that rest frequency will be applied to all the selected
1038 data. If the 'freqs' argument holds
1039 a vector, then it MUST be of equal or smaller length than
1040 the number of IFs (and the available restfrequencies will be
1041 replaced by this vector). In this case, *all* data have
1042 the restfrequency set per IF according
1043 to the corresponding value you give in the 'freqs' vector.
1044 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1045 IF 1 gets restfreq 2e9.
1046 ********NEED TO BE UPDATED end************
1047 You can also specify the frequencies via a linecatalog.
1048
1049 Parameters:
1050 freqs: list of rest frequency values or string identifiers
1051 unit: unit for rest frequency (default 'Hz')
1052
1053 Example:
1054 # set the given restfrequency for the all currently selected IFs
1055 scan.set_restfreqs(freqs=1.4e9)
1056 # set multiple restfrequencies to all the selected data
1057 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1058 # If the number of IFs in the data is >= 2, IF0 gets the first
1059 # value, IF1 the second, and so on. NOTE that freqs needs to be
1060 # specified as a list of lists (e.g. [[],[],...] ).
1061 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
1062 #set the given restfrequency for the whole table (by name)
1063 scan.set_restfreqs(freqs="OH1667")
1064
1065 Note:
1066 To do more sophisticated rest-frequency setting, e.g. on a
1067 source and IF basis, use scantable.set_selection() before using
1068 this function.
1069 # provided your scantable is called scan
1070 selection = selector()
1071 selection.set_name("ORION*")
1072 selection.set_ifs([1])
1073 scan.set_selection(selection)
1074 scan.set_restfreqs(freqs=86.6e9)
1075
1076 """
1077 varlist = vars()
1078 from asap import linecatalog
1079 # simple value
1080 if isinstance(freqs, int) or isinstance(freqs, float):
1081 # TT mod
1082 #self._setrestfreqs(freqs, "",unit)
1083 self._setrestfreqs([freqs], [""],unit)
1084 # list of values
1085 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1086 # list values are scalars
1087 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1088 self._setrestfreqs(freqs, [""],unit)
1089 # list values are tuples, (value, name)
1090 elif isinstance(freqs[-1], dict):
1091 #sel = selector()
1092 #savesel = self._getselection()
1093 #iflist = self.getifnos()
1094 #for i in xrange(len(freqs)):
1095 # sel.set_ifs(iflist[i])
1096 # self._setselection(sel)
1097 # self._setrestfreqs(freqs[i], "",unit)
1098 #self._setselection(savesel)
1099 self._setrestfreqs(freqs["value"],
1100 freqs["name"], "MHz")
1101 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1102 sel = selector()
1103 savesel = self._getselection()
1104 iflist = self.getifnos()
1105 if len(freqs)>len(iflist):
1106 raise ValueError("number of elements in list of list exceeds the current IF selections")
1107 for i in xrange(len(freqs)):
1108 sel.set_ifs(iflist[i])
1109 self._setselection(sel)
1110 self._setrestfreqs(freqs[i]["value"],
1111 freqs[i]["name"], "MHz")
1112 self._setselection(savesel)
1113 # freqs are to be taken from a linecatalog
1114 elif isinstance(freqs, linecatalog):
1115 sel = selector()
1116 savesel = self._getselection()
1117 for i in xrange(freqs.nrow()):
1118 sel.set_ifs(self.getifnos()[i])
1119 self._setselection(sel)
1120 self._setrestfreqs(freqs.get_frequency(i),
1121 freqs.get_name(i), "MHz")
1122 # ensure that we are not iterating past nIF
1123 if i == self.nif()-1: break
1124 self._setselection(savesel)
1125 else:
1126 return
1127 self._add_history("set_restfreqs", varlist)
1128
1129 def shift_refpix(self, delta):
1130 """
1131 Shift the reference pixel of the Spectra Coordinate by an
1132 integer amount.
1133 Parameters:
1134 delta: the amount to shift by
1135 Note:
1136 Be careful using this with broadband data.
1137 """
1138 Scantable.shift(self, delta)
1139
1140 def history(self, filename=None):
1141 """
1142 Print the history. Optionally to a file.
1143 Parameters:
1144 filename: The name of the file to save the history to.
1145 """
1146 hist = list(self._gethistory())
1147 out = "-"*80
1148 for h in hist:
1149 if h.startswith("---"):
1150 out += "\n"+h
1151 else:
1152 items = h.split("##")
1153 date = items[0]
1154 func = items[1]
1155 items = items[2:]
1156 out += "\n"+date+"\n"
1157 out += "Function: %s\n Parameters:" % (func)
1158 for i in items:
1159 s = i.split("=")
1160 out += "\n %s = %s" % (s[0], s[1])
1161 out += "\n"+"-"*80
1162 if filename is not None:
1163 if filename == "":
1164 filename = 'scantable_history.txt'
1165 import os
1166 filename = os.path.expandvars(os.path.expanduser(filename))
1167 if not os.path.isdir(filename):
1168 data = open(filename, 'w')
1169 data.write(out)
1170 data.close()
1171 else:
1172 msg = "Illegal file name '%s'." % (filename)
1173 if rcParams['verbose']:
1174 #print msg
1175 asaplog.push( msg )
1176 print_log( 'ERROR' )
1177 else:
1178 raise IOError(msg)
1179 if rcParams['verbose']:
1180 try:
1181 from IPython.genutils import page as pager
1182 except ImportError:
1183 from pydoc import pager
1184 pager(out)
1185 else:
1186 return out
1187 return
1188 #
1189 # Maths business
1190 #
1191
1192 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1193 """
1194 Return the (time) weighted average of a scan.
1195 Note:
1196 in channels only - align if necessary
1197 Parameters:
1198 mask: an optional mask (only used for 'var' and 'tsys'
1199 weighting)
1200 scanav: True averages each scan separately
1201 False (default) averages all scans together,
1202 weight: Weighting scheme.
1203 'none' (mean no weight)
1204 'var' (1/var(spec) weighted)
1205 'tsys' (1/Tsys**2 weighted)
1206 'tint' (integration time weighted)
1207 'tintsys' (Tint/Tsys**2)
1208 'median' ( median averaging)
1209 The default is 'tint'
1210 align: align the spectra in velocity before averaging. It takes
1211 the time of the first spectrum as reference time.
1212 Example:
1213 # time average the scantable without using a mask
1214 newscan = scan.average_time()
1215 """
1216 varlist = vars()
1217 if weight is None: weight = 'TINT'
1218 if mask is None: mask = ()
1219 if scanav: scanav = "SCAN"
1220 else: scanav = "NONE"
1221 scan = (self, )
1222 try:
1223 if align:
1224 scan = (self.freq_align(insitu=False), )
1225 s = None
1226 if weight.upper() == 'MEDIAN':
1227 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1228 scanav))
1229 else:
1230 s = scantable(self._math._average(scan, mask, weight.upper(),
1231 scanav))
1232 except RuntimeError, msg:
1233 if rcParams['verbose']:
1234 #print msg
1235 print_log()
1236 asaplog.push( msg )
1237 print_log( 'ERROR' )
1238 return
1239 else: raise
1240 s._add_history("average_time", varlist)
1241 print_log()
1242 return s
1243
1244 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1245 """
1246 Return a scan where all spectra are converted to either
1247 Jansky or Kelvin depending upon the flux units of the scan table.
1248 By default the function tries to look the values up internally.
1249 If it can't find them (or if you want to over-ride), you must
1250 specify EITHER jyperk OR eta (and D which it will try to look up
1251 also if you don't set it). jyperk takes precedence if you set both.
1252 Parameters:
1253 jyperk: the Jy / K conversion factor
1254 eta: the aperture efficiency
1255 d: the geometric diameter (metres)
1256 insitu: if False a new scantable is returned.
1257 Otherwise, the scaling is done in-situ
1258 The default is taken from .asaprc (False)
1259 """
1260 if insitu is None: insitu = rcParams['insitu']
1261 self._math._setinsitu(insitu)
1262 varlist = vars()
1263 if jyperk is None: jyperk = -1.0
1264 if d is None: d = -1.0
1265 if eta is None: eta = -1.0
1266 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1267 s._add_history("convert_flux", varlist)
1268 print_log()
1269 if insitu: self._assign(s)
1270 else: return s
1271
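# Usage sketch for convert_flux() (assumes an existing scantable 'scan'; the
# Jy/K factor, efficiency and dish diameter are illustrative values, not
# telescope defaults):
#
#   jyscan = scan.convert_flux(jyperk=12.5, insitu=False)        # explicit Jy/K factor
#   kscan  = scan.convert_flux(eta=0.58, d=64.0, insitu=False)   # via efficiency + diameter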
1272 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1273 """
1274 Return a scan after applying a gain-elevation correction.
1275 The correction can be made via either a polynomial or a
1276 table-based interpolation (and extrapolation if necessary).
1277 You specify polynomial coefficients, an ascii table or neither.
1278 If you specify neither, then a polynomial correction will be made
1279 with built in coefficients known for certain telescopes (an error
1280 will occur if the instrument is not known).
1281 The data and Tsys are *divided* by the scaling factors.
1282 Parameters:
1283 poly: Polynomial coefficients (default None) to compute a
1284 gain-elevation correction as a function of
1285 elevation (in degrees).
1286 filename: The name of an ascii file holding correction factors.
1287 The first row of the ascii file must give the column
1288 names and these MUST include columns
1289 "ELEVATION" (degrees) and "FACTOR" (multiply data
1290 by this) somewhere.
1291 The second row must give the data type of the
1292 column. Use 'R' for Real and 'I' for Integer.
1293 An example file would be
1294 (actual factors are arbitrary) :
1295
1296 TIME ELEVATION FACTOR
1297 R R R
1298 0.1 0 0.8
1299 0.2 20 0.85
1300 0.3 40 0.9
1301 0.4 60 0.85
1302 0.5 80 0.8
1303 0.6 90 0.75
1304 method: Interpolation method when correcting from a table.
1305 Values are "nearest", "linear" (default), "cubic"
1306 and "spline"
1307 insitu: if False a new scantable is returned.
1308 Otherwise, the scaling is done in-situ
1309 The default is taken from .asaprc (False)
1310 """
1311
1312 if insitu is None: insitu = rcParams['insitu']
1313 self._math._setinsitu(insitu)
1314 varlist = vars()
1315 if poly is None:
1316 poly = ()
1317 from os.path import expandvars
1318 filename = expandvars(filename)
1319 s = scantable(self._math._gainel(self, poly, filename, method))
1320 s._add_history("gain_el", varlist)
1321 print_log()
1322 if insitu: self._assign(s)
1323 else: return s
1324
1325 def freq_align(self, reftime=None, method='cubic', insitu=None):
1326 """
1327 Return a scan where all rows have been aligned in frequency/velocity.
1328 The alignment frequency frame (e.g. LSRK) is that set by function
1329 set_freqframe.
1330 Parameters:
1331 reftime: reference time to align at. By default, the time of
1332 the first row of data is used.
1333 method: Interpolation method for regridding the spectra.
1334 Choose from "nearest", "linear", "cubic" (default)
1335 and "spline"
1336 insitu: if False a new scantable is returned.
1337 Otherwise, the scaling is done in-situ
1338 The default is taken from .asaprc (False)
1339 """
1340 if insitu is None: insitu = rcParams["insitu"]
1341 self._math._setinsitu(insitu)
1342 varlist = vars()
1343 if reftime is None: reftime = ""
1344 s = scantable(self._math._freq_align(self, reftime, method))
1345 s._add_history("freq_align", varlist)
1346 print_log()
1347 if insitu: self._assign(s)
1348 else: return s
1349
1350 def opacity(self, tau, insitu=None):
1351 """
1352 Apply an opacity correction. The data
1353 and Tsys are multiplied by the correction factor.
1354 Parameters:
1355 tau: Opacity from which the correction factor is
1356 exp(tau*ZD)
1357 where ZD is the zenith-distance
1358 insitu: if False a new scantable is returned.
1359 Otherwise, the scaling is done in-situ
1360 The default is taken from .asaprc (False)
1361 """
1362 if insitu is None: insitu = rcParams['insitu']
1363 self._math._setinsitu(insitu)
1364 varlist = vars()
1365 s = scantable(self._math._opacity(self, tau))
1366 s._add_history("opacity", varlist)
1367 print_log()
1368 if insitu: self._assign(s)
1369 else: return s
1370
1371 def bin(self, width=5, insitu=None):
1372 """
1373 Return a scan where all spectra have been binned up.
1374 Parameters:
1375 width: The bin width (default=5) in pixels
1376 insitu: if False a new scantable is returned.
1377 Otherwise, the scaling is done in-situ
1378 The default is taken from .asaprc (False)
1379 """
1380 if insitu is None: insitu = rcParams['insitu']
1381 self._math._setinsitu(insitu)
1382 varlist = vars()
1383 s = scantable(self._math._bin(self, width))
1384 s._add_history("bin", varlist)
1385 print_log()
1386 if insitu: self._assign(s)
1387 else: return s
1388
1389
1390 def resample(self, width=5, method='cubic', insitu=None):
1391 """
1392 Return a scan where all spectra have been resampled.
1393
1394 Parameters:
1395 width: The bin width (default=5) in pixels
1396 method: Interpolation method for the resampling.
1397 Values are "nearest", "linear", "cubic" (default)
1398 and "spline"
1399 insitu: if False a new scantable is returned.
1400 Otherwise, the scaling is done in-situ
1401 The default is taken from .asaprc (False)
1402 """
1403 if insitu is None: insitu = rcParams['insitu']
1404 self._math._setinsitu(insitu)
1405 varlist = vars()
1406 s = scantable(self._math._resample(self, method, width))
1407 s._add_history("resample", varlist)
1408 print_log()
1409 if insitu: self._assign(s)
1410 else: return s
1411
1412
1413 def average_pol(self, mask=None, weight='none'):
1414 """
1415 Average the Polarisations together.
1416 Parameters:
1417 mask: An optional mask defining the region, where the
1418 averaging will be applied. The output will have all
1419 specified points masked.
1420 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1421 weighted), or 'tsys' (1/Tsys**2 weighted)
1422 """
1423 varlist = vars()
1424 if mask is None:
1425 mask = ()
1426 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1427 s._add_history("average_pol", varlist)
1428 print_log()
1429 return s
1430
1431 def average_beam(self, mask=None, weight='none'):
1432 """
1433 Average the Beams together.
1434 Parameters:
1435 mask: An optional mask defining the region, where the
1436 averaging will be applied. The output will have all
1437 specified points masked.
1438 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1439 weighted), or 'tsys' (1/Tsys**2 weighted)
1440 """
1441 varlist = vars()
1442 if mask is None:
1443 mask = ()
1444 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1445 s._add_history("average_beam", varlist)
1446 print_log()
1447 return s
1448
1449 def convert_pol(self, poltype=None):
1450 """
1451 Convert the data to a different polarisation type.
1452 Parameters:
1453 poltype: The new polarisation type. Valid types are:
1454 "linear", "stokes" and "circular"
1455 """
1456 varlist = vars()
1457 try:
1458 s = scantable(self._math._convertpol(self, poltype))
1459 except RuntimeError, msg:
1460 if rcParams['verbose']:
1461 #print msg
1462 print_log()
1463 asaplog.push( msg )
1464 print_log( 'ERROR' )
1465 return
1466 else:
1467 raise
1468 s._add_history("convert_pol", varlist)
1469 print_log()
1470 return s
1471
1472 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1473 """
1474 Smooth the spectrum by the specified kernel (conserving flux).
1475 Parameters:
1476 kernel: The type of smoothing kernel. Select from
1477 'hanning' (default), 'gaussian', 'boxcar' and
1478 'rmedian'
1479 width: The width of the kernel in pixels. For hanning this is
1480 ignored, otherwise it defaults to 5 pixels.
1481 For 'gaussian' it is the Full Width Half
1482 Maximum. For 'boxcar' it is the full width.
1483 For 'rmedian' it is the half width.
1484 insitu: if False a new scantable is returned.
1485 Otherwise, the scaling is done in-situ
1486 The default is taken from .asaprc (False)
1487 Example:
1488 none
1489 """
1490 if insitu is None: insitu = rcParams['insitu']
1491 self._math._setinsitu(insitu)
1492 varlist = vars()
1493 s = scantable(self._math._smooth(self, kernel.lower(), width))
1494 s._add_history("smooth", varlist)
1495 print_log()
1496 if insitu: self._assign(s)
1497 else: return s
1498
1499
1500 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1501 """
1502 Return a scan which has been baselined (all rows) by a polynomial.
1503 Parameters:
1504 mask: an optional mask
1505 order: the order of the polynomial (default is 0)
1506 plot: plot the fit and the residual. In this mode each
1507 individual fit has to be approved by typing 'y'
1508 or 'n'
1509 uselin: use linear polynomial fit
1510 insitu: if False a new scantable is returned.
1511 Otherwise, the scaling is done in-situ
1512 The default is taken from .asaprc (False)
1513 Example:
1514 # return a scan baselined by a third order polynomial,
1515 # not using a mask
1516 bscan = scan.poly_baseline(order=3)
1517 """
1518 if insitu is None: insitu = rcParams['insitu']
1519 varlist = vars()
1520 if mask is None:
1521 mask = [True for i in xrange(self.nchan(-1))]
1522 from asap.asapfitter import fitter
1523 try:
1524 f = fitter()
1525 f.set_scan(self, mask)
1526 if uselin:
1527 f.set_function(lpoly=order)
1528 else:
1529 f.set_function(poly=order)
1530 s = f.auto_fit(insitu, plot=plot)
1531 # Save parameters of baseline fits as a class attribute.
1532 # NOTICE: It does not reflect changes in scantable!
1533 self.blpars = f.blpars
1534 s._add_history("poly_baseline", varlist)
1535 print_log()
1536 if insitu: self._assign(s)
1537 else: return s
1538 except RuntimeError:
1539 msg = "The fit failed, possibly because it didn't converge."
1540 if rcParams['verbose']:
1541 #print msg
1542 print_log()
1543 asaplog.push( msg )
1544 print_log( 'ERROR' )
1545 return
1546 else:
1547 raise RuntimeError(msg)
1548
1549
1550 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1551 threshold=3, chan_avg_limit=1, plot=False,
1552 insitu=None):
1553 """
1554 Return a scan which has been baselined (all rows) by a polynomial.
1555 Spectral lines are detected first using linefinder and masked out
1556 to avoid them affecting the baseline solution.
1557
1558 Parameters:
1559 mask: an optional mask retrieved from scantable
1560 edge: an optional number of channels to drop at
1561 the edge of spectrum. If only one value is
1562 specified, the same number will be dropped from
1563 both sides of the spectrum. Default is to keep
1564 all channels. Nested tuples represent individual
1565 edge selection for different IFs (a number of spectral
1566 channels can be different)
1567 order: the order of the polynomial (default is 0)
1568 threshold: the threshold used by line finder. It is better to
1569 keep it large as only strong lines affect the
1570 baseline solution.
1571 chan_avg_limit:
1572 a maximum number of consecutive spectral channels to
1573 average during the search of weak and broad lines.
1574 The default is no averaging (and no search for weak
1575 lines). If such lines can affect the fitted baseline
1576 (e.g. a high order polynomial is fitted), increase this
1577 parameter (usually values up to 8 are reasonable). Most
1578 users of this method should find the default value
1579 sufficient.
1580 plot: plot the fit and the residual. In this mode each
1581 individual fit has to be approved by typing 'y'
1582 or 'n'
1583 insitu: if False a new scantable is returned.
1584 Otherwise, the scaling is done in-situ
1585 The default is taken from .asaprc (False)
1586
1587 Example:
1588 scan2=scan.auto_poly_baseline(order=7)
1589 """
1590 if insitu is None: insitu = rcParams['insitu']
1591 varlist = vars()
1592 from asap.asapfitter import fitter
1593 from asap.asaplinefind import linefinder
1594 from asap import _is_sequence_or_number as _is_valid
1595
1596 # check whether edge is set up for each IF individually
1597 individualedge = False;
1598 if len(edge) > 1:
1599 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1600 individualedge = True;
1601
1602 if not _is_valid(edge, int) and not individualedge:
1603 raise ValueError, "Parameter 'edge' has to be an integer or a \
1604 pair of integers specified as a tuple. Nested tuples are allowed \
1605 to make individual selection for different IFs."
1606
1607 curedge = (0, 0)
1608 if individualedge:
1609 for edgepar in edge:
1610 if not _is_valid(edgepar, int):
1611 raise ValueError, "Each element of the 'edge' tuple has \
1612 to be a pair of integers or an integer."
1613 else:
1614 curedge = edge;
1615
1616 # setup fitter
1617 f = fitter()
1618 f.set_function(poly=order)
1619
1620 # setup line finder
1621 fl = linefinder()
1622 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1623
1624 if not insitu:
1625 workscan = self.copy()
1626 else:
1627 workscan = self
1628
1629 fl.set_scan(workscan)
1630
1631 rows = range(workscan.nrow())
1632 # Save parameters of baseline fits & masklists as a class attribute.
1633 # NOTICE: It does not reflect changes in scantable!
1634 if len(rows) > 0:
1635 self.blpars=[]
1636 self.masklists=[]
1637 asaplog.push("Processing:")
1638 for r in rows:
1639 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1640 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1641 workscan.getpol(r), workscan.getcycle(r))
1642 asaplog.push(msg, False)
1643
1644 # figure out edge parameter
1645 if individualedge:
1646 if len(edge) >= workscan.getif(r):
1647 raise RuntimeError, "Number of edge elements appear to " \
1648 "be less than the number of IFs"
1649 curedge = edge[workscan.getif(r)]
1650
1651 # setup line finder
1652 fl.find_lines(r, mask, curedge)
1653 outmask=fl.get_mask()
1654 f.set_scan(workscan, fl.get_mask())
1655 f.x = workscan._getabcissa(r)
1656 f.y = workscan._getspectrum(r)
1657 f.data = None
1658 f.fit()
1659
1660 # Show mask list
1661 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1662 msg = "mask range: "+str(masklist)
1663 asaplog.push(msg, False)
1664
1665 fpar = f.get_parameters()
1666 if plot:
1667 f.plot(residual=True)
1668 x = raw_input("Accept fit ( [y]/n ): ")
1669 if x.upper() == 'N':
1670 self.blpars.append(None)
1671 self.masklists.append(None)
1672 continue
1673 workscan._setspectrum(f.fitter.getresidual(), r)
1674 self.blpars.append(fpar)
1675 self.masklists.append(masklist)
1676 if plot:
1677 f._p.unmap()
1678 f._p = None
1679 workscan._add_history("auto_poly_baseline", varlist)
1680 if insitu:
1681 self._assign(workscan)
1682 else:
1683 return workscan
1684
1685 def rotate_linpolphase(self, angle):
1686 """
1687 Rotate the phase of the complex polarization O=Q+iU correlation.
1688 This is always done in situ in the raw data. So if you call this
1689 function more than once then each call rotates the phase further.
1690 Parameters:
1691 angle: The angle (degrees) to rotate (add) by.
1692 Examples:
1693 scan.rotate_linpolphase(2.3)
1694 """
1695 varlist = vars()
1696 self._math._rotate_linpolphase(self, angle)
1697 self._add_history("rotate_linpolphase", varlist)
1698 print_log()
1699 return
1700
1701
1702 def rotate_xyphase(self, angle):
1703 """
1704 Rotate the phase of the XY correlation. This is always done in situ
1705 in the data. So if you call this function more than once
1706 then each call rotates the phase further.
1707 Parameters:
1708 angle: The angle (degrees) to rotate (add) by.
1709 Examples:
1710 scan.rotate_xyphase(2.3)
1711 """
1712 varlist = vars()
1713 self._math._rotate_xyphase(self, angle)
1714 self._add_history("rotate_xyphase", varlist)
1715 print_log()
1716 return
1717
1718 def swap_linears(self):
1719 """
1720 Swap the linear polarisations XX and YY, or better the first two
1721 polarisations as this also works for ciculars.
1722 """
1723 varlist = vars()
1724 self._math._swap_linears(self)
1725 self._add_history("swap_linears", varlist)
1726 print_log()
1727 return
1728
1729 def invert_phase(self):
1730 """
1731 Invert the phase of the complex polarisation
1732 """
1733 varlist = vars()
1734 self._math._invert_phase(self)
1735 self._add_history("invert_phase", varlist)
1736 print_log()
1737 return
1738
1739 def add(self, offset, insitu=None):
1740 """
1741 Return a scan where all spectra have the offset added
1742 Parameters:
1743 offset: the offset
1744 insitu: if False a new scantable is returned.
1745 Otherwise, the scaling is done in-situ
1746 The default is taken from .asaprc (False)
1747 """
1748 if insitu is None: insitu = rcParams['insitu']
1749 self._math._setinsitu(insitu)
1750 varlist = vars()
1751 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1752 s._add_history("add", varlist)
1753 print_log()
1754 if insitu:
1755 self._assign(s)
1756 else:
1757 return s
1758
1759 def scale(self, factor, tsys=True, insitu=None):
1760 """
1761 Return a scan where all spectra are scaled by the given 'factor'
1762 Parameters:
1763 factor: the scaling factor
1764 insitu: if False a new scantable is returned.
1765 Otherwise, the scaling is done in-situ
1766 The default is taken from .asaprc (False)
1767 tsys: if True (default) then apply the operation to Tsys
1768 as well as the data
1769 """
1770 if insitu is None: insitu = rcParams['insitu']
1771 self._math._setinsitu(insitu)
1772 varlist = vars()
1773 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1774 s._add_history("scale", varlist)
1775 print_log()
1776 if insitu:
1777 self._assign(s)
1778 else:
1779 return s
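# Usage sketch (illustrative): scale all spectra, and optionally Tsys, by a
# constant factor, e.g. to apply a gain or calibration correction.
#
#     cal = scan.scale(1.25, tsys=True, insitu=False)    # data and Tsys scaled
#     dat = scan.scale(1.25, tsys=False, insitu=False)   # Tsys left unchanged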
1780
1781 def set_sourcetype(self, match, matchtype="pattern",
1782 sourcetype="reference"):
1783 """
1784 Set the type of the source to be a source or reference scan,
1785 using the provided pattern.
1786 Parameters:
1787 match: a Unix style pattern, regular expression or selector
1788 matchtype: 'pattern' (default) UNIX style pattern or
1789 'regex' regular expression
1790 sourcetype: the type of the source to use (source/reference)
1791 """
1792 varlist = vars()
1793 basesel = self.get_selection()
1794 stype = -1
1795 if sourcetype.lower().startswith("r"):
1796 stype = 1
1797 elif sourcetype.lower().startswith("s"):
1798 stype = 0
1799 else:
1800 raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1801 if matchtype.lower().startswith("p"):
1802 matchtype = "pattern"
1803 elif matchtype.lower().startswith("r"):
1804 matchtype = "regex"
1805 else:
1806 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1807 sel = selector()
1808 if isinstance(match, selector):
1809 sel = match
1810 else:
1811 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1812 self.set_selection(basesel+sel)
1813 self._setsourcetype(stype)
1814 self.set_selection(basesel)
1815 self._add_history("set_sourcetype", varlist)
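# Usage sketch (illustrative; the name patterns are hypothetical): mark scans
# as reference or source by SRCNAME, using a UNIX-style pattern or a regex.
#
#     scan.set_sourcetype('*_R', matchtype='pattern', sourcetype='reference')
#     scan.set_sourcetype('^Orion.*', matchtype='regex', sourcetype='source')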
1816
1817 def auto_quotient(self, preserve=True, mode='paired'):
1818 """
1819 This function allows you to build quotients automatically.
1820 It assumes the observation to have the same number of
1821 "ons" and "offs".
1822 Parameters:
1823 preserve: you can preserve (default) the continuum or
1824 remove it. The equations used are
1825 preserve: Output = Toff * (on/off) - Toff
1826 remove: Output = Toff * (on/off) - Ton
1827 mode: the on/off detection mode
1828 'paired' (default)
1829 identifies 'off' scans by the
1830 trailing '_R' (Mopra/Parkes) or
1831 '_e'/'_w' (Tid) and matches
1832 on/off pairs from the observing pattern
1833 'time'
1834 finds the closest off in time
1835
1836 """
1837 modes = ["time", "paired"]
1838 if mode not in modes:
1839 msg = "please provide a valid mode. Valid modes are %s" % (modes)
1840 raise ValueError(msg)
1841 varlist = vars()
1842 s = None
1843 if mode.lower() == "paired":
1844 basesel = self.get_selection()
1845 sel = selector()+basesel
1846 sel.set_query("SRCTYPE==1")
1847 self.set_selection(sel)
1848 offs = self.copy()
1849 sel.set_query("SRCTYPE==0")
1850 self.set_selection(sel)
1851 ons = self.copy()
1852 s = scantable(self._math._quotient(ons, offs, preserve))
1853 self.set_selection(basesel)
1854 elif mode.lower() == "time":
1855 s = scantable(self._math._auto_quotient(self, mode, preserve))
1856 s._add_history("auto_quotient", varlist)
1857 print_log()
1858 return s
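# Usage sketch (illustrative): build (on/off) quotients automatically. 'paired'
# relies on the _R / _e / _w naming convention described in the docstring,
# while 'time' pairs each 'on' with the closest 'off' in time.
#
#     q  = scan.auto_quotient(preserve=True, mode='paired')
#     qt = scan.auto_quotient(preserve=False, mode='time')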
1859
1860 def mx_quotient(self, mask = None, weight='median', preserve=True):
1861 """
1862 Form a quotient using "off" beams when observing in "MX" mode.
1863 Parameters:
1864 mask: an optional mask to be used when weight == 'stddev'
1865 weight: How to average the off beams. Default is 'median'.
1866 preserve: you can preserve (default) the continuum or
1867 remove it. The equations used are
1868 preserve: Output = Toff * (on/off) - Toff
1869 remove: Output = Toff * (on/off) - Ton
1870 """
1871 if mask is None: mask = ()
1872 varlist = vars()
1873 on = scantable(self._math._mx_extract(self, 'on'))
1874 preoff = scantable(self._math._mx_extract(self, 'off'))
1875 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1876 from asapmath import quotient
1877 q = quotient(on, off, preserve)
1878 q._add_history("mx_quotient", varlist)
1879 print_log()
1880 return q
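# Usage sketch (illustrative): form the quotient for MX-mode data, averaging
# the 'off' beams with the chosen weighting before dividing.
#
#     q = scan.mx_quotient(weight='median', preserve=True)
#     # a channel mask only matters when weight == 'stddev'; 'mymask' is a
#     # hypothetical boolean channel mask
#     q2 = scan.mx_quotient(mask=mymask, weight='stddev')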
1881
1882 def freq_switch(self, insitu=None):
1883 """
1884 Apply frequency switching to the data.
1885 Parameters:
1886 insitu: if False a new scantable is returned.
1887 Otherwise, the switching is done in-situ
1888 The default is taken from .asaprc (False)
1889 Example:
1890 none
1891 """
1892 if insitu is None: insitu = rcParams['insitu']
1893 self._math._setinsitu(insitu)
1894 varlist = vars()
1895 s = scantable(self._math._freqswitch(self))
1896 s._add_history("freq_switch", varlist)
1897 print_log()
1898 if insitu: self._assign(s)
1899 else: return s
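# Usage sketch (illustrative): fold frequency-switched data; a new scantable is
# returned unless in-situ operation is requested (or set in .asaprc).
#
#     fs = scan.freq_switch(insitu=False)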
1900
1901 def recalc_azel(self):
1902 """
1903 Recalculate the azimuth and elevation for each position.
1904 Parameters:
1905 none
1906 Example:
1907 """
1908 varlist = vars()
1909 self._recalcazel()
1910 self._add_history("recalc_azel", varlist)
1911 print_log()
1912 return
1913
1914 def __add__(self, other):
1915 varlist = vars()
1916 s = None
1917 if isinstance(other, scantable):
1918 s = scantable(self._math._binaryop(self, other, "ADD"))
1919 elif isinstance(other, float):
1920 s = scantable(self._math._unaryop(self, other, "ADD", False))
1921 else:
1922 raise TypeError("Other input is not a scantable or float value")
1923 s._add_history("operator +", varlist)
1924 print_log()
1925 return s
1926
1927 def __sub__(self, other):
1928 """
1929 implicit on all axes and on Tsys
1930 """
1931 varlist = vars()
1932 s = None
1933 if isinstance(other, scantable):
1934 s = scantable(self._math._binaryop(self, other, "SUB"))
1935 elif isinstance(other, float):
1936 s = scantable(self._math._unaryop(self, other, "SUB", False))
1937 else:
1938 raise TypeError("Other input is not a scantable or float value")
1939 s._add_history("operator -", varlist)
1940 print_log()
1941 return s
1942
1943 def __mul__(self, other):
1944 """
1945 implicit on all axes and on Tsys
1946 """
1947 varlist = vars()
1948 s = None
1949 if isinstance(other, scantable):
1950 s = scantable(self._math._binaryop(self, other, "MUL"))
1951 elif isinstance(other, float):
1952 s = scantable(self._math._unaryop(self, other, "MUL", False))
1953 else:
1954 raise TypeError("Other input is not a scantable or float value")
1955 s._add_history("operator *", varlist)
1956 print_log()
1957 return s
1958
1959
1960 def __div__(self, other):
1961 """
1962 implicit on all axes and on Tsys
1963 """
1964 varlist = vars()
1965 s = None
1966 if isinstance(other, scantable):
1967 s = scantable(self._math._binaryop(self, other, "DIV"))
1968 elif isinstance(other, float):
1969 if other == 0.0:
1970 raise ZeroDivisionError("Dividing by zero is not recommended")
1971 s = scantable(self._math._unaryop(self, other, "DIV", False))
1972 else:
1973 raise TypeError("Other input is not a scantable or float value")
1974 s._add_history("operator /", varlist)
1975 print_log()
1976 return s
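# Usage sketch (illustrative; the scantable names are hypothetical): the
# operators above accept a float or another scantable, so simple calibration
# arithmetic reads naturally.
#
#     diff   = on_scan - off_scan    # channel-by-channel difference of two scantables
#     scaled = scan * 2.0            # multiply every spectrum by a constant
#     ratio  = scan / 3.0            # dividing by the float 0.0 raises ZeroDivisionError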
1977
1978 def get_fit(self, row=0):
1979 """
1980 Print or return the stored fits for a row in the scantable
1981 Parameters:
1982 row: the row which the fit has been applied to.
1983 """
1984 if row >= self.nrow():
1985 return
1986 from asap.asapfit import asapfit
1987 fit = asapfit(self._getfit(row))
1988 if rcParams['verbose']:
1989 #print fit
1990 asaplog.push( '%s' %(fit) )
1991 print_log()
1992 return
1993 else:
1994 return fit.as_dict()
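# Usage sketch (illustrative): inspect a stored fit. With rcParams['verbose']
# set the fit is pushed to the log; otherwise a dictionary is returned.
#
#     pars = scan.get_fit(row=0)
#     if pars is not None:
#         print pars                 # dictionary form of the stored fit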
1995
1996 def flag_nans(self):
1997 """
1998 Utility function to flag NaN values in the scantable.
1999 """
2000 import numpy
2001 basesel = self.get_selection()
2002 for i in range(self.nrow()):
2003 sel = selector()+basesel
2004 sel.set_scans(self.getscan(i))
2005 sel.set_beams(self.getbeam(i))
2006 sel.set_ifs(self.getif(i))
2007 sel.set_polarisations(self.getpol(i))
2008 self.set_selection(sel)
2009 nans = numpy.isnan(self._getspectrum(0))
2010 if numpy.any(nans):
2011 bnans = [ bool(v) for v in nans]
2012 self.flag(bnans)
2013 self.set_selection(basesel)
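# Usage sketch (illustrative): flag NaN channels row by row, e.g. before
# averaging or baseline fitting.
#
#     scan.flag_nans()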
2014
2015
2016 def _add_history(self, funcname, parameters):
2017 if not rcParams['scantable.history']:
2018 return
2019 # create date
2020 sep = "##"
2021 from datetime import datetime
2022 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2023 hist = dstr+sep
2024 hist += funcname+sep#cdate+sep
2025 if parameters.has_key('self'): del parameters['self']
2026 for k, v in parameters.iteritems():
2027 if type(v) is dict:
2028 for k2, v2 in v.iteritems():
2029 hist += k2
2030 hist += "="
2031 if isinstance(v2, scantable):
2032 hist += 'scantable'
2033 elif k2 == 'mask':
2034 if isinstance(v2, list) or isinstance(v2, tuple):
2035 hist += str(self._zip_mask(v2))
2036 else:
2037 hist += str(v2)
2038 else:
2039 hist += str(v2)
2040 else:
2041 hist += k
2042 hist += "="
2043 if isinstance(v, scantable):
2044 hist += 'scantable'
2045 elif k == 'mask':
2046 if isinstance(v, list) or isinstance(v, tuple):
2047 hist += str(self._zip_mask(v))
2048 else:
2049 hist += str(v)
2050 else:
2051 hist += str(v)
2052 hist += sep
2053 hist = hist[:-2] # remove trailing '##'
2054 self._addhistory(hist)
2055
2056
2057 def _zip_mask(self, mask):
2058 mask = list(mask)
2059 i = 0
2060 segments = []
2061 while mask[i:].count(1):
2062 i += mask[i:].index(1)
2063 if mask[i:].count(0):
2064 j = i + mask[i:].index(0)
2065 else:
2066 j = len(mask)
2067 segments.append([i, j])
2068 i = j
2069 return segments
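# Worked example (illustrative) of the mask compression above: _zip_mask turns
# a boolean channel mask into [start, end) index segments for the history text.
#
#     self._zip_mask([0, 1, 1, 0, 1])   # -> [[1, 3], [4, 5]]
#     self._zip_mask([1, 1, 1])         # -> [[0, 3]]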
2070
2071 def _get_ordinate_label(self):
2072 fu = "("+self.get_fluxunit()+")"
2073 import re
2074 lbl = "Intensity"
2075 if re.match(".K.", fu):
2076 lbl = "Brightness Temperature "+ fu
2077 elif re.match(".Jy.", fu):
2078 lbl = "Flux density "+ fu
2079 return lbl
2080
2081 def _check_ifs(self):
2082 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2083 nchans = filter(lambda t: t > 0, nchans)
2084 return (sum(nchans)/len(nchans) == nchans[0])
2085
2086 def _fill(self, names, unit, average, getpt):
2087 import os
2088 from asap._asap import stfiller
2089 first = True
2090 fullnames = []
2091 for name in names:
2092 name = os.path.expandvars(name)
2093 name = os.path.expanduser(name)
2094 if not os.path.exists(name):
2095 msg = "File '%s' does not exists" % (name)
2096 if rcParams['verbose']:
2097 asaplog.push(msg)
2098 #print asaplog.pop().strip()
2099 print_log( 'ERROR' )
2100 return
2101 raise IOError(msg)
2102 fullnames.append(name)
2103 if average:
2104 asaplog.push('Auto averaging integrations')
2105 stype = int(rcParams['scantable.storage'].lower() == 'disk')
2106 for name in fullnames:
2107 tbl = Scantable(stype)
2108 r = stfiller(tbl)
2109 rx = rcParams['scantable.reference']
2110 r._setreferenceexpr(rx)
2111 msg = "Importing %s..." % (name)
2112 asaplog.push(msg, False)
2113 print_log()
2114 r._open(name, -1, -1, getpt)
2115 r._read()
2116 if average:
2117 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2118 if not first:
2119 tbl = self._math._merge([self, tbl])
2120 Scantable.__init__(self, tbl)
2121 r._close()
2122 del r, tbl
2123 first = False
2124 if unit is not None:
2125 self.set_fluxunit(unit)
2126 #self.set_freqframe(rcParams['scantable.freqframe'])
2127
2128 def __getitem__(self, key):
2129 if key < 0:
2130 key += self.nrow()
2131 if key >= self.nrow():
2132 raise IndexError("Row index out of range.")
2133 return self._getspectrum(key)
2134
2135 def __setitem__(self, key, value):
2136 if key < 0:
2137 key += self.nrow()
2138 if key >= self.nrow():
2139 raise IndexError("Row index out of range.")
2140 if not hasattr(value, "__len__") or \
2141 len(value) > self.nchan(self.getif(key)):
2142 raise ValueError("Spectrum length doesn't match.")
2143 return self._setspectrum(value, key)
2144
2145 def __len__(self):
2146 return self.nrow()
2147
2148 def __iter__(self):
2149 for i in range(len(self)):
2150 yield self[i]
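# Usage sketch (illustrative): the container protocol above lets a scantable be
# indexed, measured and iterated like a sequence of spectra.
#
#     first = scan[0]          # spectrum (list of channel values) of row 0
#     last  = scan[-1]         # negative indices count from the end
#     print len(scan)          # number of rows
#     for sp in scan:          # iterate over all rows
#         print max(sp)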