source: branches/alma/python/scantable.py@ 1616

Last change on this file since 1616 was 1615, checked in by Takeshi Nakazato, 15 years ago

New Development: No

JIRA Issue: Yes CAS-729, CAS-1147

Ready to Release: Yes

Interface Changes: No

Description:

I have modified the way messages are logged in the _row_callback() method so that
the message is written directly to the casalogger rather than via a temporary file.
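
For reference, the pattern being replaced can still be seen in stats() below, which
writes its formatted output to a temporary file under /tmp and reads it back before
pushing it to the logger, while _row_callback() now pushes the text straight to
asaplog. A minimal sketch of the two approaches, using only the asaplog/print_log
helpers this module already imports (file name and message text are illustrative):

    out = "Scan[0] (src) Time[2009/01/01]:\n= 1.234\n"   # example text to log

    # old style: round-trip the text through a temporary file
    tmpfile = '/tmp/tmp_casapy_asap_log'
    f = open(tmpfile, 'w')
    f.write(out)
    f.close()
    for line in open(tmpfile):
        asaplog.push(line)
    print_log()

    # new style: write the message directly to the casalogger
    asaplog.push(out)
    print_log()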

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 80.6 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None, getpt=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
34 The default is False, which makes loading
35 the MS data faster in some cases.
36 """
37 if average is None:
38 average = rcParams['scantable.autoaverage']
39 if getpt is None:
40 getpt = False
41 varlist = vars()
42 from asap._asap import stmath
43 self._math = stmath()
44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
46 else:
47 if isinstance(filename, str):# or \
48# (isinstance(filename, list) or isinstance(filename, tuple)) \
49# and isinstance(filename[-1], str):
50 import os.path
51 filename = os.path.expandvars(filename)
52 filename = os.path.expanduser(filename)
53 if not os.path.exists(filename):
54 s = "File '%s' not found." % (filename)
55 if rcParams['verbose']:
56 asaplog.push(s)
57 #print asaplog.pop().strip()
58 print_log('ERROR')
59 return
60 raise IOError(s)
61 if os.path.isdir(filename) \
62 and not os.path.exists(filename+'/table.f1'):
63 # crude check if asap table
64 if os.path.exists(filename+'/table.info'):
65 ondisk = rcParams['scantable.storage'] == 'disk'
66 Scantable.__init__(self, filename, ondisk)
67 if unit is not None:
68 self.set_fluxunit(unit)
69 # do not reset to the default freqframe
70 #self.set_freqframe(rcParams['scantable.freqframe'])
71 else:
72 msg = "The given file '%s'is not a valid " \
73 "asap table." % (filename)
74 if rcParams['verbose']:
75 #print msg
76 asaplog.push( msg )
77 print_log( 'ERROR' )
78 return
79 else:
80 raise IOError(msg)
81 else:
82 self._fill([filename], unit, average, getpt)
83 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
84 and isinstance(filename[-1], str):
85 self._fill(filename, unit, average, getpt)
86 self._add_history("scantable", varlist)
87 print_log()
88
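    # Usage sketch: the constructor accepts an existing ASAP table, an
    # rpfits/sdfits/MS file, or another scantable; file names below are
    # hypothetical.
    #     from asap import scantable
    #     s1 = scantable('obs.asap')                         # open an ASAP table
    #     s2 = scantable('obs.rpf', average=True, unit='K')  # import and average on read
    #     s3 = scantable('obs.ms', getpt=True)               # MS input, fill pointing data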
89 def save(self, name=None, format=None, overwrite=False):
90 """
91 Store the scantable on disk. This can be an asap (aips++) Table,
92 SDFITS or MS2 format.
93 Parameters:
94 name: the name of the outputfile. For format "ASCII"
95 this is the root file name (data in 'name'.txt
96 and header in 'name'_header.txt)
97 format: an optional file format. Default is ASAP.
98 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
99 'SDFITS' (save as SDFITS file)
100 'ASCII' (saves as ascii text file)
101 'MS2' (saves as an aips++
102 MeasurementSet V2)
103 'FITS' (save as image FITS - not
104 readable by class)
105 'CLASS' (save as FITS readable by CLASS)
106 overwrite: If the file should be overwritten if it exists.
107 The default False is to return with warning
108 without writing the output. USE WITH CARE.
109 Example:
110 scan.save('myscan.asap')
111 scan.save('myscan.sdfits', 'SDFITS')
112 """
113 from os import path
114 if format is None: format = rcParams['scantable.save']
115 suffix = '.'+format.lower()
116 if name is None or name == "":
117 name = 'scantable'+suffix
118 msg = "No filename given. Using default name %s..." % name
119 asaplog.push(msg)
120 name = path.expandvars(name)
121 if path.isfile(name) or path.isdir(name):
122 if not overwrite:
123 msg = "File %s exists." % name
124 if rcParams['verbose']:
125 #print msg
126 asaplog.push( msg )
127 print_log( 'ERROR' )
128 return
129 else:
130 raise IOError(msg)
131 format2 = format.upper()
132 if format2 == 'ASAP':
133 self._save(name)
134 else:
135 from asap._asap import stwriter as stw
136 writer = stw(format2)
137 writer.write(self, name)
138 print_log()
139 return
140
141 def copy(self):
142 """
143 Return a copy of this scantable.
144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
146 Parameters:
147 none
148 Example:
149 copiedscan = scan.copy()
150 """
151 sd = scantable(Scantable._copy(self))
152 return sd
153
154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable where the specified scan number(s) has(have)
157 been dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
166 #print "Please specify a scanno to drop from the scantable"
167 asaplog.push( 'Please specify a scanno to drop from the scantable' )
168 print_log( 'ERROR' )
169 return
170 else:
171 raise RuntimeError("No scan given")
172 try:
173 scanid = _to_list(scanid)
174 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
175 for sid in scanid: allscans.remove(sid)
176 if len(allscans) == 0:
177 raise ValueError("Can't remove all scans")
178 except ValueError:
179 if rcParams['verbose']:
180 #print "Couldn't find any match."
181 print_log()
182 asaplog.push( "Couldn't find any match." )
183 print_log( 'ERROR' )
184 return
185 else: raise
186 try:
187 bsel = self.get_selection()
188 sel = selector()
189 sel.set_scans(allscans)
190 self.set_selection(bsel+sel)
191 scopy = self._copy()
192 self.set_selection(bsel)
193 return scantable(scopy)
194 except RuntimeError:
195 if rcParams['verbose']:
196 #print "Couldn't find any match."
197 print_log()
198 asaplog.push( "Couldn't find any match." )
199 print_log( 'ERROR' )
200 else:
201 raise
202
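    # Usage sketch (scan numbers hypothetical): return a copy of 'scan' with
    # SCANNO 1 and 3 removed
    #     cleaned = scan.drop_scan([1, 3])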
203
204 def get_scan(self, scanid=None):
205 """
206 Return a specific scan (by scanno) or collection of scans (by
207 source name) in a new scantable.
208 Note:
209 See scantable.drop_scan() for the inverse operation.
210 Parameters:
211 scanid: a (list of) scanno or a source name, unix-style
212 patterns are accepted for source name matching, e.g.
213 '*_R' gets all 'ref' scans
214 Example:
215 # get all scans containing the source '323p459'
216 newscan = scan.get_scan('323p459')
217 # get all 'off' scans
218 refscans = scan.get_scan('*_R')
219 # get a subset of scans by scanno (as listed in scan.summary())
220 newscan = scan.get_scan([0, 2, 7, 10])
221 """
222 if scanid is None:
223 if rcParams['verbose']:
224 #print "Please specify a scan no or name to " \
225 # "retrieve from the scantable"
226 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
227 print_log( 'ERROR' )
228 return
229 else:
230 raise RuntimeError("No scan given")
231
232 try:
233 bsel = self.get_selection()
234 sel = selector()
235 if type(scanid) is str:
236 sel.set_name(scanid)
237 self.set_selection(bsel+sel)
238 scopy = self._copy()
239 self.set_selection(bsel)
240 return scantable(scopy)
241 elif type(scanid) is int:
242 sel.set_scans([scanid])
243 self.set_selection(bsel+sel)
244 scopy = self._copy()
245 self.set_selection(bsel)
246 return scantable(scopy)
247 elif type(scanid) is list:
248 sel.set_scans(scanid)
249 self.set_selection(bsel+sel)
250 scopy = self._copy()
251 self.set_selection(bsel)
252 return scantable(scopy)
253 else:
254 msg = "Illegal scanid type, use 'int' or 'list' if ints."
255 if rcParams['verbose']:
256 #print msg
257 asaplog.push( msg )
258 print_log( 'ERROR' )
259 else:
260 raise TypeError(msg)
261 except RuntimeError:
262 if rcParams['verbose']:
263 #print "Couldn't find any match."
264 print_log()
265 asaplog.push( "Couldn't find any match." )
266 print_log( 'ERROR' )
267 else: raise
268
269 def __str__(self):
270 return Scantable._summary(self, True)
271
272 def summary(self, filename=None):
273 """
274 Print a summary of the contents of this scantable.
275 Parameters:
276 filename: the name of a file to write the output to
277 Default - no file output
278 verbose: print extra info such as the frequency table
279 The default (False) is taken from .asaprc
280 """
281 info = Scantable._summary(self, True)
282 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
283 if filename is not None:
284 if filename == "":
285 filename = 'scantable_summary.txt'
286 from os.path import expandvars, isdir
287 filename = expandvars(filename)
288 if not isdir(filename):
289 data = open(filename, 'w')
290 data.write(info)
291 data.close()
292 else:
293 msg = "Illegal file name '%s'." % (filename)
294 if rcParams['verbose']:
295 #print msg
296 asaplog.push( msg )
297 print_log( 'ERROR' )
298 else:
299 raise IOError(msg)
300 if rcParams['verbose']:
301 try:
302 from IPython.genutils import page as pager
303 except ImportError:
304 from pydoc import pager
305 pager(info)
306 else:
307 return info
308
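    # Usage sketch: page the summary to the screen/logger, or write it to a
    # text file (file name hypothetical)
    #     scan.summary()
    #     scan.summary(filename='obs_summary.txt')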
309 def get_spectrum(self, rowno):
310 """Return the spectrum for the current row in the scantable as a list.
311 Parameters:
312 rowno: the row number to retrieve the spectrum from
313 """
314 return self._getspectrum(rowno)
315
316 def get_mask(self, rowno):
317 """Return the mask for the current row in the scantable as a list.
318 Parameters:
319 rowno: the row number to retrieve the mask from
320 """
321 return self._getmask(rowno)
322
323 def set_spectrum(self, spec, rowno):
324 """Return the spectrum for the current row in the scantable as a list.
325 Parameters:
326 spec: the spectrum
327 rowno: the row number to set the spectrum for
328 """
329 assert(len(spec) == self.nchan())
330 return self._setspectrum(spec, rowno)
331
332 def get_selection(self):
333 """
334 Get the selection object currently set on this scantable.
335 Parameters:
336 none
337 Example:
338 sel = scan.get_selection()
339 sel.set_ifs(0) # select IF 0
340 scan.set_selection(sel) # apply modified selection
341 """
342 return selector(self._getselection())
343
344 def set_selection(self, selection=selector()):
345 """
346 Select a subset of the data. All following operations on this scantable
347 are only applied to this selection.
348 Parameters:
349 selection: a selector object (default unset the selection)
350 Examples:
351 sel = selector() # create a selection object
352 sel.set_scans([0, 3]) # select SCANNO 0 and 3
353 scan.set_selection(sel) # set the selection
354 scan.summary() # will only print summary of scanno 0 and 3
355 scan.set_selection() # unset the selection
356 """
357 self._setselection(selection)
358
359 def get_row(self, row=0, insitu=None):
360 """
361 Select a row in the scantable.
362 Return a scantable with single row.
363 Parameters:
364 row: row no of integration, default is 0.
365 insitu: if False a new scantable is returned.
366 Otherwise, the scaling is done in-situ
367 The default is taken from .asaprc (False)
368 """
369 if insitu is None: insitu = rcParams['insitu']
370 if not insitu:
371 workscan = self.copy()
372 else:
373 workscan = self
374 # Select a row
375 sel=selector()
376 sel.set_scans([workscan.getscan(row)])
377 sel.set_cycles([workscan.getcycle(row)])
378 sel.set_beams([workscan.getbeam(row)])
379 sel.set_ifs([workscan.getif(row)])
380 sel.set_polarisations([workscan.getpol(row)])
381 sel.set_name(workscan._getsourcename(row))
382 workscan.set_selection(sel)
383 if not workscan.nrow() == 1:
384 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
385 raise RuntimeError(msg)
386 del sel
387 if insitu:
388 self._assign(workscan)
389 else:
390 return workscan
391
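    # Usage sketch (row number hypothetical): extract integration 5 as a new
    # single-row scantable, leaving the original untouched
    #     single = scan.get_row(row=5, insitu=False)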
392 def stats(self, stat='stddev', mask=None):
393 """
394 Determine the specified statistic of the current beam/if/pol
395 Takes a 'mask' as an optional parameter to specify which
396 channels should be excluded.
397 Parameters:
398 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
399 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
400 mask: an optional mask specifying where the statistic
401 should be determined.
402 Example:
403 scan.set_unit('channel')
404 msk = scan.create_mask([100, 200], [500, 600])
405 scan.stats(stat='mean', mask=msk)
406 """
407 if mask is None:
408 mask = []
409 axes = ['Beam', 'IF', 'Pol', 'Time']
410 if not self._check_ifs():
411 raise ValueError("Cannot apply mask as the IFs have different "
412 "number of channels. Please use setselection() "
413 "to select individual IFs")
414 rtnabc = False
415 if stat.lower().endswith('_abc'): rtnabc = True
416 getchan = False
417 if stat.lower().startswith('min') or stat.lower().startswith('max'):
418 chan = self._math._minmaxchan(self, mask, stat)
419 getchan = True
420 statvals = []
421 if not rtnabc: statvals = self._math._stats(self, mask, stat)
422
423 out = ''
424 axes = []
425 for i in range(self.nrow()):
426 axis = []
427 axis.append(self.getscan(i))
428 axis.append(self.getbeam(i))
429 axis.append(self.getif(i))
430 axis.append(self.getpol(i))
431 axis.append(self.getcycle(i))
432 axes.append(axis)
433 tm = self._gettime(i)
434 src = self._getsourcename(i)
435 refstr = ''
436 statunit= ''
437 if getchan:
438 qx, qy = self.chan2data(rowno=i, chan=chan[i])
439 if rtnabc:
440 statvals.append(qx['value'])
441 refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
442 statunit= '['+qx['unit']+']'
443 else:
444 refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
445 #statunit= ' ['+qy['unit']+']'
446 out += 'Scan[%d] (%s) ' % (axis[0], src)
447 out += 'Time[%s]:\n' % (tm)
448 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
449 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
450 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
451 out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
452 out += "--------------------------------------------------\n"
453
454 if rcParams['verbose']:
455 import os
456 usr=os.environ['USER']
457 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
458 f=open(tmpfile,'w')
459 print >> f, "--------------------------------------------------"
460 print >> f, " ", stat, statunit
461 print >> f, "--------------------------------------------------"
462 print >> f, out
463 f.close()
464 f=open(tmpfile,'r')
465 x=f.readlines()
466 f.close()
467 for xx in x:
468 asaplog.push( xx )
469 print_log()
470 #else:
471 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
472 # 'axes' : axes,
473 # 'data': statvals}
474 return statvals
475
476 def chan2data(self, rowno=0, chan=0):
477 """
478 Returns channel/frequency/velocity and spectral value
479 at an arbitrary row and channel in the scantable.
480 Parameters:
481 rowno: a row number in the scantable. Default is the
482 first row, i.e. rowno=0
483 chan: a channel in the scantable. Default is the first
484 channel, i.e. chan=0
485 """
486 if isinstance(rowno, int) and isinstance(chan, int):
487 qx = {'unit': self.get_unit(),
488 'value': self._getabcissa(rowno)[chan]}
489 qy = {'unit': self.get_fluxunit(),
490 'value': self._getspectrum(rowno)[chan]}
491 return qx, qy
492
493 def stddev(self, mask=None):
494 """
495 Determine the standard deviation of the current beam/if/pol
496 Takes a 'mask' as an optional parameter to specify which
497 channels should be excluded.
498 Parameters:
499 mask: an optional mask specifying where the standard
500 deviation should be determined.
501
502 Example:
503 scan.set_unit('channel')
504 msk = scan.create_mask([100, 200], [500, 600])
505 scan.stddev(mask=msk)
506 """
507 return self.stats(stat='stddev', mask=mask)
508
509
510 def get_column_names(self):
511 """
512 Return a list of column names, which can be used for selection.
513 """
514 return list(Scantable.get_column_names(self))
515
516 def get_tsys(self):
517 """
518 Return the System temperatures.
519 Returns:
520 a list of Tsys values for the current selection
521 """
522
523 return self._row_callback(self._gettsys, "Tsys")
524
525 def _row_callback(self, callback, label):
526 axes = []
527 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
528 out = ""
529 outvec = []
530 for i in range(self.nrow()):
531 axis = []
532 axis.append(self.getscan(i))
533 axis.append(self.getbeam(i))
534 axis.append(self.getif(i))
535 axis.append(self.getpol(i))
536 axis.append(self.getcycle(i))
537 axes.append(axis)
538 tm = self._gettime(i)
539 src = self._getsourcename(i)
540 out += 'Scan[%d] (%s) ' % (axis[0], src)
541 out += 'Time[%s]:\n' % (tm)
542 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
543 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
544 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
545 outvec.append(callback(i))
546 out += '= %3.3f\n' % (outvec[i])
547 out += "--------------------------------------------------\n"
548 if rcParams['verbose']:
549 asaplog.push("--------------------------------------------------")
550 asaplog.push(" %s" % (label))
551 asaplog.push("--------------------------------------------------")
552 asaplog.push(out)
553 print_log()
554 # disabled because the vector seems more useful
555 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
556 return outvec
557
558 def _get_column(self, callback, row=-1):
559 """
560 """
561 if row == -1:
562 return [callback(i) for i in range(self.nrow())]
563 else:
564 if 0 <= row < self.nrow():
565 return callback(row)
566
567
568 def get_time(self, row=-1, asdatetime=False):
569 """
570 Get a list of time stamps for the observations.
571 Return a datetime object for each integration time stamp in the scantable.
572 Parameters:
573 row: row no of integration. Default -1 return all rows
574 asdatetime: return values as datetime objects rather than strings
575 Example:
576 none
577 """
578 from time import strptime
579 from datetime import datetime
580 times = self._get_column(self._gettime, row)
581 if not asdatetime:
582 return times
583 format = "%Y/%m/%d/%H:%M:%S"
584 if isinstance(times, list):
585 return [datetime(*strptime(i, format)[:6]) for i in times]
586 else:
587 return datetime(*strptime(times, format)[:6])
588
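    # Usage sketch: time stamps for all rows as strings, or a single row as a
    # datetime object
    #     stamps = scan.get_time()                     # list of strings
    #     t0 = scan.get_time(row=0, asdatetime=True)   # datetime.datetime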
589
590 def get_inttime(self, row=-1):
591 """
592 Get a list of integration times for the observations.
593 Return a time in seconds for each integration in the scantable.
594 Parameters:
595 row: row no of integration. Default -1 return all rows.
596 Example:
597 none
598 """
599 return self._get_column(self._getinttime, row)
600
601
602 def get_sourcename(self, row=-1):
603 """
604 Get a list of source names for the observations.
605 Return a string for each integration in the scantable.
606 Parameters:
607 row: row no of integration. Default -1 return all rows.
608 Example:
609 none
610 """
611 return self._get_column(self._getsourcename, row)
612
613 def get_elevation(self, row=-1):
614 """
615 Get a list of elevations for the observations.
616 Return a float for each integration in the scantable.
617 Parameters:
618 row: row no of integration. Default -1 return all rows.
619 Example:
620 none
621 """
622 return self._get_column(self._getelevation, row)
623
624 def get_azimuth(self, row=-1):
625 """
626 Get a list of azimuths for the observations.
627 Return a float for each integration in the scantable.
628 Parameters:
629 row: row no of integration. Default -1 return all rows.
630 Example:
631 none
632 """
633 return self._get_column(self._getazimuth, row)
634
635 def get_parangle(self, row=-1):
636 """
637 Get a list of parallactic angles for the observations.
638 Return a float for each integration in the scantable.
639 Parameters:
640 row: row no of integration. Default -1 return all rows.
641 Example:
642 none
643 """
644 return self._get_column(self._getparangle, row)
645
646 def get_direction(self, row=-1):
647 """
648 Get a list of Positions on the sky (direction) for the observations.
649 Return a float for each integration in the scantable.
650 Parameters:
651 row: row no of integration. Default -1 return all rows
652 Example:
653 none
654 """
655 return self._get_column(self._getdirection, row)
656
657 def get_directionval(self, row=-1):
658 """
659 Get a list of Positions on the sky (direction) for the observations.
660 Return a float for each integration in the scantable.
661 Parameters:
662 row: row no of integration. Default -1 return all rows
663 Example:
664 none
665 """
666 return self._get_column(self._getdirectionvec, row)
667
668 def set_unit(self, unit='channel'):
669 """
670 Set the unit for all following operations on this scantable
671 Parameters:
672 unit: optional unit, default is 'channel'
673 one of '*Hz', 'km/s', 'channel', ''
674 """
675 varlist = vars()
676 if unit in ['', 'pixel', 'channel']:
677 unit = ''
678 inf = list(self._getcoordinfo())
679 inf[0] = unit
680 self._setcoordinfo(inf)
681 self._add_history("set_unit", varlist)
682
683 def set_instrument(self, instr):
684 """
685 Set the instrument for subsequent processing.
686 Parameters:
687 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
688 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
689 """
690 self._setInstrument(instr)
691 self._add_history("set_instrument", vars())
692 print_log()
693
694 def set_feedtype(self, feedtype):
695 """
696 Overwrite the feed type, which might not be set correctly.
697 Parameters:
698 feedtype: 'linear' or 'circular'
699 """
700 self._setfeedtype(feedtype)
701 self._add_history("set_feedtype", vars())
702 print_log()
703
704 def set_doppler(self, doppler='RADIO'):
705 """
706 Set the doppler for all following operations on this scantable.
707 Parameters:
708 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
709 """
710 varlist = vars()
711 inf = list(self._getcoordinfo())
712 inf[2] = doppler
713 self._setcoordinfo(inf)
714 self._add_history("set_doppler", vars())
715 print_log()
716
717 def set_freqframe(self, frame=None):
718 """
719 Set the frame type of the Spectral Axis.
720 Parameters:
721 frame: an optional frame type, default 'LSRK'. Valid frames are:
722 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
723 'GEO', 'GALACTO', 'LGROUP', 'CMB'
724 Examples:
725 scan.set_freqframe('BARY')
726 """
727 if frame is None: frame = rcParams['scantable.freqframe']
728 varlist = vars()
729 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
730 'GEO', 'GALACTO', 'LGROUP', 'CMB']
731
732 if frame in valid:
733 inf = list(self._getcoordinfo())
734 inf[1] = frame
735 self._setcoordinfo(inf)
736 self._add_history("set_freqframe", varlist)
737 else:
738 msg = "Please specify a valid freq type. Valid types are:\n", valid
739 if rcParams['verbose']:
740 #print msg
741 asaplog.push( msg )
742 print_log( 'ERROR' )
743 else:
744 raise TypeError(msg)
745 print_log()
746
747 def set_dirframe(self, frame=""):
748 """
749 Set the frame type of the Direction on the sky.
750 Parameters:
751 frame: an optional frame type, default ''. Valid frames are:
752 'J2000', 'B1950', 'GALACTIC'
753 Examples:
754 scan.set_dirframe('GALACTIC')
755 """
756 varlist = vars()
757 try:
758 Scantable.set_dirframe(self, frame)
759 except RuntimeError, msg:
760 if rcParams['verbose']:
761 #print msg
762 print_log()
763 asaplog.push( msg )
764 print_log( 'ERROR' )
765 else:
766 raise
767 self._add_history("set_dirframe", varlist)
768
769 def get_unit(self):
770 """
771 Get the default unit set in this scantable
772 Returns:
773 A unit string
774 """
775 inf = self._getcoordinfo()
776 unit = inf[0]
777 if unit == '': unit = 'channel'
778 return unit
779
780 def get_abcissa(self, rowno=0):
781 """
782 Get the abcissa in the current coordinate setup for the currently
783 selected Beam/IF/Pol
784 Parameters:
785 rowno: an optional row number in the scantable. Default is the
786 first row, i.e. rowno=0
787 Returns:
788 The abcissa values and the format string (as a dictionary)
789 """
790 abc = self._getabcissa(rowno)
791 lbl = self._getabcissalabel(rowno)
792 print_log()
793 return abc, lbl
794
795 def flag(self, mask=None, unflag=False):
796 """
797 Flag the selected data using an optional channel mask.
798 Parameters:
799 mask: an optional channel mask, created with create_mask. Default
800 (no mask) is all channels.
801 unflag: if True, unflag the data
802 """
803 varlist = vars()
804 if mask is None:
805 mask = []
806 try:
807 self._flag(mask, unflag)
808 except RuntimeError, msg:
809 if rcParams['verbose']:
810 #print msg
811 print_log()
812 asaplog.push( msg )
813 print_log( 'ERROR' )
814 return
815 else: raise
816 self._add_history("flag", varlist)
817
818 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
819 """
820 Flag the data in 'lag' space by providing a frequency to remove.
821 Flagged data in the scantable gets set to 0.0 before the fft.
822 No taper is applied.
823 Parameters:
824 frequency: the frequency (really a period within the bandwidth)
825 to remove
826 width: the width of the frequency to remove, to remove a
827 range of frequencies around the centre.
828 unit: the frequency unit (default "GHz")
829 Notes:
830 It is recommended to flag edges of the band or strong
831 signals beforehand.
832 """
833 if insitu is None: insitu = rcParams['insitu']
834 self._math._setinsitu(insitu)
835 varlist = vars()
836 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
837 if not base.has_key(unit):
838 raise ValueError("%s is not a valid unit." % unit)
839 try:
840 s = scantable(self._math._lag_flag(self, frequency*base[unit],
841 width*base[unit]))
842 except RuntimeError, msg:
843 if rcParams['verbose']:
844 #print msg
845 print_log()
846 asaplog.push( msg )
847 print_log( 'ERROR' )
848 return
849 else: raise
850 s._add_history("lag_flag", varlist)
851 print_log()
852 if insitu:
853 self._assign(s)
854 else:
855 return s
856
857
858 def create_mask(self, *args, **kwargs):
859 """
860 Compute and return a mask based on [min, max] windows.
861 The specified windows are to be INCLUDED, when the mask is
862 applied.
863 Parameters:
864 [min, max], [min2, max2], ...
865 Pairs of start/end points (inclusive) specifying the regions
866 to be masked
867 invert: optional argument. If specified as True,
868 return an inverted mask, i.e. the regions
869 specified are EXCLUDED
870 row: create the mask using the specified row for
871 unit conversions, default is row=0
872 only necessary if frequency varies over rows.
873 Example:
874 scan.set_unit('channel')
875 a)
876 msk = scan.create_mask([400, 500], [800, 900])
877 # masks everything outside 400 and 500
878 # and 800 and 900 in the unit 'channel'
879
880 b)
881 msk = scan.create_mask([400, 500], [800, 900], invert=True)
882 # masks the regions between 400 and 500
883 # and 800 and 900 in the unit 'channel'
884 c)
885 mask only channel 400
886 msk = scan.create_mask([400, 400])
887 """
888 row = 0
889 if kwargs.has_key("row"):
890 row = kwargs.get("row")
891 data = self._getabcissa(row)
892 u = self._getcoordinfo()[0]
893 if rcParams['verbose']:
894 if u == "": u = "channel"
895 msg = "The current mask window unit is %s" % u
896 i = self._check_ifs()
897 if not i:
898 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
899 asaplog.push(msg)
900 n = self.nchan()
901 msk = _n_bools(n, False)
902 # test if args is a 'list' or a 'normal *args - UGLY!!!
903
904 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
905 and args or args[0]
906 for window in ws:
907 if (len(window) != 2 or window[0] > window[1] ):
908 raise TypeError("A window needs to be defined as [min, max]")
909 for i in range(n):
910 if data[i] >= window[0] and data[i] <= window[1]:
911 msk[i] = True
912 if kwargs.has_key('invert'):
913 if kwargs.get('invert'):
914 msk = mask_not(msk)
915 print_log()
916 return msk
917
918 def get_masklist(self, mask=None, row=0):
919 """
920 Compute and return a list of mask windows, [min, max].
921 Parameters:
922 mask: channel mask, created with create_mask.
923 row: calculate the masklist using the specified row
924 for unit conversions, default is row=0
925 only necessary if frequency varies over rows.
926 Returns:
927 [min, max], [min2, max2], ...
928 Pairs of start/end points (inclusive) specifying
929 the masked regions
930 """
931 if not (isinstance(mask,list) or isinstance(mask, tuple)):
932 raise TypeError("The mask should be list or tuple.")
933 if len(mask) < 2:
934 raise TypeError("The mask should contain at least 2 elements")
935 if self.nchan() != len(mask):
936 msg = "Number of channels in scantable != number of mask elements"
937 raise TypeError(msg)
938 data = self._getabcissa(row)
939 u = self._getcoordinfo()[0]
940 if rcParams['verbose']:
941 if u == "": u = "channel"
942 msg = "The current mask window unit is %s" % u
943 i = self._check_ifs()
944 if not i:
945 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
946 asaplog.push(msg)
947 masklist=[]
948 ist, ien = None, None
949 ist, ien=self.get_mask_indices(mask)
950 if ist is not None and ien is not None:
951 for i in xrange(len(ist)):
952 limits = [data[ist[i]], data[ien[i]]]
953 limits.sort()
954 masklist.append([limits[0], limits[1]])
955 return masklist
956
957 def get_mask_indices(self, mask=None):
958 """
959 Compute and Return lists of mask start indices and mask end indices.
960 Parameters:
961 mask: channel mask, created with create_mask.
962 Returns:
963 List of mask start indices and that of mask end indices,
964 i.e., [istart1,istart2,....], [iend1,iend2,....].
965 """
966 if not (isinstance(mask,list) or isinstance(mask, tuple)):
967 raise TypeError("The mask should be list or tuple.")
968 if len(mask) < 2:
969 raise TypeError("The mask should contain at least 2 elements")
970 istart=[]
971 iend=[]
972 if mask[0]: istart.append(0)
973 for i in range(len(mask)-1):
974 if not mask[i] and mask[i+1]:
975 istart.append(i+1)
976 elif mask[i] and not mask[i+1]:
977 iend.append(i)
978 if mask[len(mask)-1]: iend.append(len(mask)-1)
979 if len(istart) != len(iend):
980 raise RuntimeError("Numbers of mask start != mask end.")
981 for i in range(len(istart)):
982 if istart[i] > iend[i]:
983 raise RuntimeError("Mask start index > mask end index")
984 break
985 return istart,iend
986
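    # Usage sketch with a hand-made mask: channels 1-3 form the only True run,
    # so the result is istart=[1], iend=[3]
    #     mask = [False, True, True, True, False]
    #     istart, iend = scan.get_mask_indices(mask)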
987# def get_restfreqs(self):
988# """
989# Get the restfrequency(s) stored in this scantable.
990# The return value(s) are always of unit 'Hz'
991# Parameters:
992# none
993# Returns:
994# a list of doubles
995# """
996# return list(self._getrestfreqs())
997
998 def get_restfreqs(self, ids=None):
999 """
1000 Get the restfrequency(s) stored in this scantable.
1001 The return value(s) are always of unit 'Hz'
1002 Parameters:
1003 ids: (optional) a list of MOLECULE_IDs for which the restfrequency(s)
1004 are to be retrieved
1005 Returns:
1006 dictionary containing ids and a list of doubles for each id
1007 """
1008 if ids is None:
1009 rfreqs={}
1010 idlist = self.getmolnos()
1011 for i in idlist:
1012 rfreqs[i]=list(self._getrestfreqs(i))
1013 return rfreqs
1014 else:
1015 if type(ids)==list or type(ids)==tuple:
1016 rfreqs={}
1017 for i in ids:
1018 rfreqs[i]=list(self._getrestfreqs(i))
1019 return rfreqs
1020 else:
1021 return list(self._getrestfreqs(ids))
1022 #return list(self._getrestfreqs(ids))
1023
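    # Usage sketch: without arguments a dict keyed by MOLECULE_ID is returned;
    # passing an id (value hypothetical) gives the list for that id only
    #     allfreqs = scan.get_restfreqs()      # e.g. {0: [1.420405752e+09]}
    #     freqs0 = scan.get_restfreqs(ids=0)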
1024 def set_restfreqs(self, freqs=None, unit='Hz'):
1025 """
1026 ********NEED TO BE UPDATED begin************
1027 Set or replace the specified restfrequency.
1028 If the 'freqs' argument holds a scalar,
1029 then that rest frequency will be applied to all the selected
1030 data. If the 'freqs' argument holds
1031 a vector, then it MUST be of equal or smaller length than
1032 the number of IFs (and the available restfrequencies will be
1033 replaced by this vector). In this case, *all* data have
1034 the restfrequency set per IF according
1035 to the corresponding value you give in the 'freqs' vector.
1036 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1037 IF 1 gets restfreq 2e9.
1038 ********NEED TO BE UPDATED end************
1039 You can also specify the frequencies via a linecatalog.
1040
1041 Parameters:
1042 freqs: list of rest frequency values or string identifiers
1043 unit: unit for rest frequency (default 'Hz')
1044
1045 Example:
1046 # set the given restfrequency for all currently selected IFs
1047 scan.set_restfreqs(freqs=1.4e9)
1048 # set multiple restfrequencies to all the selected data
1049 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1050 # If the number of IFs in the data is >= 2 the IF0 gets the first
1051 # value IF1 the second... NOTE that freqs needs to be
1052 # specified in list of list (e.g. [[],[],...] ).
1053 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
1054 #set the given restfrequency for the whole table (by name)
1055 scan.set_restfreqs(freqs="OH1667")
1056
1057 Note:
1058 To do more sophisticated rest frequency setting, e.g. on a
1059 source and IF basis, use scantable.set_selection() before using
1060 this function.
1061 # provided your scantable is called scan
1062 selection = selector()
1063 selection.set_name("ORION*")
1064 selection.set_ifs([1])
1065 scan.set_selection(selection)
1066 scan.set_restfreqs(freqs=86.6e9)
1067
1068 """
1069 varlist = vars()
1070 from asap import linecatalog
1071 # simple value
1072 if isinstance(freqs, int) or isinstance(freqs, float):
1073 # TT mod
1074 #self._setrestfreqs(freqs, "",unit)
1075 self._setrestfreqs([freqs], [""],unit)
1076 # list of values
1077 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1078 # list values are scalars
1079 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1080 self._setrestfreqs(freqs, [""],unit)
1081 # list values are tuples, (value, name)
1082 elif isinstance(freqs[-1], dict):
1083 #sel = selector()
1084 #savesel = self._getselection()
1085 #iflist = self.getifnos()
1086 #for i in xrange(len(freqs)):
1087 # sel.set_ifs(iflist[i])
1088 # self._setselection(sel)
1089 # self._setrestfreqs(freqs[i], "",unit)
1090 #self._setselection(savesel)
1091 self._setrestfreqs(freqs["value"],
1092 freqs["name"], "MHz")
1093 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1094 sel = selector()
1095 savesel = self._getselection()
1096 iflist = self.getifnos()
1097 if len(freqs)>len(iflist):
1098 raise ValueError("number of elements in the list of lists exceeds the current IF selections")
1099 for i in xrange(len(freqs)):
1100 sel.set_ifs(iflist[i])
1101 self._setselection(sel)
1102 self._setrestfreqs(freqs[i]["value"],
1103 freqs[i]["name"], "MHz")
1104 self._setselection(savesel)
1105 # freqs are to be taken from a linecatalog
1106 elif isinstance(freqs, linecatalog):
1107 sel = selector()
1108 savesel = self._getselection()
     iflist = self.getifnos()  # map linecatalog rows onto the available IFs
1109 for i in xrange(freqs.nrow()):
1110 sel.set_ifs(iflist[i])
1111 self._setselection(sel)
1112 self._setrestfreqs(freqs.get_frequency(i),
1113 freqs.get_name(i), "MHz")
1114 # ensure that we are not iterating past nIF
1115 if i == self.nif()-1: break
1116 self._setselection(savesel)
1117 else:
1118 return
1119 self._add_history("set_restfreqs", varlist)
1120
1121 def shift_refpix(self, delta):
1122 """
1123 Shift the reference pixel of the Spectral Coordinate by an
1124 integer amount.
1125 Parameters:
1126 delta: the amount to shift by
1127 Note:
1128 Be careful using this with broadband data.
1129 """
1130 Scantable.shift(self, delta)
1131
1132 def history(self, filename=None):
1133 """
1134 Print the history. Optionally to a file.
1135 Parameters:
1136 filename: The name of the file to save the history to.
1137 """
1138 hist = list(self._gethistory())
1139 out = "-"*80
1140 for h in hist:
1141 if h.startswith("---"):
1142 out += "\n"+h
1143 else:
1144 items = h.split("##")
1145 date = items[0]
1146 func = items[1]
1147 items = items[2:]
1148 out += "\n"+date+"\n"
1149 out += "Function: %s\n Parameters:" % (func)
1150 for i in items:
1151 s = i.split("=")
1152 out += "\n %s = %s" % (s[0], s[1])
1153 out += "\n"+"-"*80
1154 if filename is not None:
1155 if filename == "":
1156 filename = 'scantable_history.txt'
1157 import os
1158 filename = os.path.expandvars(os.path.expanduser(filename))
1159 if not os.path.isdir(filename):
1160 data = open(filename, 'w')
1161 data.write(out)
1162 data.close()
1163 else:
1164 msg = "Illegal file name '%s'." % (filename)
1165 if rcParams['verbose']:
1166 #print msg
1167 asaplog.push( msg )
1168 print_log( 'ERROR' )
1169 else:
1170 raise IOError(msg)
1171 if rcParams['verbose']:
1172 try:
1173 from IPython.genutils import page as pager
1174 except ImportError:
1175 from pydoc import pager
1176 pager(out)
1177 else:
1178 return out
1179 return
1180 #
1181 # Maths business
1182 #
1183
1184 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1185 """
1186 Return the (time) weighted average of a scan.
1187 Note:
1188 in channels only - align if necessary
1189 Parameters:
1190 mask: an optional mask (only used for 'var' and 'tsys'
1191 weighting)
1192 scanav: True averages each scan separately
1193 False (default) averages all scans together,
1194 weight: Weighting scheme.
1195 'none' (mean no weight)
1196 'var' (1/var(spec) weighted)
1197 'tsys' (1/Tsys**2 weighted)
1198 'tint' (integration time weighted)
1199 'tintsys' (Tint/Tsys**2)
1200 'median' ( median averaging)
1201 The default is 'tint'
1202 align: align the spectra in velocity before averaging. It takes
1203 the time of the first spectrum as reference time.
1204 Example:
1205 # time average the scantable without using a mask
1206 newscan = scan.average_time()
1207 """
1208 varlist = vars()
1209 if weight is None: weight = 'TINT'
1210 if mask is None: mask = ()
1211 if scanav: scanav = "SCAN"
1212 else: scanav = "NONE"
1213 scan = (self, )
1214 try:
1215 if align:
1216 scan = (self.freq_align(insitu=False), )
1217 s = None
1218 if weight.upper() == 'MEDIAN':
1219 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1220 scanav))
1221 else:
1222 s = scantable(self._math._average(scan, mask, weight.upper(),
1223 scanav))
1224 except RuntimeError, msg:
1225 if rcParams['verbose']:
1226 #print msg
1227 print_log()
1228 asaplog.push( msg )
1229 print_log( 'ERROR' )
1230 return
1231 else: raise
1232 s._add_history("average_time", varlist)
1233 print_log()
1234 return s
1235
1236 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1237 """
1238 Return a scan where all spectra are converted to either
1239 Jansky or Kelvin depending upon the flux units of the scan table.
1240 By default the function tries to look the values up internally.
1241 If it can't find them (or if you want to over-ride), you must
1242 specify EITHER jyperk OR eta (and D which it will try to look up
1243 also if you don't set it). jyperk takes precedence if you set both.
1244 Parameters:
1245 jyperk: the Jy / K conversion factor
1246 eta: the aperture efficiency
1247 d: the geometric diameter (metres)
1248 insitu: if False a new scantable is returned.
1249 Otherwise, the scaling is done in-situ
1250 The default is taken from .asaprc (False)
1251 """
1252 if insitu is None: insitu = rcParams['insitu']
1253 self._math._setinsitu(insitu)
1254 varlist = vars()
1255 if jyperk is None: jyperk = -1.0
1256 if d is None: d = -1.0
1257 if eta is None: eta = -1.0
1258 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1259 s._add_history("convert_flux", varlist)
1260 print_log()
1261 if insitu: self._assign(s)
1262 else: return s
1263
1264 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1265 """
1266 Return a scan after applying a gain-elevation correction.
1267 The correction can be made via either a polynomial or a
1268 table-based interpolation (and extrapolation if necessary).
1269 You specify polynomial coefficients, an ascii table or neither.
1270 If you specify neither, then a polynomial correction will be made
1271 with built in coefficients known for certain telescopes (an error
1272 will occur if the instrument is not known).
1273 The data and Tsys are *divided* by the scaling factors.
1274 Parameters:
1275 poly: Polynomial coefficients (default None) to compute a
1276 gain-elevation correction as a function of
1277 elevation (in degrees).
1278 filename: The name of an ascii file holding correction factors.
1279 The first row of the ascii file must give the column
1280 names and these MUST include columns
1281 "ELEVATION" (degrees) and "FACTOR" (multiply data
1282 by this) somewhere.
1283 The second row must give the data type of the
1284 column. Use 'R' for Real and 'I' for Integer.
1285 An example file would be
1286 (actual factors are arbitrary) :
1287
1288 TIME ELEVATION FACTOR
1289 R R R
1290 0.1 0 0.8
1291 0.2 20 0.85
1292 0.3 40 0.9
1293 0.4 60 0.85
1294 0.5 80 0.8
1295 0.6 90 0.75
1296 method: Interpolation method when correcting from a table.
1297 Values are "nearest", "linear" (default), "cubic"
1298 and "spline"
1299 insitu: if False a new scantable is returned.
1300 Otherwise, the scaling is done in-situ
1301 The default is taken from .asaprc (False)
1302 """
1303
1304 if insitu is None: insitu = rcParams['insitu']
1305 self._math._setinsitu(insitu)
1306 varlist = vars()
1307 if poly is None:
1308 poly = ()
1309 from os.path import expandvars
1310 filename = expandvars(filename)
1311 s = scantable(self._math._gainel(self, poly, filename, method))
1312 s._add_history("gain_el", varlist)
1313 print_log()
1314 if insitu: self._assign(s)
1315 else: return s
1316
1317 def freq_align(self, reftime=None, method='cubic', insitu=None):
1318 """
1319 Return a scan where all rows have been aligned in frequency/velocity.
1320 The alignment frequency frame (e.g. LSRK) is that set by function
1321 set_freqframe.
1322 Parameters:
1323 reftime: reference time to align at. By default, the time of
1324 the first row of data is used.
1325 method: Interpolation method for regridding the spectra.
1326 Choose from "nearest", "linear", "cubic" (default)
1327 and "spline"
1328 insitu: if False a new scantable is returned.
1329 Otherwise, the scaling is done in-situ
1330 The default is taken from .asaprc (False)
1331 """
1332 if insitu is None: insitu = rcParams["insitu"]
1333 self._math._setinsitu(insitu)
1334 varlist = vars()
1335 if reftime is None: reftime = ""
1336 s = scantable(self._math._freq_align(self, reftime, method))
1337 s._add_history("freq_align", varlist)
1338 print_log()
1339 if insitu: self._assign(s)
1340 else: return s
1341
1342 def opacity(self, tau, insitu=None):
1343 """
1344 Apply an opacity correction. The data
1345 and Tsys are multiplied by the correction factor.
1346 Parameters:
1347 tau: Opacity from which the correction factor is
1348 exp(tau*ZD)
1349 where ZD is the zenith-distance
1350 insitu: if False a new scantable is returned.
1351 Otherwise, the scaling is done in-situ
1352 The default is taken from .asaprc (False)
1353 """
1354 if insitu is None: insitu = rcParams['insitu']
1355 self._math._setinsitu(insitu)
1356 varlist = vars()
1357 s = scantable(self._math._opacity(self, tau))
1358 s._add_history("opacity", varlist)
1359 print_log()
1360 if insitu: self._assign(s)
1361 else: return s
1362
1363 def bin(self, width=5, insitu=None):
1364 """
1365 Return a scan where all spectra have been binned up.
1366 Parameters:
1367 width: The bin width (default=5) in pixels
1368 insitu: if False a new scantable is returned.
1369 Otherwise, the scaling is done in-situ
1370 The default is taken from .asaprc (False)
1371 """
1372 if insitu is None: insitu = rcParams['insitu']
1373 self._math._setinsitu(insitu)
1374 varlist = vars()
1375 s = scantable(self._math._bin(self, width))
1376 s._add_history("bin", varlist)
1377 print_log()
1378 if insitu: self._assign(s)
1379 else: return s
1380
1381
1382 def resample(self, width=5, method='cubic', insitu=None):
1383 """
1384 Return a scan where all spectra have been resampled.
1385
1386 Parameters:
1387 width: The bin width (default=5) in pixels
1388 method: Interpolation method for the resampling.
1389 Values are "nearest", "linear", "cubic" (default)
1390 and "spline"
1391 insitu: if False a new scantable is returned.
1392 Otherwise, the scaling is done in-situ
1393 The default is taken from .asaprc (False)
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
1396 self._math._setinsitu(insitu)
1397 varlist = vars()
1398 s = scantable(self._math._resample(self, method, width))
1399 s._add_history("resample", varlist)
1400 print_log()
1401 if insitu: self._assign(s)
1402 else: return s
1403
1404
1405 def average_pol(self, mask=None, weight='none'):
1406 """
1407 Average the Polarisations together.
1408 Parameters:
1409 mask: An optional mask defining the region, where the
1410 averaging will be applied. The output will have all
1411 specified points masked.
1412 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1413 weighted), or 'tsys' (1/Tsys**2 weighted)
1414 """
1415 varlist = vars()
1416 if mask is None:
1417 mask = ()
1418 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1419 s._add_history("average_pol", varlist)
1420 print_log()
1421 return s
1422
1423 def average_beam(self, mask=None, weight='none'):
1424 """
1425 Average the Beams together.
1426 Parameters:
1427 mask: An optional mask defining the region, where the
1428 averaging will be applied. The output will have all
1429 specified points masked.
1430 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1431 weighted), or 'tsys' (1/Tsys**2 weighted)
1432 """
1433 varlist = vars()
1434 if mask is None:
1435 mask = ()
1436 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1437 s._add_history("average_beam", varlist)
1438 print_log()
1439 return s
1440
1441 def convert_pol(self, poltype=None):
1442 """
1443 Convert the data to a different polarisation type.
1444 Parameters:
1445 poltype: The new polarisation type. Valid types are:
1446 "linear", "stokes" and "circular"
1447 """
1448 varlist = vars()
1449 try:
1450 s = scantable(self._math._convertpol(self, poltype))
1451 except RuntimeError, msg:
1452 if rcParams['verbose']:
1453 #print msg
1454 print_log()
1455 asaplog.push( msg )
1456 print_log( 'ERROR' )
1457 return
1458 else:
1459 raise
1460 s._add_history("convert_pol", varlist)
1461 print_log()
1462 return s
1463
1464 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1465 """
1466 Smooth the spectrum by the specified kernel (conserving flux).
1467 Parameters:
1468 kernel: The type of smoothing kernel. Select from
1469 'hanning' (default), 'gaussian', 'boxcar' and
1470 'rmedian'
1471 width: The width of the kernel in pixels. For 'hanning' this is
1472 ignored, otherwise it defaults to 5 pixels.
1473 For 'gaussian' it is the Full Width Half
1474 Maximum. For 'boxcar' it is the full width.
1475 For 'rmedian' it is the half width.
1476 insitu: if False a new scantable is returned.
1477 Otherwise, the scaling is done in-situ
1478 The default is taken from .asaprc (False)
1479 Example:
1480 none
1481 """
1482 if insitu is None: insitu = rcParams['insitu']
1483 self._math._setinsitu(insitu)
1484 varlist = vars()
1485 s = scantable(self._math._smooth(self, kernel.lower(), width))
1486 s._add_history("smooth", varlist)
1487 print_log()
1488 if insitu: self._assign(s)
1489 else: return s
1490
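    # Usage sketch: default hanning smoothing (width ignored) versus a
    # 10-channel boxcar, each returning a new scantable
    #     sm1 = scan.smooth(insitu=False)
    #     sm2 = scan.smooth(kernel='boxcar', width=10, insitu=False)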
1491
1492 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1493 """
1494 Return a scan which has been baselined (all rows) by a polynomial.
1495 Parameters:
1496 mask: an optional mask
1497 order: the order of the polynomial (default is 0)
1498 plot: plot the fit and the residual. In this case each
1499 individual fit has to be approved, by typing 'y'
1500 or 'n'
1501 uselin: use linear polynomial fit
1502 insitu: if False a new scantable is returned.
1503 Otherwise, the scaling is done in-situ
1504 The default is taken from .asaprc (False)
1505 Example:
1506 # return a scan baselined by a third order polynomial,
1507 # not using a mask
1508 bscan = scan.poly_baseline(order=3)
1509 """
1510 if insitu is None: insitu = rcParams['insitu']
1511 varlist = vars()
1512 if mask is None:
1513 mask = [True for i in xrange(self.nchan(-1))]
1514 from asap.asapfitter import fitter
1515 try:
1516 f = fitter()
1517 f.set_scan(self, mask)
1518 if uselin:
1519 f.set_function(lpoly=order)
1520 else:
1521 f.set_function(poly=order)
1522 s = f.auto_fit(insitu, plot=plot)
1523 # Save parameters of baseline fits as a class attribute.
1524 # NOTICE: It does not reflect changes in scantable!
1525 self.blpars = f.blpars
1526 s._add_history("poly_baseline", varlist)
1527 print_log()
1528 if insitu: self._assign(s)
1529 else: return s
1530 except RuntimeError:
1531 msg = "The fit failed, possibly because it didn't converge."
1532 if rcParams['verbose']:
1533 #print msg
1534 print_log()
1535 asaplog.push( msg )
1536 print_log( 'ERROR' )
1537 return
1538 else:
1539 raise RuntimeError(msg)
1540
1541
1542 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1543 threshold=3, chan_avg_limit=1, plot=False,
1544 insitu=None):
1545 """
1546 Return a scan which has been baselined (all rows) by a polynomial.
1547 Spectral lines are detected first using linefinder and masked out
1548 to avoid them affecting the baseline solution.
1549
1550 Parameters:
1551 mask: an optional mask retrieved from the scantable
1552 edge: an optional number of channels to drop at
1553 the edge of spectrum. If only one value is
1554 specified, the same number will be dropped from
1555 both sides of the spectrum. Default is to keep
1556 all channels. Nested tuples represent individual
1557 edge selection for different IFs (a number of spectral
1558 channels can be different)
1559 order: the order of the polynomial (default is 0)
1560 threshold: the threshold used by line finder. It is better to
1561 keep it large as only strong lines affect the
1562 baseline solution.
1563 chan_avg_limit:
1564 a maximum number of consecutive spectral channels to
1565 average during the search of weak and broad lines.
1566 The default is no averaging (and no search for weak
1567 lines). If such lines can affect the fitted baseline
1568 (e.g. a high order polynomial is fitted), increase this
1569 parameter (usually values up to 8 are reasonable). Most
1570 users of this method should find the default value
1571 sufficient.
1572 plot: plot the fit and the residual. In this case each
1573 individual fit has to be approved, by typing 'y'
1574 or 'n'
1575 insitu: if False a new scantable is returned.
1576 Otherwise, the scaling is done in-situ
1577 The default is taken from .asaprc (False)
1578
1579 Example:
1580 scan2=scan.auto_poly_baseline(order=7)
1581 """
1582 if insitu is None: insitu = rcParams['insitu']
1583 varlist = vars()
1584 from asap.asapfitter import fitter
1585 from asap.asaplinefind import linefinder
1586 from asap import _is_sequence_or_number as _is_valid
1587
1588 # check whether edge is set up for each IF individually
1589 individualedge = False;
1590 if len(edge) > 1:
1591 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1592 individualedge = True;
1593
1594 if not _is_valid(edge, int) and not individualedge:
1595 raise ValueError, "Parameter 'edge' has to be an integer or a \
1596 pair of integers specified as a tuple. Nested tuples are allowed \
1597 to make individual selection for different IFs."
1598
1599 curedge = (0, 0)
1600 if individualedge:
1601 for edgepar in edge:
1602 if not _is_valid(edgepar, int):
1603 raise ValueError, "Each element of the 'edge' tuple has \
1604 to be a pair of integers or an integer."
1605 else:
1606 curedge = edge;
1607
1608 # setup fitter
1609 f = fitter()
1610 f.set_function(poly=order)
1611
1612 # setup line finder
1613 fl = linefinder()
1614 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1615
1616 if not insitu:
1617 workscan = self.copy()
1618 else:
1619 workscan = self
1620
1621 fl.set_scan(workscan)
1622
1623 rows = range(workscan.nrow())
1624 # Save parameters of baseline fits & masklists as a class attribute.
1625 # NOTICE: It does not reflect changes in scantable!
1626 if len(rows) > 0:
1627 self.blpars=[]
1628 self.masklists=[]
1629 asaplog.push("Processing:")
1630 for r in rows:
1631 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1632 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1633 workscan.getpol(r), workscan.getcycle(r))
1634 asaplog.push(msg, False)
1635
1636 # figure out edge parameter
1637 if individualedge:
1638 if len(edge) <= workscan.getif(r):
1639 raise RuntimeError, "Number of edge elements appears to " \
1640 "be less than the number of IFs"
1641 curedge = edge[workscan.getif(r)]
1642
1643 # setup line finder
1644 fl.find_lines(r, mask, curedge)
1645 outmask=fl.get_mask()
1646 f.set_scan(workscan, fl.get_mask())
1647 f.x = workscan._getabcissa(r)
1648 f.y = workscan._getspectrum(r)
1649 f.data = None
1650 f.fit()
1651
1652 # Show mask list
1653 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1654 msg = "mask range: "+str(masklist)
1655 asaplog.push(msg, False)
1656
1657 fpar = f.get_parameters()
1658 if plot:
1659 f.plot(residual=True)
1660 x = raw_input("Accept fit ( [y]/n ): ")
1661 if x.upper() == 'N':
1662 self.blpars.append(None)
1663 self.masklists.append(None)
1664 continue
1665 workscan._setspectrum(f.fitter.getresidual(), r)
1666 self.blpars.append(fpar)
1667 self.masklists.append(masklist)
1668 if plot:
1669 f._p.unmap()
1670 f._p = None
1671 workscan._add_history("auto_poly_baseline", varlist)
1672 if insitu:
1673 self._assign(workscan)
1674 else:
1675 return workscan
1676
1677 def rotate_linpolphase(self, angle):
1678 """
1679 Rotate the phase of the complex polarization O=Q+iU correlation.
1680 This is always done in situ in the raw data. So if you call this
1681 function more than once then each call rotates the phase further.
1682 Parameters:
1683 angle: The angle (degrees) to rotate (add) by.
1684 Examples:
1685 scan.rotate_linpolphase(2.3)
1686 """
1687 varlist = vars()
1688 self._math._rotate_linpolphase(self, angle)
1689 self._add_history("rotate_linpolphase", varlist)
1690 print_log()
1691 return
1692
1693
1694 def rotate_xyphase(self, angle):
1695 """
1696 Rotate the phase of the XY correlation. This is always done in situ
1697 in the data. So if you call this function more than once
1698 then each call rotates the phase further.
1699 Parameters:
1700 angle: The angle (degrees) to rotate (add) by.
1701 Examples:
1702 scan.rotate_xyphase(2.3)
1703 """
1704 varlist = vars()
1705 self._math._rotate_xyphase(self, angle)
1706 self._add_history("rotate_xyphase", varlist)
1707 print_log()
1708 return
1709
1710 def swap_linears(self):
1711 """
1712 Swap the linear polarisations XX and YY, or more generally the first two
1713 polarisations, as this also works for circulars.
1714 """
1715 varlist = vars()
1716 self._math._swap_linears(self)
1717 self._add_history("swap_linears", varlist)
1718 print_log()
1719 return
1720
1721 def invert_phase(self):
1722 """
1723 Invert the phase of the complex polarisation
1724 """
1725 varlist = vars()
1726 self._math._invert_phase(self)
1727 self._add_history("invert_phase", varlist)
1728 print_log()
1729 return
1730
1731 def add(self, offset, insitu=None):
1732 """
1733 Return a scan where all spectra have the offset added
1734 Parameters:
1735 offset: the offset
1736 insitu: if False a new scantable is returned.
1737 Otherwise, the scaling is done in-situ
1738 The default is taken from .asaprc (False)
1739 """
1740 if insitu is None: insitu = rcParams['insitu']
1741 self._math._setinsitu(insitu)
1742 varlist = vars()
1743 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1744 s._add_history("add", varlist)
1745 print_log()
1746 if insitu:
1747 self._assign(s)
1748 else:
1749 return s
1750
1751 def scale(self, factor, tsys=True, insitu=None):
1752 """
1753 Return a scan where all spectra are scaled by the given 'factor'
1754 Parameters:
1755 factor: the scaling factor
1756 insitu: if False a new scantable is returned.
1757 Otherwise, the scaling is done in-situ
1758 The default is taken from .asaprc (False)
1759 tsys: if True (default) then apply the operation to Tsys
1760 as well as the data
1761 """
1762 if insitu is None: insitu = rcParams['insitu']
1763 self._math._setinsitu(insitu)
1764 varlist = vars()
1765 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1766 s._add_history("scale", varlist)
1767 print_log()
1768 if insitu:
1769 self._assign(s)
1770 else:
1771 return s
1772
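# Usage sketch for add()/scale(); the factor and offset below are purely
# illustrative values, not taken from the original source:
#     s2 = scan.scale(1.05, tsys=True, insitu=False)  # apply a 5% gain correction
#     s2.add(-0.3, insitu=True)                        # subtract a constant offset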
1773 def set_sourcetype(self, match, matchtype="pattern",
1774 sourcetype="reference"):
1775 """
1776 Set the type of the source to be a source or reference scan
1777 using the provided pattern.
1778 Parameters:
1779 match: a Unix style pattern, regular expression or selector
1780 matchtype: 'pattern' (default) UNIX style pattern or
1781 'regex' regular expression
1782 sourcetype: the type of the source to use (source/reference)
1783 """
1784 varlist = vars()
1785 basesel = self.get_selection()
1786 stype = -1
1787 if sourcetype.lower().startswith("r"):
1788 stype = 1
1789 elif sourcetype.lower().startswith("s"):
1790 stype = 0
1791 else:
1792 raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1793 if matchtype.lower().startswith("p"):
1794 matchtype = "pattern"
1795 elif matchtype.lower().startswith("r"):
1796 matchtype = "regex"
1797 else:
1798 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1799 sel = selector()
1800 if isinstance(match, selector):
1801 sel = match
1802 else:
1803 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1804 self.set_selection(basesel+sel)
1805 self._setsourcetype(stype)
1806 self.set_selection(basesel)
1807 self._add_history("set_sourcetype", varlist)
1808
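# Usage sketch for set_sourcetype; the file name and pattern are hypothetical,
# not taken from the original source:
#     s = scantable('mydata.rpf', average=True)
#     # mark scans whose source name ends in '_R' as reference scans
#     s.set_sourcetype(match='*_R', matchtype='pattern', sourcetype='reference')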
1809 def auto_quotient(self, preserve=True, mode='paired'):
1810 """
1811 This function allows you to build quotients automatically.
1812 It assumes the observation to have the same number of
1813 "ons" and "offs"
1814 Parameters:
1815 preserve: you can preserve (default) the continuum or
1816 remove it. The equations used are
1817 preserve: Output = Toff * (on/off) - Toff
1818 remove: Output = Toff * (on/off) - Ton
1819 mode: the on/off detection mode
1820 'paired' (default)
1821 identifies 'off' scans by the
1822 trailing '_R' (Mopra/Parkes) or
1823 '_e'/'_w' (Tid) and matches
1824 on/off pairs from the observing pattern
1825 'time'
1826 finds the closest off in time
1827
1828 """
1829 modes = ["time", "paired"]
1830 if mode not in modes:
1831 msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1832 raise ValueError(msg)
1833 varlist = vars()
1834 s = None
1835 if mode.lower() == "paired":
1836 basesel = self.get_selection()
1837 sel = selector()+basesel
1838 sel.set_query("SRCTYPE==1")
1839 self.set_selection(sel)
1840 offs = self.copy()
1841 sel.set_query("SRCTYPE==0")
1842 self.set_selection(sel)
1843 ons = self.copy()
1844 s = scantable(self._math._quotient(ons, offs, preserve))
1845 self.set_selection(basesel)
1846 elif mode.lower() == "time":
1847 s = scantable(self._math._auto_quotient(self, mode, preserve))
1848 s._add_history("auto_quotient", varlist)
1849 print_log()
1850 return s
1851
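# Usage sketch for auto_quotient; the file names are hypothetical:
#     s = scantable('obs.rpf', average=True)
#     # 'paired' mode pairs each on-source scan with its '_R' (or '_e'/'_w') reference
#     q = s.auto_quotient(preserve=True, mode='paired')
#     q.save('obs_quotient', overwrite=True)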
1852 def mx_quotient(self, mask = None, weight='median', preserve=True):
1853 """
1854 Form a quotient using "off" beams when observing in "MX" mode.
1855 Parameters:
1856 mask: an optional mask to be used when weight == 'stddev'
1857 weight: How to average the off beams. Default is 'median'.
1858 preserve: you can preserve (default) the continuum or
1859 remove it. The equations used are
1860 preserve: Output = Toff * (on/off) - Toff
1861 remove: Output = Toff * (on/off) - Ton
1862 """
1863 if mask is None: mask = ()
1864 varlist = vars()
1865 on = scantable(self._math._mx_extract(self, 'on'))
1866 preoff = scantable(self._math._mx_extract(self, 'off'))
1867 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1868 from asapmath import quotient
1869 q = quotient(on, off, preserve)
1870 q._add_history("mx_quotient", varlist)
1871 print_log()
1872 return q
1873
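# Usage sketch for mx_quotient; the file name is hypothetical:
#     s = scantable('mx_obs.rpf', average=True)
#     q = s.mx_quotient(weight='median', preserve=True)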
1874 def freq_switch(self, insitu=None):
1875 """
1876 Apply frequency switching to the data.
1877 Parameters:
1878 insitu: if False a new scantable is returned.
1879 Otherwise, the switching is done in-situ
1880 The default is taken from .asaprc (False)
1881 Example:
1882 none
1883 """
1884 if insitu is None: insitu = rcParams['insitu']
1885 self._math._setinsitu(insitu)
1886 varlist = vars()
1887 s = scantable(self._math._freqswitch(self))
1888 s._add_history("freq_switch", varlist)
1889 print_log()
1890 if insitu: self._assign(s)
1891 else: return s
1892
1893 def recalc_azel(self):
1894 """
1895 Recalculate the azimuth and elevation for each position.
1896 Parameters:
1897 none
1898 Example:
1899 """
1900 varlist = vars()
1901 self._recalcazel()
1902 self._add_history("recalc_azel", varlist)
1903 print_log()
1904 return
1905
1906 def __add__(self, other):
1907 varlist = vars()
1908 s = None
1909 if isinstance(other, scantable):
1910 s = scantable(self._math._binaryop(self, other, "ADD"))
1911 elif isinstance(other, float):
1912 s = scantable(self._math._unaryop(self, other, "ADD", False))
1913 else:
1914 raise TypeError("Other input is not a scantable or float value")
1915 s._add_history("operator +", varlist)
1916 print_log()
1917 return s
1918
1919 def __sub__(self, other):
1920 """
1921 implicit on all axes and on Tsys
1922 """
1923 varlist = vars()
1924 s = None
1925 if isinstance(other, scantable):
1926 s = scantable(self._math._binaryop(self, other, "SUB"))
1927 elif isinstance(other, float):
1928 s = scantable(self._math._unaryop(self, other, "SUB", False))
1929 else:
1930 raise TypeError("Other input is not a scantable or float value")
1931 s._add_history("operator -", varlist)
1932 print_log()
1933 return s
1934
1935 def __mul__(self, other):
1936 """
1937 implicit on all axes and on Tsys
1938 """
1939 varlist = vars()
1940 s = None
1941 if isinstance(other, scantable):
1942 s = scantable(self._math._binaryop(self, other, "MUL"))
1943 elif isinstance(other, float):
1944 s = scantable(self._math._unaryop(self, other, "MUL", False))
1945 else:
1946 raise TypeError("Other input is not a scantable or float value")
1947 s._add_history("operator *", varlist)
1948 print_log()
1949 return s
1950
1951
1952 def __div__(self, other):
1953 """
1954 implicit on all axes and on Tsys
1955 """
1956 varlist = vars()
1957 s = None
1958 if isinstance(other, scantable):
1959 s = scantable(self._math._binaryop(self, other, "DIV"))
1960 elif isinstance(other, float):
1961 if other == 0.0:
1962 raise ZeroDivisionError("Dividing by zero is not recommended")
1963 s = scantable(self._math._unaryop(self, other, "DIV", False))
1964 else:
1965 raise TypeError("Other input is not a scantable or float value")
1966 s._add_history("operator /", varlist)
1967 print_log()
1968 return s
1969
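# The arithmetic operators above delegate to the same binary/unary operations
# as add()/scale(); a sketch with illustrative variable names:
#     diff  = on_scan - off_scan    # scantable - scantable, channel by channel
#     boost = diff * 2.0            # scantable * float (Tsys left untouched)
#     ratio = on_scan / off_scan    # scantable / scantable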
1970 def get_fit(self, row=0):
1971 """
1972 Print or return the stored fits for a row in the scantable
1973 Parameters:
1974 row: the row which the fit has been applied to.
1975 """
1976 if row >= self.nrow():
1977 return
1978 from asap.asapfit import asapfit
1979 fit = asapfit(self._getfit(row))
1980 if rcParams['verbose']:
1981 #print fit
1982 asaplog.push( '%s' %(fit) )
1983 print_log()
1984 return
1985 else:
1986 return fit.as_dict()
1987
1988 def flag_nans(self):
1989 """
1990 Utility function to flag NaN values in the scantable.
1991 """
1992 import numpy
1993 basesel = self.get_selection()
1994 for i in range(self.nrow()):
1995 sel = selector()+basesel
1996 sel.set_scans(self.getscan(i))
1997 sel.set_beams(self.getbeam(i))
1998 sel.set_ifs(self.getif(i))
1999 sel.set_polarisations(self.getpol(i))
2000 self.set_selection(sel)
2001 nans = numpy.isnan(self._getspectrum(0))
2002 if numpy.any(nans):
2003 bnans = [ bool(v) for v in nans]
2004 self.flag(bnans)
2005 self.set_selection(basesel)
2006
2007
2008 def _add_history(self, funcname, parameters):
2009 if not rcParams['scantable.history']:
2010 return
2011 # create date
2012 sep = "##"
2013 from datetime import datetime
2014 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2015 hist = dstr+sep
2016 hist += funcname+sep#cdate+sep
2017 if parameters.has_key('self'): del parameters['self']
2018 for k, v in parameters.iteritems():
2019 if type(v) is dict:
2020 for k2, v2 in v.iteritems():
2021 hist += k2
2022 hist += "="
2023 if isinstance(v2, scantable):
2024 hist += 'scantable'
2025 elif k2 == 'mask':
2026 if isinstance(v2, list) or isinstance(v2, tuple):
2027 hist += str(self._zip_mask(v2))
2028 else:
2029 hist += str(v2)
2030 else:
2031 hist += str(v2)
2032 else:
2033 hist += k
2034 hist += "="
2035 if isinstance(v, scantable):
2036 hist += 'scantable'
2037 elif k == 'mask':
2038 if isinstance(v, list) or isinstance(v, tuple):
2039 hist += str(self._zip_mask(v))
2040 else:
2041 hist += str(v)
2042 else:
2043 hist += str(v)
2044 hist += sep
2045 hist = hist[:-2] # remove trailing '##'
2046 self._addhistory(hist)
2047
2048
2049 def _zip_mask(self, mask):
2050 mask = list(mask)
2051 i = 0
2052 segments = []
2053 while mask[i:].count(1):
2054 i += mask[i:].index(1)
2055 if mask[i:].count(0):
2056 j = i + mask[i:].index(0)
2057 else:
2058 j = len(mask)
2059 segments.append([i, j])
2060 i = j
2061 return segments
2062
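# _zip_mask compresses a boolean channel mask into [start, stop) index pairs
# for the history record; an illustrative example:
#     [0, 1, 1, 0, 1]  ->  [[1, 3], [4, 5]]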
2063 def _get_ordinate_label(self):
2064 fu = "("+self.get_fluxunit()+")"
2065 import re
2066 lbl = "Intensity"
2067 if re.match(".K.", fu):
2068 lbl = "Brightness Temperature "+ fu
2069 elif re.match(".Jy.", fu):
2070 lbl = "Flux density "+ fu
2071 return lbl
2072
2073 def _check_ifs(self):
2074 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2075 nchans = filter(lambda t: t > 0, nchans)
2076 return (sum(nchans)/len(nchans) == nchans[0])
2077
2078 def _fill(self, names, unit, average, getpt):
2079 import os
2080 from asap._asap import stfiller
2081 first = True
2082 fullnames = []
2083 for name in names:
2084 name = os.path.expandvars(name)
2085 name = os.path.expanduser(name)
2086 if not os.path.exists(name):
2087 msg = "File '%s' does not exists" % (name)
2088 if rcParams['verbose']:
2089 asaplog.push(msg)
2090 #print asaplog.pop().strip()
2091 print_log( 'ERROR' )
2092 return
2093 raise IOError(msg)
2094 fullnames.append(name)
2095 if average:
2096 asaplog.push('Auto averaging integrations')
2097 stype = int(rcParams['scantable.storage'].lower() == 'disk')
2098 for name in fullnames:
2099 tbl = Scantable(stype)
2100 r = stfiller(tbl)
2101 rx = rcParams['scantable.reference']
2102 r._setreferenceexpr(rx)
2103 msg = "Importing %s..." % (name)
2104 asaplog.push(msg, False)
2105 print_log()
2106 r._open(name, -1, -1, getpt)
2107 r._read()
2108 if average:
2109 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2110 if not first:
2111 tbl = self._math._merge([self, tbl])
2112 Scantable.__init__(self, tbl)
2113 r._close()
2114 del r, tbl
2115 first = False
2116 if unit is not None:
2117 self.set_fluxunit(unit)
2118 #self.set_freqframe(rcParams['scantable.freqframe'])
2119
2120 def __getitem__(self, key):
2121 if key < 0:
2122 key += self.nrow()
2123 if key >= self.nrow():
2124 raise IndexError("Row index out of range.")
2125 return self._getspectrum(key)
2126
2127 def __setitem__(self, key, value):
2128 if key < 0:
2129 key += self.nrow()
2130 if key >= self.nrow():
2131 raise IndexError("Row index out of range.")
2132 if not hasattr(value, "__len__") or \
2133 len(value) > self.nchan(self.getif(key)):
2134 raise ValueError("Spectrum length doesn't match.")
2135 return self._setspectrum(value, key)
2136
2137 def __len__(self):
2138 return self.nrow()
2139
2140 def __iter__(self):
2141 for i in range(len(self)):
2142 yield self[i]
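# The special methods above let a scantable be used like a read/write sequence
# of spectra; a usage sketch (variable names are illustrative):
#     spec = scan[0]          # __getitem__ returns the channel values of row 0
#     scan[0] = spec          # __setitem__ writes a spectrum back into row 0
#     nrows = len(scan)       # __len__ is the number of rows
#     for sp in scan:         # __iter__ yields one spectrum per row
#         pass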