source: trunk/python/scantable.py@ 1830

Last change on this file since 1830 was 1824, checked in by Malte Marquarding, 14 years ago

Refactoring of init.py. Moved functionality into separate modules. Some minor fixes to make unit test work under 'standard asap'.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.6 KB
Line 
1import os
2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
7from asap.env import is_casapy
8from asap._asap import Scantable
9from asap.parameters import rcParams
10from asap.logging import asaplog, print_log, print_log_dec
11from asap.selector import selector
12from asap.linecatalog import linecatalog
13from asap.coordinate import coordinate
14from asap.utils import _n_bools, mask_not, mask_and, mask_or
15
16
def preserve_selection(func):
    """Decorator that saves the scantable's selection before calling
    *func* and restores it afterwards.

    The restore is done in a 'finally' clause, so the original selection
    is re-applied even when the wrapped method raises (previously the
    selection was left modified on error).
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        basesel = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            obj.set_selection(basesel)
    return wrap
25
26
def is_scantable(filename):
    """Return True when *filename* is a directory that looks like an asap
    scantable on disk (contains table.info but not table.f1)."""
    if not os.path.isdir(filename):
        return False
    if os.path.exists(filename + '/table.f1'):
        return False
    return os.path.exists(filename + '/table.info')
31
32
33class scantable(Scantable):
34 """
35 The ASAP container for scans
36 """
37
    @print_log_dec
    def __init__(self, filename, average=None, unit=None, getpt=None,
                 antenna=None, parallactify=None):
        """
        Create a scantable from a saved one or make a reference
        Parameters:
            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read)
                          or
                          [advanced] a reference to an existing
                          scantable
            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.
            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the reader
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables
            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.
            antenna:      Antenna selection. integer (id) or string (name
                          or id).
            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # NOTE(review): the docstring above says getpt defaults to False,
        # but the code falls back to True here -- confirm which is intended.
        if getpt is None:
            getpt = True
        # Normalise the antenna selection to a comma-separated id/name string.
        if antenna is None:
            antenna = ''
        elif type(antenna) == int:
            antenna = '%s'%antenna
        elif type(antenna) == list:
            tmpstr = ''
            for i in range( len(antenna) ):
                if type(antenna[i]) == int:
                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
                elif type(antenna[i]) == str:
                    tmpstr=tmpstr+antenna[i]+','
                else:
                    asaplog.push('Bad antenna selection.')
                    print_log('ERROR')
                    return
            antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # Reference construction from an existing scantable.
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    if rcParams['verbose']:
                        asaplog.push(s)
                        print_log('ERROR')
                        return
                    raise IOError(s)
                if is_scantable(filename):
                    # Native asap table: open directly (optionally on disk).
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif os.path.isdir(filename) \
                         and not os.path.exists(filename+'/table.f1'):
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    if rcParams['verbose']:
                        #print msg
                        asaplog.push( msg )
                        print_log( 'ERROR' )
                        return
                    else:
                        raise IOError(msg)
                else:
                    # Foreign format (rpfits/sdfits/ms): import via filler.
                    self._fill([filename], unit, average, getpt, antenna)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # A list/tuple of file names: fill them all into one table.
                self._fill(filename, unit, average, getpt, antenna)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
        print_log()
130
    @print_log_dec
    def save(self, name=None, format=None, overwrite=False):
        """
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.
        Parameters:
            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)
            format:      an optional file format. Default is ASAP.
                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
                         'SDFITS' (save as SDFITS file)
                         'ASCII' (saves as ascii text file)
                         'MS2' (saves as an aips++
                         MeasurementSet V2)
                         'FITS' (save as image FITS - not
                         readable by class)
                         'CLASS' (save as FITS readable by CLASS)
            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.
        Example:
            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')
        """
        from os import path
        # Fall back to the rc-configured default output format.
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                if rcParams['verbose']:
                    #print msg
                    asaplog.push( msg )
                    print_log( 'ERROR' )
                    return
                else:
                    raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        else:
            # Non-native formats are delegated to the stwriter backend.
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        print_log()
        return
183
184 def copy(self):
185 """
186 Return a copy of this scantable.
187 Note:
188 This makes a full (deep) copy. scan2 = scan1 makes a reference.
189 Parameters:
190 none
191 Example:
192 copiedscan = scan.copy()
193 """
194 sd = scantable(Scantable._copy(self))
195 return sd
196
    def drop_scan(self, scanid=None):
        """
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.
        Parameters:
            scanid:    a (list of) scan number(s)
        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            if rcParams['verbose']:
                #print "Please specify a scanno to drop from the scantable"
                asaplog.push( 'Please specify a scanno to drop from the scantable' )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError("No scan given")
        try:
            scanid = _to_list(scanid)
            # All scan numbers present in the table, minus those to drop.
            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
            # list.remove raises ValueError when a scanid is not present.
            for sid in scanid: allscans.remove(sid)
            if len(allscans) == 0:
                raise ValueError("Can't remove all scans")
        except ValueError:
            if rcParams['verbose']:
                #print "Couldn't find any match."
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
                return
            else: raise
        try:
            # Select everything that survived and deep-copy it.
            sel = selector(scans=allscans)
            return self._select_copy(sel)
        except RuntimeError:
            if rcParams['verbose']:
                #print "Couldn't find any match."
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
            else:
                raise
240
241 def _select_copy(self, selection):
242 orig = self.get_selection()
243 self.set_selection(orig+selection)
244 cp = self.copy()
245 self.set_selection(orig)
246 return cp
247
    def get_scan(self, scanid=None):
        """
        Return a specific scan (by scanno) or collection of scans (by
        source name) in a new scantable.
        Note:
            See scantable.drop_scan() for the inverse operation.
        Parameters:
            scanid:    a (list of) scanno or a source name, unix-style
                       patterns are accepted for source name matching, e.g.
                       '*_R' gets all 'ref' scans
        Example:
            # get all scans containing the source '323p459'
            newscan = scan.get_scan('323p459')
            # get all 'off' scans
            refscans = scan.get_scan('*_R')
            # get a subset of scans by scanno (as listed in scan.summary())
            newscan = scan.get_scan([0, 2, 7, 10])
        """
        if scanid is None:
            if rcParams['verbose']:
                #print "Please specify a scan no or name to " \
                #      "retrieve from the scantable"
                asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError("No scan given")

        try:
            bsel = self.get_selection()
            sel = selector()
            # Dispatch on the type of the scan identifier.
            if type(scanid) is str:
                sel.set_name(scanid)
                return self._select_copy(sel)
            elif type(scanid) is int:
                sel.set_scans([scanid])
                return self._select_copy(sel)
            elif type(scanid) is list:
                sel.set_scans(scanid)
                return self._select_copy(sel)
            else:
                msg = "Illegal scanid type, use 'int' or 'list' if ints."
                if rcParams['verbose']:
                    #print msg
                    asaplog.push( msg )
                    print_log( 'ERROR' )
                else:
                    raise TypeError(msg)
        except RuntimeError:
            if rcParams['verbose']:
                #print "Couldn't find any match."
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
            else: raise
303
304 def __str__(self):
305 return Scantable._summary(self, True)
306
307 def summary(self, filename=None):
308 """
309 Print a summary of the contents of this scantable.
310 Parameters:
311 filename: the name of a file to write the putput to
312 Default - no file output
313 """
314 info = Scantable._summary(self, True)
315 if filename is not None:
316 if filename is "":
317 filename = 'scantable_summary.txt'
318 from os.path import expandvars, isdir
319 filename = expandvars(filename)
320 if not isdir(filename):
321 data = open(filename, 'w')
322 data.write(info)
323 data.close()
324 else:
325 msg = "Illegal file name '%s'." % (filename)
326 if rcParams['verbose']:
327 #print msg
328 asaplog.push( msg )
329 print_log( 'ERROR' )
330 else:
331 raise IOError(msg)
332 if rcParams['verbose']:
333 try:
334 from IPython.genutils import page as pager
335 except ImportError:
336 from pydoc import pager
337 pager(info)
338 else:
339 return info
340
341 def get_spectrum(self, rowno):
342 """Return the spectrum for the current row in the scantable as a list.
343 Parameters:
344 rowno: the row number to retrieve the spectrum from
345 """
346 return self._getspectrum(rowno)
347
348 def get_mask(self, rowno):
349 """Return the mask for the current row in the scantable as a list.
350 Parameters:
351 rowno: the row number to retrieve the mask from
352 """
353 return self._getmask(rowno)
354
355 def set_spectrum(self, spec, rowno):
356 """Return the spectrum for the current row in the scantable as a list.
357 Parameters:
358 spec: the spectrum
359 rowno: the row number to set the spectrum for
360 """
361 assert(len(spec) == self.nchan())
362 return self._setspectrum(spec, rowno)
363
364 def get_coordinate(self, rowno):
365 """Return the (spectral) coordinate for a a given 'rowno'.
366 NOTE:
367 * This coordinate is only valid until a scantable method modifies
368 the frequency axis.
369 * This coordinate does contain the original frequency set-up
370 NOT the new frame. The conversions however are done using the user
371 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
372 use scantable.freq_align first. Without it there is no closure,
373 i.e.
374 c = myscan.get_coordinate(0)
375 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
376
377 Parameters:
378 rowno: the row number for the spectral coordinate
379
380 """
381 return coordinate(Scantable.get_coordinate(self, rowno))
382
383 def get_selection(self):
384 """
385 Get the selection object currently set on this scantable.
386 Parameters:
387 none
388 Example:
389 sel = scan.get_selection()
390 sel.set_ifs(0) # select IF 0
391 scan.set_selection(sel) # apply modified selection
392 """
393 return selector(self._getselection())
394
    def set_selection(self, selection=None, **kw):
        """
        Select a subset of the data. All following operations on this
        scantable are only applied to this selection.
        Parameters:
            selection:    a selector object (default unset the selection),

                          or

                          any combination of
                          "pols", "ifs", "beams", "scans", "cycles",
                          "name", "query"

        Examples:
            sel = selector()         # create a selection object
            sel.set_scans([0, 3])    # select SCANNO 0 and 3
            scan.set_selection(sel)  # set the selection
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection
        """
        if selection is None:
            # reset when called with neither a selector nor keywords
            if len(kw) == 0:
                selection = selector()
            else:
                # try keywords; reject any key the selector does not know
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
429
430 def get_row(self, row=0, insitu=None):
431 """
432 Select a row in the scantable.
433 Return a scantable with single row.
434 Parameters:
435 row: row no of integration, default is 0.
436 insitu: if False a new scantable is returned.
437 Otherwise, the scaling is done in-situ
438 The default is taken from .asaprc (False)
439 """
440 if insitu is None: insitu = rcParams['insitu']
441 if not insitu:
442 workscan = self.copy()
443 else:
444 workscan = self
445 # Select a row
446 sel=selector()
447 sel.set_scans([workscan.getscan(row)])
448 sel.set_cycles([workscan.getcycle(row)])
449 sel.set_beams([workscan.getbeam(row)])
450 sel.set_ifs([workscan.getif(row)])
451 sel.set_polarisations([workscan.getpol(row)])
452 sel.set_name(workscan._getsourcename(row))
453 workscan.set_selection(sel)
454 if not workscan.nrow() == 1:
455 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
456 raise RuntimeError(msg)
457 del sel
458 if insitu:
459 self._assign(workscan)
460 else:
461 return workscan
462
    #def stats(self, stat='stddev', mask=None):
    def stats(self, stat='stddev', mask=None, form='3.3f'):
        """
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.
        Parameters:
            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
            mask:    an optional mask specifying where the statistic
                     should be determined.
            form:    format string to print statistic values
        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)
        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics report the abcissa value at the extremum
        # rather than the statistic itself.
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # Channel of the extremum, needed for the per-row annotation.
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc: statvals = self._math._stats(self, mask, stat)

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50
        # Build a human-readable per-row report of the statistic.
        for i in range(self.nrow()):
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # Only mention axes that have more than one element.
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
            out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            out += sep+"\n"

        if rcParams['verbose']:
            import os
            # Write the report to a per-user temp file, then feed it back
            # through the asap logger (python 2 'print >>' syntax).
            if os.environ.has_key( 'USER' ):
                usr=os.environ['USER']
            else:
                import commands
                usr=commands.getoutput( 'whoami' )
            tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
            f=open(tmpfile,'w')
            print >> f, sep
            print >> f, ' %s %s' % (label, statunit)
            print >> f, sep
            print >> f, out
            f.close()
            f=open(tmpfile,'r')
            x=f.readlines()
            f.close()
            blanc=''
            asaplog.push(blanc.join(x), False)
            #for xx in x:
            #    asaplog.push( xx, False )
        print_log()
        return statvals
552
553 def chan2data(self, rowno=0, chan=0):
554 """
555 Returns channel/frequency/velocity and spectral value
556 at an arbitrary row and channel in the scantable.
557 Parameters:
558 rowno: a row number in the scantable. Default is the
559 first row, i.e. rowno=0
560 chan: a channel in the scantable. Default is the first
561 channel, i.e. pos=0
562 """
563 if isinstance(rowno, int) and isinstance(chan, int):
564 qx = {'unit': self.get_unit(),
565 'value': self._getabcissa(rowno)[chan]}
566 qy = {'unit': self.get_fluxunit(),
567 'value': self._getspectrum(rowno)[chan]}
568 return qx, qy
569
570 def stddev(self, mask=None):
571 """
572 Determine the standard deviation of the current beam/if/pol
573 Takes a 'mask' as an optional parameter to specify which
574 channels should be excluded.
575 Parameters:
576 mask: an optional mask specifying where the standard
577 deviation should be determined.
578
579 Example:
580 scan.set_unit('channel')
581 msk = scan.create_mask([100, 200], [500, 600])
582 scan.stddev(mask=m)
583 """
584 return self.stats(stat='stddev', mask=mask);
585
586
587 def get_column_names(self):
588 """
589 Return a list of column names, which can be used for selection.
590 """
591 return list(Scantable.get_column_names(self))
592
593 def get_tsys(self, row=-1):
594 """
595 Return the System temperatures.
596 Returns:
597 a list of Tsys values for the current selection
598 """
599 if row > -1:
600 return self._get_column(self._gettsys, row)
601 return self._row_callback(self._gettsys, "Tsys")
602
603
604 def get_weather(self, row=-1):
605 values = self._get_column(self._get_weather, row)
606 if row > -1:
607 return {'temperature': values[0],
608 'pressure': values[1], 'humidity' : values[2],
609 'windspeed' : values[3], 'windaz' : values[4]
610 }
611 else:
612 out = []
613 for r in values:
614
615 out.append({'temperature': r[0],
616 'pressure': r[1], 'humidity' : r[2],
617 'windspeed' : r[3], 'windaz' : r[4]
618 })
619 return out
620
    def _row_callback(self, callback, label):
        """Apply *callback* to every row, build a per-row report string
        tagged with *label*, log the report when verbose, and return the
        list of callback results."""
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # Only mention axes that have more than one element.
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'
        if rcParams['verbose']:
            asaplog.push(sep)
            asaplog.push(" %s" % (label))
            asaplog.push(sep)
            asaplog.push(out)
        print_log()
        return outvec
644
645 def _get_column(self, callback, row=-1):
646 """
647 """
648 if row == -1:
649 return [callback(i) for i in range(self.nrow())]
650 else:
651 if 0 <= row < self.nrow():
652 return callback(row)
653
654
655 def get_time(self, row=-1, asdatetime=False):
656 """
657 Get a list of time stamps for the observations.
658 Return a datetime object for each integration time stamp in the scantable.
659 Parameters:
660 row: row no of integration. Default -1 return all rows
661 asdatetime: return values as datetime objects rather than strings
662 Example:
663 none
664 """
665 from time import strptime
666 from datetime import datetime
667 times = self._get_column(self._gettime, row)
668 if not asdatetime:
669 return times
670 format = "%Y/%m/%d/%H:%M:%S"
671 if isinstance(times, list):
672 return [datetime(*strptime(i, format)[:6]) for i in times]
673 else:
674 return datetime(*strptime(times, format)[:6])
675
676
677 def get_inttime(self, row=-1):
678 """
679 Get a list of integration times for the observations.
680 Return a time in seconds for each integration in the scantable.
681 Parameters:
682 row: row no of integration. Default -1 return all rows.
683 Example:
684 none
685 """
686 return self._get_column(self._getinttime, row)
687
688
689 def get_sourcename(self, row=-1):
690 """
691 Get a list source names for the observations.
692 Return a string for each integration in the scantable.
693 Parameters:
694 row: row no of integration. Default -1 return all rows.
695 Example:
696 none
697 """
698 return self._get_column(self._getsourcename, row)
699
700 def get_elevation(self, row=-1):
701 """
702 Get a list of elevations for the observations.
703 Return a float for each integration in the scantable.
704 Parameters:
705 row: row no of integration. Default -1 return all rows.
706 Example:
707 none
708 """
709 return self._get_column(self._getelevation, row)
710
711 def get_azimuth(self, row=-1):
712 """
713 Get a list of azimuths for the observations.
714 Return a float for each integration in the scantable.
715 Parameters:
716 row: row no of integration. Default -1 return all rows.
717 Example:
718 none
719 """
720 return self._get_column(self._getazimuth, row)
721
722 def get_parangle(self, row=-1):
723 """
724 Get a list of parallactic angles for the observations.
725 Return a float for each integration in the scantable.
726 Parameters:
727 row: row no of integration. Default -1 return all rows.
728 Example:
729 none
730 """
731 return self._get_column(self._getparangle, row)
732
733 def get_direction(self, row=-1):
734 """
735 Get a list of Positions on the sky (direction) for the observations.
736 Return a string for each integration in the scantable.
737 Parameters:
738 row: row no of integration. Default -1 return all rows
739 Example:
740 none
741 """
742 return self._get_column(self._getdirection, row)
743
744 def get_directionval(self, row=-1):
745 """
746 Get a list of Positions on the sky (direction) for the observations.
747 Return a float for each integration in the scantable.
748 Parameters:
749 row: row no of integration. Default -1 return all rows
750 Example:
751 none
752 """
753 return self._get_column(self._getdirectionvec, row)
754
755 @print_log_dec
756 def set_unit(self, unit='channel'):
757 """
758 Set the unit for all following operations on this scantable
759 Parameters:
760 unit: optional unit, default is 'channel'
761 one of '*Hz', 'km/s', 'channel', ''
762 """
763 varlist = vars()
764 if unit in ['', 'pixel', 'channel']:
765 unit = ''
766 inf = list(self._getcoordinfo())
767 inf[0] = unit
768 self._setcoordinfo(inf)
769 self._add_history("set_unit", varlist)
770
771 @print_log_dec
772 def set_instrument(self, instr):
773 """
774 Set the instrument for subsequent processing.
775 Parameters:
776 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
777 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
778 """
779 self._setInstrument(instr)
780 self._add_history("set_instument", vars())
781 print_log()
782
783 @print_log_dec
784 def set_feedtype(self, feedtype):
785 """
786 Overwrite the feed type, which might not be set correctly.
787 Parameters:
788 feedtype: 'linear' or 'circular'
789 """
790 self._setfeedtype(feedtype)
791 self._add_history("set_feedtype", vars())
792 print_log()
793
794 @print_log_dec
795 def set_doppler(self, doppler='RADIO'):
796 """
797 Set the doppler for all following operations on this scantable.
798 Parameters:
799 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
800 """
801 varlist = vars()
802 inf = list(self._getcoordinfo())
803 inf[2] = doppler
804 self._setcoordinfo(inf)
805 self._add_history("set_doppler", vars())
806 print_log()
807
808 @print_log_dec
809 def set_freqframe(self, frame=None):
810 """
811 Set the frame type of the Spectral Axis.
812 Parameters:
813 frame: an optional frame type, default 'LSRK'. Valid frames are:
814 'TOPO', 'LSRD', 'LSRK', 'BARY',
815 'GEO', 'GALACTO', 'LGROUP', 'CMB'
816 Examples:
817 scan.set_freqframe('BARY')
818 """
819 frame = frame or rcParams['scantable.freqframe']
820 varlist = vars()
821 # "REST" is not implemented in casacore
822 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
823 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
824 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
825 'GEO', 'GALACTO', 'LGROUP', 'CMB']
826
827 if frame in valid:
828 inf = list(self._getcoordinfo())
829 inf[1] = frame
830 self._setcoordinfo(inf)
831 self._add_history("set_freqframe", varlist)
832 else:
833 msg = "Please specify a valid freq type. Valid types are:\n", valid
834 if rcParams['verbose']:
835 #print msg
836 asaplog.push( msg )
837 print_log( 'ERROR' )
838 else:
839 raise TypeError(msg)
840 print_log()
841
    def set_dirframe(self, frame=""):
        """
        Set the frame type of the Direction on the sky.
        Parameters:
            frame:    an optional frame type, default ''. Valid frames are:
                      'J2000', 'B1950', 'GALACTIC'
        Examples:
            scan.set_dirframe('GALACTIC')
        """
        varlist = vars()
        try:
            # Delegate to the C++ layer; invalid frames raise RuntimeError.
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
            else:
                raise
        self._add_history("set_dirframe", varlist)
863
864 def get_unit(self):
865 """
866 Get the default unit set in this scantable
867 Returns:
868 A unit string
869 """
870 inf = self._getcoordinfo()
871 unit = inf[0]
872 if unit == '': unit = 'channel'
873 return unit
874
875 def get_abcissa(self, rowno=0):
876 """
877 Get the abcissa in the current coordinate setup for the currently
878 selected Beam/IF/Pol
879 Parameters:
880 rowno: an optional row number in the scantable. Default is the
881 first row, i.e. rowno=0
882 Returns:
883 The abcissa values and the format string (as a dictionary)
884 """
885 abc = self._getabcissa(rowno)
886 lbl = self._getabcissalabel(rowno)
887 print_log()
888 return abc, lbl
889
    def flag(self, mask=None, unflag=False):
        """
        Flag the selected data using an optional channel mask.
        Parameters:
            mask:    an optional channel mask, created with create_mask.
                     Default (no mask) is all channels.
            unflag:  if True, unflag the data
        """
        varlist = vars()
        mask = mask or []
        try:
            self._flag(mask, unflag)
        except RuntimeError, msg:
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        self._add_history("flag", varlist)
911
    def flag_row(self, rows=[], unflag=False):
        """
        Flag the selected data in row-based manner.
        Parameters:
            rows:    list of row numbers to be flagged. Default is no row
                     (must be explicitly specified to execute row-based
                     flagging).
            unflag:  if True, unflag the data.
        """
        # NOTE(review): mutable default argument; harmless here since
        # 'rows' is never mutated, but worth cleaning up.
        varlist = vars()
        try:
            self._flag_row(rows, unflag)
        except RuntimeError, msg:
            if rcParams['verbose']:
                print_log()
                asaplog.push( str(msg) )
                print_log('ERROR')
                return
            else: raise
        self._add_history("flag_row", varlist)
930
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """
        Flag the selected data outside a specified range (in channel-base).
        Parameters:
            uthres:      upper threshold.
            dthres:      lower threshold
            clipoutside: True for flagging data outside the range
                         [dthres:uthres].
                         False for flagging data inside the range.
            unflag:      if True, unflag the data.
        """
        varlist = vars()
        try:
            self._clip(uthres, dthres, clipoutside, unflag)
        except RuntimeError, msg:
            if rcParams['verbose']:
                print_log()
                asaplog.push(str(msg))
                print_log('ERROR')
                return
            else: raise
        self._add_history("clip", varlist)
952
    @print_log_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        #def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
        """
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.
        Parameters:
            start:    the start frequency (really a period within the
                      bandwidth) or period to remove
            end:      the end frequency or period to remove
            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels
        Notes:
            It is recommended to flag edges of the band or strong
            signals beforehand.
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # Conversion factors to Hz for the supported frequency units.
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        try:
            if unit == "":
                s = scantable(self._math._lag_flag(self, start, end, "lags"))
            else:
                s = scantable(self._math._lag_flag(self, start*base[unit],
                                                   end*base[unit], "frequency"))
        except RuntimeError, msg:
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        s._add_history("lag_flag", varlist)
        print_log()
        if insitu:
            self._assign(s)
        else:
            return s
996
    @print_log_dec
    def create_mask(self, *args, **kwargs):
        """
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.
        Parameters:
            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the
                regions to be masked
            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED
            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.
        Example:
            scan.set_unit('channel')
            a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'
            c)
            mask only channel 400
            msk = scan.create_mask([400])
        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!
        # (accepts both create_mask([a,b],[c,d]) and
        #  create_mask([[a,b],[c,d]]))
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            # A single-element window selects exactly that channel.
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # swap so that window[0] <= window[1]
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        print_log()
        return msk
1061
1062 def get_masklist(self, mask=None, row=0):
1063 """
1064 Compute and return a list of mask windows, [min, max].
1065 Parameters:
1066 mask: channel mask, created with create_mask.
1067 row: calcutate the masklist using the specified row
1068 for unit conversions, default is row=0
1069 only necessary if frequency varies over rows.
1070 Returns:
1071 [min, max], [min2, max2], ...
1072 Pairs of start/end points (inclusive)specifying
1073 the masked regions
1074 """
1075 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1076 raise TypeError("The mask should be list or tuple.")
1077 if len(mask) < 2:
1078 raise TypeError("The mask elements should be > 1")
1079 if self.nchan() != len(mask):
1080 msg = "Number of channels in scantable != number of mask elements"
1081 raise TypeError(msg)
1082 data = self._getabcissa(row)
1083 u = self._getcoordinfo()[0]
1084 if rcParams['verbose']:
1085 if u == "": u = "channel"
1086 msg = "The current mask window unit is %s" % u
1087 i = self._check_ifs()
1088 if not i:
1089 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1090 asaplog.push(msg)
1091 masklist=[]
1092 ist, ien = None, None
1093 ist, ien=self.get_mask_indices(mask)
1094 if ist is not None and ien is not None:
1095 for i in xrange(len(ist)):
1096 range=[data[ist[i]],data[ien[i]]]
1097 range.sort()
1098 masklist.append([range[0],range[1]])
1099 return masklist
1100
1101 def get_mask_indices(self, mask=None):
1102 """
1103 Compute and Return lists of mask start indices and mask end indices.
1104 Parameters:
1105 mask: channel mask, created with create_mask.
1106 Returns:
1107 List of mask start indices and that of mask end indices,
1108 i.e., [istart1,istart2,....], [iend1,iend2,....].
1109 """
1110 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1111 raise TypeError("The mask should be list or tuple.")
1112 if len(mask) < 2:
1113 raise TypeError("The mask elements should be > 1")
1114 istart=[]
1115 iend=[]
1116 if mask[0]: istart.append(0)
1117 for i in range(len(mask)-1):
1118 if not mask[i] and mask[i+1]:
1119 istart.append(i+1)
1120 elif mask[i] and not mask[i+1]:
1121 iend.append(i)
1122 if mask[len(mask)-1]: iend.append(len(mask)-1)
1123 if len(istart) != len(iend):
1124 raise RuntimeError("Numbers of mask start != mask end.")
1125 for i in range(len(istart)):
1126 if istart[i] > iend[i]:
1127 raise RuntimeError("Mask start index > mask end index")
1128 break
1129 return istart,iend
1130
1131# def get_restfreqs(self):
1132# """
1133# Get the restfrequency(s) stored in this scantable.
1134# The return value(s) are always of unit 'Hz'
1135# Parameters:
1136# none
1137# Returns:
1138# a list of doubles
1139# """
1140# return list(self._getrestfreqs())
1141
1142 def get_restfreqs(self, ids=None):
1143 """
1144 Get the restfrequency(s) stored in this scantable.
1145 The return value(s) are always of unit 'Hz'
1146 Parameters:
1147 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1148 be retrieved
1149 Returns:
1150 dictionary containing ids and a list of doubles for each id
1151 """
1152 if ids is None:
1153 rfreqs={}
1154 idlist = self.getmolnos()
1155 for i in idlist:
1156 rfreqs[i]=list(self._getrestfreqs(i))
1157 return rfreqs
1158 else:
1159 if type(ids)==list or type(ids)==tuple:
1160 rfreqs={}
1161 for i in ids:
1162 rfreqs[i]=list(self._getrestfreqs(i))
1163 return rfreqs
1164 else:
1165 return list(self._getrestfreqs(ids))
1166 #return list(self._getrestfreqs(ids))
1167
1168 def set_restfreqs(self, freqs=None, unit='Hz'):
1169 """
1170 ********NEED TO BE UPDATED begin************
1171 Set or replace the restfrequency specified and
1172 If the 'freqs' argument holds a scalar,
1173 then that rest frequency will be applied to all the selected
1174 data. If the 'freqs' argument holds
1175 a vector, then it MUST be of equal or smaller length than
1176 the number of IFs (and the available restfrequencies will be
1177 replaced by this vector). In this case, *all* data have
1178 the restfrequency set per IF according
1179 to the corresponding value you give in the 'freqs' vector.
1180 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1181 IF 1 gets restfreq 2e9.
1182 ********NEED TO BE UPDATED end************
1183 You can also specify the frequencies via a linecatalog.
1184
1185 Parameters:
1186 freqs: list of rest frequency values or string idenitfiers
1187 unit: unit for rest frequency (default 'Hz')
1188
1189 Example:
1190 # set the given restfrequency for the all currently selected IFs
1191 scan.set_restfreqs(freqs=1.4e9)
1192 # set multiple restfrequencies to all the selected data
1193 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1194 # If the number of IFs in the data is >= 2 the IF0 gets the first
1195 # value IF1 the second... NOTE that freqs needs to be
1196 # specified in list of list (e.g. [[],[],...] ).
1197 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
1198 #set the given restfrequency for the whole table (by name)
1199 scan.set_restfreqs(freqs="OH1667")
1200
1201 Note:
1202 To do more sophisticate Restfrequency setting, e.g. on a
1203 source and IF basis, use scantable.set_selection() before using
1204 this function.
1205 # provide your scantable is called scan
1206 selection = selector()
1207 selection.set_name("ORION*")
1208 selection.set_ifs([1])
1209 scan.set_selection(selection)
1210 scan.set_restfreqs(freqs=86.6e9)
1211
1212 """
1213 varlist = vars()
1214 from asap import linecatalog
1215 # simple value
1216 if isinstance(freqs, int) or isinstance(freqs, float):
1217 # TT mod
1218 #self._setrestfreqs(freqs, "",unit)
1219 self._setrestfreqs([freqs], [""],unit)
1220 # list of values
1221 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1222 # list values are scalars
1223 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1224 self._setrestfreqs(freqs, [""], unit)
1225 # list values are tuples, (value, name)
1226 elif isinstance(freqs[-1], dict):
1227 #sel = selector()
1228 #savesel = self._getselection()
1229 #iflist = self.getifnos()
1230 #for i in xrange(len(freqs)):
1231 # sel.set_ifs(iflist[i])
1232 # self._setselection(sel)
1233 # self._setrestfreqs(freqs[i], "",unit)
1234 #self._setselection(savesel)
1235 self._setrestfreqs(freqs["value"],
1236 freqs["name"], unit)
1237 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1238 sel = selector()
1239 savesel = self._getselection()
1240 iflist = self.getifnos()
1241 if len(freqs)>len(iflist):
1242 raise ValueError("number of elements in list of list exeeds the current IF selections")
1243 for i in xrange(len(freqs)):
1244 sel.set_ifs(iflist[i])
1245 self._setselection(sel)
1246 self._setrestfreqs(freqs[i], [""], unit)
1247 self._setselection(savesel)
1248 # freqs are to be taken from a linecatalog
1249 elif isinstance(freqs, linecatalog):
1250 sel = selector()
1251 savesel = self._getselection()
1252 for i in xrange(freqs.nrow()):
1253 sel.set_ifs(iflist[i])
1254 self._setselection(sel)
1255 self._setrestfreqs(freqs.get_frequency(i),
1256 freqs.get_name(i), "MHz")
1257 # ensure that we are not iterating past nIF
1258 if i == self.nif()-1: break
1259 self._setselection(savesel)
1260 else:
1261 return
1262 self._add_history("set_restfreqs", varlist)
1263
    def shift_refpix(self, delta):
        """
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.
        Parameters:
            delta:   the amount to shift by
        Note:
            Be careful using this with broadband data.
        """
        # delegate directly to the implementation on the base class
        Scantable.shift_refpix(self, delta)
1274
1275 def history(self, filename=None):
1276 """
1277 Print the history. Optionally to a file.
1278 Parameters:
1279 filename: The name of the file to save the history to.
1280 """
1281 hist = list(self._gethistory())
1282 out = "-"*80
1283 for h in hist:
1284 if h.startswith("---"):
1285 out += "\n"+h
1286 else:
1287 items = h.split("##")
1288 date = items[0]
1289 func = items[1]
1290 items = items[2:]
1291 out += "\n"+date+"\n"
1292 out += "Function: %s\n Parameters:" % (func)
1293 for i in items:
1294 s = i.split("=")
1295 out += "\n %s = %s" % (s[0], s[1])
1296 out += "\n"+"-"*80
1297 if filename is not None:
1298 if filename is "":
1299 filename = 'scantable_history.txt'
1300 import os
1301 filename = os.path.expandvars(os.path.expanduser(filename))
1302 if not os.path.isdir(filename):
1303 data = open(filename, 'w')
1304 data.write(out)
1305 data.close()
1306 else:
1307 msg = "Illegal file name '%s'." % (filename)
1308 if rcParams['verbose']:
1309 #print msg
1310 asaplog.push( msg )
1311 print_log( 'ERROR' )
1312 else:
1313 raise IOError(msg)
1314 if rcParams['verbose']:
1315 try:
1316 from IPython.genutils import page as pager
1317 except ImportError:
1318 from pydoc import pager
1319 pager(out)
1320 else:
1321 return out
1322 return
1323 #
1324 # Maths business
1325 #
    @print_log_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """
        Return the (time) weighted average of a scan.
        Note:
            in channels only - align if necessary
        Parameters:
            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together,
            weight:   Weighting scheme.
                      'none'     (mean no weight)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   ( median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.
        Example:
            # time average the scantable without using a mask
            newscan = scan.average_time()
        """
        varlist = vars()
        # normalise arguments to the forms the C++ layer expects
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        # the averaging backend takes a tuple of scantables
        scan = (self, )
        try:
            if align:
                # align on a copy so 'self' is left untouched
                scan = (self.freq_align(insitu=False), )
            s = None
            if weight.upper() == 'MEDIAN':
                # median averaging uses a dedicated backend call
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            # verbose mode: log the error and return None;
            # otherwise let the RuntimeError propagate to the caller
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        s._add_history("average_time", varlist)
        print_log()
        return s
1377
1378 @print_log_dec
1379 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1380 """
1381 Return a scan where all spectra are converted to either
1382 Jansky or Kelvin depending upon the flux units of the scan table.
1383 By default the function tries to look the values up internally.
1384 If it can't find them (or if you want to over-ride), you must
1385 specify EITHER jyperk OR eta (and D which it will try to look up
1386 also if you don't set it). jyperk takes precedence if you set both.
1387 Parameters:
1388 jyperk: the Jy / K conversion factor
1389 eta: the aperture efficiency
1390 d: the geomtric diameter (metres)
1391 insitu: if False a new scantable is returned.
1392 Otherwise, the scaling is done in-situ
1393 The default is taken from .asaprc (False)
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
1396 self._math._setinsitu(insitu)
1397 varlist = vars()
1398 jyperk = jyperk or -1.0
1399 d = d or -1.0
1400 eta = eta or -1.0
1401 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1402 s._add_history("convert_flux", varlist)
1403 print_log()
1404 if insitu: self._assign(s)
1405 else: return s
1406
1407 @print_log_dec
1408 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1409 """
1410 Return a scan after applying a gain-elevation correction.
1411 The correction can be made via either a polynomial or a
1412 table-based interpolation (and extrapolation if necessary).
1413 You specify polynomial coefficients, an ascii table or neither.
1414 If you specify neither, then a polynomial correction will be made
1415 with built in coefficients known for certain telescopes (an error
1416 will occur if the instrument is not known).
1417 The data and Tsys are *divided* by the scaling factors.
1418 Parameters:
1419 poly: Polynomial coefficients (default None) to compute a
1420 gain-elevation correction as a function of
1421 elevation (in degrees).
1422 filename: The name of an ascii file holding correction factors.
1423 The first row of the ascii file must give the column
1424 names and these MUST include columns
1425 "ELEVATION" (degrees) and "FACTOR" (multiply data
1426 by this) somewhere.
1427 The second row must give the data type of the
1428 column. Use 'R' for Real and 'I' for Integer.
1429 An example file would be
1430 (actual factors are arbitrary) :
1431
1432 TIME ELEVATION FACTOR
1433 R R R
1434 0.1 0 0.8
1435 0.2 20 0.85
1436 0.3 40 0.9
1437 0.4 60 0.85
1438 0.5 80 0.8
1439 0.6 90 0.75
1440 method: Interpolation method when correcting from a table.
1441 Values are "nearest", "linear" (default), "cubic"
1442 and "spline"
1443 insitu: if False a new scantable is returned.
1444 Otherwise, the scaling is done in-situ
1445 The default is taken from .asaprc (False)
1446 """
1447
1448 if insitu is None: insitu = rcParams['insitu']
1449 self._math._setinsitu(insitu)
1450 varlist = vars()
1451 poly = poly or ()
1452 from os.path import expandvars
1453 filename = expandvars(filename)
1454 s = scantable(self._math._gainel(self, poly, filename, method))
1455 s._add_history("gain_el", varlist)
1456 print_log()
1457 if insitu:
1458 self._assign(s)
1459 else:
1460 return s
1461
1462 @print_log_dec
1463 def freq_align(self, reftime=None, method='cubic', insitu=None):
1464 """
1465 Return a scan where all rows have been aligned in frequency/velocity.
1466 The alignment frequency frame (e.g. LSRK) is that set by function
1467 set_freqframe.
1468 Parameters:
1469 reftime: reference time to align at. By default, the time of
1470 the first row of data is used.
1471 method: Interpolation method for regridding the spectra.
1472 Choose from "nearest", "linear", "cubic" (default)
1473 and "spline"
1474 insitu: if False a new scantable is returned.
1475 Otherwise, the scaling is done in-situ
1476 The default is taken from .asaprc (False)
1477 """
1478 if insitu is None: insitu = rcParams["insitu"]
1479 self._math._setinsitu(insitu)
1480 varlist = vars()
1481 reftime = reftime or ""
1482 s = scantable(self._math._freq_align(self, reftime, method))
1483 s._add_history("freq_align", varlist)
1484 print_log()
1485 if insitu: self._assign(s)
1486 else: return s
1487
1488 @print_log_dec
1489 def opacity(self, tau=None, insitu=None):
1490 """
1491 Apply an opacity correction. The data
1492 and Tsys are multiplied by the correction factor.
1493 Parameters:
1494 tau: (list of) opacity from which the correction factor is
1495 exp(tau*ZD)
1496 where ZD is the zenith-distance.
1497 If a list is provided, it has to be of length nIF,
1498 nIF*nPol or 1 and in order of IF/POL, e.g.
1499 [opif0pol0, opif0pol1, opif1pol0 ...]
1500 if tau is `None` the opacities are determined from a
1501 model.
1502 insitu: if False a new scantable is returned.
1503 Otherwise, the scaling is done in-situ
1504 The default is taken from .asaprc (False)
1505 """
1506 if insitu is None: insitu = rcParams['insitu']
1507 self._math._setinsitu(insitu)
1508 varlist = vars()
1509 if not hasattr(tau, "__len__"):
1510 tau = [tau]
1511 s = scantable(self._math._opacity(self, tau))
1512 s._add_history("opacity", varlist)
1513 print_log()
1514 if insitu: self._assign(s)
1515 else: return s
1516
1517 @print_log_dec
1518 def bin(self, width=5, insitu=None):
1519 """
1520 Return a scan where all spectra have been binned up.
1521 Parameters:
1522 width: The bin width (default=5) in pixels
1523 insitu: if False a new scantable is returned.
1524 Otherwise, the scaling is done in-situ
1525 The default is taken from .asaprc (False)
1526 """
1527 if insitu is None: insitu = rcParams['insitu']
1528 self._math._setinsitu(insitu)
1529 varlist = vars()
1530 s = scantable(self._math._bin(self, width))
1531 s._add_history("bin", varlist)
1532 print_log()
1533 if insitu:
1534 self._assign(s)
1535 else:
1536 return s
1537
1538 @print_log_dec
1539 def resample(self, width=5, method='cubic', insitu=None):
1540 """
1541 Return a scan where all spectra have been binned up.
1542
1543 Parameters:
1544 width: The bin width (default=5) in pixels
1545 method: Interpolation method when correcting from a table.
1546 Values are "nearest", "linear", "cubic" (default)
1547 and "spline"
1548 insitu: if False a new scantable is returned.
1549 Otherwise, the scaling is done in-situ
1550 The default is taken from .asaprc (False)
1551 """
1552 if insitu is None: insitu = rcParams['insitu']
1553 self._math._setinsitu(insitu)
1554 varlist = vars()
1555 s = scantable(self._math._resample(self, method, width))
1556 s._add_history("resample", varlist)
1557 print_log()
1558 if insitu: self._assign(s)
1559 else: return s
1560
1561 @print_log_dec
1562 def average_pol(self, mask=None, weight='none'):
1563 """
1564 Average the Polarisations together.
1565 Parameters:
1566 mask: An optional mask defining the region, where the
1567 averaging will be applied. The output will have all
1568 specified points masked.
1569 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1570 weighted), or 'tsys' (1/Tsys**2 weighted)
1571 """
1572 varlist = vars()
1573 mask = mask or ()
1574 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1575 s._add_history("average_pol", varlist)
1576 print_log()
1577 return s
1578
1579 @print_log_dec
1580 def average_beam(self, mask=None, weight='none'):
1581 """
1582 Average the Beams together.
1583 Parameters:
1584 mask: An optional mask defining the region, where the
1585 averaging will be applied. The output will have all
1586 specified points masked.
1587 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1588 weighted), or 'tsys' (1/Tsys**2 weighted)
1589 """
1590 varlist = vars()
1591 mask = mask or ()
1592 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1593 s._add_history("average_beam", varlist)
1594 print_log()
1595 return s
1596
1597 def parallactify(self, pflag):
1598 """
1599 Set a flag to inidcate whether this data should be treated as having
1600 been 'parallactified' (total phase == 0.0)
1601 Parameters:
1602 pflag: Bool inidcating whether to turn this on (True) or
1603 off (False)
1604 """
1605 varlist = vars()
1606 self._parallactify(pflag)
1607 self._add_history("parallactify", varlist)
1608
    @print_log_dec
    def convert_pol(self, poltype=None):
        """
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.
        Parameters:
            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"
        Returns:
            a new scantable in the requested polarisation type, or None
            when the conversion fails and rcParams['verbose'] is set.
        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            # verbose mode: log the error and return None;
            # otherwise let the RuntimeError propagate to the caller
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        print_log()
        return s
1633
    @print_log_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """
        Smooth the spectrum by the specified kernel (conserving flux).
        Parameters:
            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'
            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.
            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.
            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
        Example:
             none
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive review: overlay original and smoothed spectra
            # row by row and let the user accept or reject each one
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        print_log()
        if insitu: self._assign(s)
        else: return s
1707
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
                      insitu=None):
        """
        Return a scan which has been baselined (all rows) by a polynomial.
        Fit parameters for each row are stored in self.blpars (a rejected
        row stores None).
        Parameters:
            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            uselin:     use linear polynomial fit
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        # default mask: all channels included
        if mask is None:
            mask = [True for i in xrange(self.nchan(-1))]

        from asap.asapfitter import fitter
        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            rows = range(workscan.nrow())
            # fit parameters are collected per row on the *original* object
            if len(rows) > 0:
                self.blpars = []

            for r in rows:
                # take into account flagtra info (CAS-1434):
                # combine the user mask with the row's channel flags
                flagtra = workscan._getmask(r)
                actualmask = mask[:]
                if len(actualmask) == 0:
                    actualmask = list(flagtra[:])
                else:
                    if len(actualmask) != len(flagtra):
                        raise RuntimeError, "Mask and flagtra have different length"
                    else:
                        for i in range(0, len(actualmask)):
                            actualmask[i] = actualmask[i] and flagtra[i]
                f.set_scan(workscan, actualmask)
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                f.data = None
                f.fit()
                if plot:
                    # interactive approval of each row's fit
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        self.blpars.append(None)
                        continue
                # replace the spectrum by the fit residual
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            print_log()
            if insitu: self._assign(workscan)
            else: return workscan
        except RuntimeError:
            # typically raised when the fit does not converge
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError(msg)
1792
1793
1794 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1795 threshold=3, chan_avg_limit=1, plot=False,
1796 insitu=None):
1797 """
1798 Return a scan which has been baselined (all rows) by a polynomial.
1799 Spectral lines are detected first using linefinder and masked out
1800 to avoid them affecting the baseline solution.
1801
1802 Parameters:
1803 mask: an optional mask retreived from scantable
1804 edge: an optional number of channel to drop at
1805 the edge of spectrum. If only one value is
1806 specified, the same number will be dropped from
1807 both sides of the spectrum. Default is to keep
1808 all channels. Nested tuples represent individual
1809 edge selection for different IFs (a number of spectral
1810 channels can be different)
1811 order: the order of the polynomial (default is 0)
1812 threshold: the threshold used by line finder. It is better to
1813 keep it large as only strong lines affect the
1814 baseline solution.
1815 chan_avg_limit:
1816 a maximum number of consequtive spectral channels to
1817 average during the search of weak and broad lines.
1818 The default is no averaging (and no search for weak
1819 lines). If such lines can affect the fitted baseline
1820 (e.g. a high order polynomial is fitted), increase this
1821 parameter (usually values up to 8 are reasonable). Most
1822 users of this method should find the default value
1823 sufficient.
1824 plot: plot the fit and the residual. In this each
1825 indivual fit has to be approved, by typing 'y'
1826 or 'n'
1827 insitu: if False a new scantable is returned.
1828 Otherwise, the scaling is done in-situ
1829 The default is taken from .asaprc (False)
1830
1831 Example:
1832 scan2=scan.auto_poly_baseline(order=7)
1833 """
1834 if insitu is None: insitu = rcParams['insitu']
1835 varlist = vars()
1836 from asap.asapfitter import fitter
1837 from asap.asaplinefind import linefinder
1838 from asap import _is_sequence_or_number as _is_valid
1839
1840 # check whether edge is set up for each IF individually
1841 individualedge = False;
1842 if len(edge) > 1:
1843 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1844 individualedge = True;
1845
1846 if not _is_valid(edge, int) and not individualedge:
1847 raise ValueError, "Parameter 'edge' has to be an integer or a \
1848 pair of integers specified as a tuple. Nested tuples are allowed \
1849 to make individual selection for different IFs."
1850
1851 curedge = (0, 0)
1852 if individualedge:
1853 for edgepar in edge:
1854 if not _is_valid(edgepar, int):
1855 raise ValueError, "Each element of the 'edge' tuple has \
1856 to be a pair of integers or an integer."
1857 else:
1858 curedge = edge;
1859
1860 # setup fitter
1861 f = fitter()
1862 f.set_function(poly=order)
1863
1864 # setup line finder
1865 fl = linefinder()
1866 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1867
1868 if not insitu:
1869 workscan = self.copy()
1870 else:
1871 workscan = self
1872
1873 fl.set_scan(workscan)
1874
1875 rows = range(workscan.nrow())
1876 # Save parameters of baseline fits & masklists as a class attribute.
1877 # NOTICE: It does not reflect changes in scantable!
1878 if len(rows) > 0:
1879 self.blpars=[]
1880 self.masklists=[]
1881 asaplog.push("Processing:")
1882 for r in rows:
1883 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1884 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1885 workscan.getpol(r), workscan.getcycle(r))
1886 asaplog.push(msg, False)
1887
1888 # figure out edge parameter
1889 if individualedge:
1890 if len(edge) >= workscan.getif(r):
1891 raise RuntimeError, "Number of edge elements appear to " \
1892 "be less than the number of IFs"
1893 curedge = edge[workscan.getif(r)]
1894
1895 # take into account flagtra info (CAS-1434)
1896 flagtra = workscan._getmask(r)
1897 actualmask = mask[:]
1898 if len(actualmask) == 0:
1899 actualmask = list(flagtra[:])
1900 else:
1901 if len(actualmask) != len(flagtra):
1902 raise RuntimeError, "Mask and flagtra have different length"
1903 else:
1904 for i in range(0, len(actualmask)):
1905 actualmask[i] = actualmask[i] and flagtra[i]
1906
1907 # setup line finder
1908 fl.find_lines(r, actualmask, curedge)
1909 outmask=fl.get_mask()
1910 f.set_scan(workscan, fl.get_mask())
1911 f.x = workscan._getabcissa(r)
1912 f.y = workscan._getspectrum(r)
1913 f.data = None
1914 f.fit()
1915
1916 # Show mask list
1917 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1918 msg = "mask range: "+str(masklist)
1919 asaplog.push(msg, False)
1920
1921 if plot:
1922 f.plot(residual=True)
1923 x = raw_input("Accept fit ( [y]/n ): ")
1924 if x.upper() == 'N':
1925 self.blpars.append(None)
1926 self.masklists.append(None)
1927 continue
1928
1929 workscan._setspectrum(f.fitter.getresidual(), r)
1930 self.blpars.append(f.get_parameters())
1931 self.masklists.append(masklist)
1932 if plot:
1933 f._p.unmap()
1934 f._p = None
1935 workscan._add_history("auto_poly_baseline", varlist)
1936 if insitu:
1937 self._assign(workscan)
1938 else:
1939 return workscan
1940
1941 @print_log_dec
1942 def rotate_linpolphase(self, angle):
1943 """
1944 Rotate the phase of the complex polarization O=Q+iU correlation.
1945 This is always done in situ in the raw data. So if you call this
1946 function more than once then each call rotates the phase further.
1947 Parameters:
1948 angle: The angle (degrees) to rotate (add) by.
1949 Examples:
1950 scan.rotate_linpolphase(2.3)
1951 """
1952 varlist = vars()
1953 self._math._rotate_linpolphase(self, angle)
1954 self._add_history("rotate_linpolphase", varlist)
1955 print_log()
1956 return
1957
1958 @print_log_dec
1959 def rotate_xyphase(self, angle):
1960 """
1961 Rotate the phase of the XY correlation. This is always done in situ
1962 in the data. So if you call this function more than once
1963 then each call rotates the phase further.
1964 Parameters:
1965 angle: The angle (degrees) to rotate (add) by.
1966 Examples:
1967 scan.rotate_xyphase(2.3)
1968 """
1969 varlist = vars()
1970 self._math._rotate_xyphase(self, angle)
1971 self._add_history("rotate_xyphase", varlist)
1972 print_log()
1973 return
1974
1975 @print_log_dec
1976 def swap_linears(self):
1977 """
1978 Swap the linear polarisations XX and YY, or better the first two
1979 polarisations as this also works for ciculars.
1980 """
1981 varlist = vars()
1982 self._math._swap_linears(self)
1983 self._add_history("swap_linears", varlist)
1984 print_log()
1985 return
1986
1987 @print_log_dec
1988 def invert_phase(self):
1989 """
1990 Invert the phase of the complex polarisation
1991 """
1992 varlist = vars()
1993 self._math._invert_phase(self)
1994 self._add_history("invert_phase", varlist)
1995 print_log()
1996 return
1997
1998 @print_log_dec
1999 def add(self, offset, insitu=None):
2000 """
2001 Return a scan where all spectra have the offset added
2002 Parameters:
2003 offset: the offset
2004 insitu: if False a new scantable is returned.
2005 Otherwise, the scaling is done in-situ
2006 The default is taken from .asaprc (False)
2007 """
2008 if insitu is None: insitu = rcParams['insitu']
2009 self._math._setinsitu(insitu)
2010 varlist = vars()
2011 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2012 s._add_history("add", varlist)
2013 print_log()
2014 if insitu:
2015 self._assign(s)
2016 else:
2017 return s
2018
2019 @print_log_dec
2020 def scale(self, factor, tsys=True, insitu=None):
2021 """
2022 Return a scan where all spectra are scaled by the give 'factor'
2023 Parameters:
2024 factor: the scaling factor (float or 1D float list)
2025 insitu: if False a new scantable is returned.
2026 Otherwise, the scaling is done in-situ
2027 The default is taken from .asaprc (False)
2028 tsys: if True (default) then apply the operation to Tsys
2029 as well as the data
2030 """
2031 if insitu is None: insitu = rcParams['insitu']
2032 self._math._setinsitu(insitu)
2033 varlist = vars()
2034 s = None
2035 import numpy
2036 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2037 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2038 from asapmath import _array2dOp
2039 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2040 else:
2041 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2042 else:
2043 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2044 s._add_history("scale", varlist)
2045 print_log()
2046 if insitu:
2047 self._assign(s)
2048 else:
2049 return s
2050
2051 def set_sourcetype(self, match, matchtype="pattern",
2052 sourcetype="reference"):
2053 """
2054 Set the type of the source to be an source or reference scan
2055 using the provided pattern:
2056 Parameters:
2057 match: a Unix style pattern, regular expression or selector
2058 matchtype: 'pattern' (default) UNIX style pattern or
2059 'regex' regular expression
2060 sourcetype: the type of the source to use (source/reference)
2061 """
2062 varlist = vars()
2063 basesel = self.get_selection()
2064 stype = -1
2065 if sourcetype.lower().startswith("r"):
2066 stype = 1
2067 elif sourcetype.lower().startswith("s"):
2068 stype = 0
2069 else:
2070 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2071 if matchtype.lower().startswith("p"):
2072 matchtype = "pattern"
2073 elif matchtype.lower().startswith("r"):
2074 matchtype = "regex"
2075 else:
2076 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2077 sel = selector()
2078 if isinstance(match, selector):
2079 sel = match
2080 else:
2081 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2082 self.set_selection(basesel+sel)
2083 self._setsourcetype(stype)
2084 self.set_selection(basesel)
2085 self._add_history("set_sourcetype", varlist)
2086
2087 @print_log_dec
2088 def auto_quotient(self, preserve=True, mode='paired', verify=False):
2089 """
2090 This function allows to build quotients automatically.
2091 It assumes the observation to have the same number of
2092 "ons" and "offs"
2093 Parameters:
2094 preserve: you can preserve (default) the continuum or
2095 remove it. The equations used are
2096 preserve: Output = Toff * (on/off) - Toff
2097 remove: Output = Toff * (on/off) - Ton
2098 mode: the on/off detection mode
2099 'paired' (default)
2100 identifies 'off' scans by the
2101 trailing '_R' (Mopra/Parkes) or
2102 '_e'/'_w' (Tid) and matches
2103 on/off pairs from the observing pattern
2104 'time'
2105 finds the closest off in time
2106
2107 """
2108 modes = ["time", "paired"]
2109 if not mode in modes:
2110 msg = "please provide valid mode. Valid modes are %s" % (modes)
2111 raise ValueError(msg)
2112 varlist = vars()
2113 s = None
2114 if mode.lower() == "paired":
2115 basesel = self.get_selection()
2116 sel = selector()+basesel
2117 sel.set_query("SRCTYPE==1")
2118 self.set_selection(sel)
2119 offs = self.copy()
2120 sel.set_query("SRCTYPE==0")
2121 self.set_selection(sel)
2122 ons = self.copy()
2123 s = scantable(self._math._quotient(ons, offs, preserve))
2124 self.set_selection(basesel)
2125 elif mode.lower() == "time":
2126 s = scantable(self._math._auto_quotient(self, mode, preserve))
2127 s._add_history("auto_quotient", varlist)
2128 print_log()
2129 return s
2130
2131 @print_log_dec
2132 def mx_quotient(self, mask = None, weight='median', preserve=True):
2133 """
2134 Form a quotient using "off" beams when observing in "MX" mode.
2135 Parameters:
2136 mask: an optional mask to be used when weight == 'stddev'
2137 weight: How to average the off beams. Default is 'median'.
2138 preserve: you can preserve (default) the continuum or
2139 remove it. The equations used are
2140 preserve: Output = Toff * (on/off) - Toff
2141 remove: Output = Toff * (on/off) - Ton
2142 """
2143 mask = mask or ()
2144 varlist = vars()
2145 on = scantable(self._math._mx_extract(self, 'on'))
2146 preoff = scantable(self._math._mx_extract(self, 'off'))
2147 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2148 from asapmath import quotient
2149 q = quotient(on, off, preserve)
2150 q._add_history("mx_quotient", varlist)
2151 print_log()
2152 return q
2153
2154 @print_log_dec
2155 def freq_switch(self, insitu=None):
2156 """
2157 Apply frequency switching to the data.
2158 Parameters:
2159 insitu: if False a new scantable is returned.
2160 Otherwise, the swictching is done in-situ
2161 The default is taken from .asaprc (False)
2162 Example:
2163 none
2164 """
2165 if insitu is None: insitu = rcParams['insitu']
2166 self._math._setinsitu(insitu)
2167 varlist = vars()
2168 s = scantable(self._math._freqswitch(self))
2169 s._add_history("freq_switch", varlist)
2170 print_log()
2171 if insitu: self._assign(s)
2172 else: return s
2173
2174 @print_log_dec
2175 def recalc_azel(self):
2176 """
2177 Recalculate the azimuth and elevation for each position.
2178 Parameters:
2179 none
2180 Example:
2181 """
2182 varlist = vars()
2183 self._recalcazel()
2184 self._add_history("recalc_azel", varlist)
2185 print_log()
2186 return
2187
2188 @print_log_dec
2189 def __add__(self, other):
2190 varlist = vars()
2191 s = None
2192 if isinstance(other, scantable):
2193 s = scantable(self._math._binaryop(self, other, "ADD"))
2194 elif isinstance(other, float):
2195 s = scantable(self._math._unaryop(self, other, "ADD", False))
2196 else:
2197 raise TypeError("Other input is not a scantable or float value")
2198 s._add_history("operator +", varlist)
2199 return s
2200
2201 @print_log_dec
2202 def __sub__(self, other):
2203 """
2204 implicit on all axes and on Tsys
2205 """
2206 varlist = vars()
2207 s = None
2208 if isinstance(other, scantable):
2209 s = scantable(self._math._binaryop(self, other, "SUB"))
2210 elif isinstance(other, float):
2211 s = scantable(self._math._unaryop(self, other, "SUB", False))
2212 else:
2213 raise TypeError("Other input is not a scantable or float value")
2214 s._add_history("operator -", varlist)
2215 return s
2216
2217 @print_log_dec
2218 def __mul__(self, other):
2219 """
2220 implicit on all axes and on Tsys
2221 """
2222 varlist = vars()
2223 s = None
2224 if isinstance(other, scantable):
2225 s = scantable(self._math._binaryop(self, other, "MUL"))
2226 elif isinstance(other, float):
2227 s = scantable(self._math._unaryop(self, other, "MUL", False))
2228 else:
2229 raise TypeError("Other input is not a scantable or float value")
2230 s._add_history("operator *", varlist)
2231 return s
2232
2233
2234 @print_log_dec
2235 def __div__(self, other):
2236 """
2237 implicit on all axes and on Tsys
2238 """
2239 varlist = vars()
2240 s = None
2241 if isinstance(other, scantable):
2242 s = scantable(self._math._binaryop(self, other, "DIV"))
2243 elif isinstance(other, float):
2244 if other == 0.0:
2245 raise ZeroDivisionError("Dividing by zero is not recommended")
2246 s = scantable(self._math._unaryop(self, other, "DIV", False))
2247 else:
2248 raise TypeError("Other input is not a scantable or float value")
2249 s._add_history("operator /", varlist)
2250 return s
2251
2252 def get_fit(self, row=0):
2253 """
2254 Print or return the stored fits for a row in the scantable
2255 Parameters:
2256 row: the row which the fit has been applied to.
2257 """
2258 if row > self.nrow():
2259 return
2260 from asap.asapfit import asapfit
2261 fit = asapfit(self._getfit(row))
2262 if rcParams['verbose']:
2263 #print fit
2264 asaplog.push( '%s' %(fit) )
2265 print_log()
2266 return
2267 else:
2268 return fit.as_dict()
2269
2270 def flag_nans(self):
2271 """
2272 Utility function to flag NaN values in the scantable.
2273 """
2274 import numpy
2275 basesel = self.get_selection()
2276 for i in range(self.nrow()):
2277 sel = self.get_row_selector(i)
2278 self.set_selection(basesel+sel)
2279 nans = numpy.isnan(self._getspectrum(0))
2280 if numpy.any(nans):
2281 bnans = [ bool(v) for v in nans]
2282 self.flag(bnans)
2283 self.set_selection(basesel)
2284
2285 def get_row_selector(self, rowno):
2286 return selector(beams=self.getbeam(rowno),
2287 ifs=self.getif(rowno),
2288 pols=self.getpol(rowno),
2289 scans=self.getscan(rowno),
2290 cycles=self.getcycle(rowno))
2291
2292 def _add_history(self, funcname, parameters):
2293 if not rcParams['scantable.history']:
2294 return
2295 # create date
2296 sep = "##"
2297 from datetime import datetime
2298 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2299 hist = dstr+sep
2300 hist += funcname+sep#cdate+sep
2301 if parameters.has_key('self'): del parameters['self']
2302 for k, v in parameters.iteritems():
2303 if type(v) is dict:
2304 for k2, v2 in v.iteritems():
2305 hist += k2
2306 hist += "="
2307 if isinstance(v2, scantable):
2308 hist += 'scantable'
2309 elif k2 == 'mask':
2310 if isinstance(v2, list) or isinstance(v2, tuple):
2311 hist += str(self._zip_mask(v2))
2312 else:
2313 hist += str(v2)
2314 else:
2315 hist += str(v2)
2316 else:
2317 hist += k
2318 hist += "="
2319 if isinstance(v, scantable):
2320 hist += 'scantable'
2321 elif k == 'mask':
2322 if isinstance(v, list) or isinstance(v, tuple):
2323 hist += str(self._zip_mask(v))
2324 else:
2325 hist += str(v)
2326 else:
2327 hist += str(v)
2328 hist += sep
2329 hist = hist[:-2] # remove trailing '##'
2330 self._addhistory(hist)
2331
2332
2333 def _zip_mask(self, mask):
2334 mask = list(mask)
2335 i = 0
2336 segments = []
2337 while mask[i:].count(1):
2338 i += mask[i:].index(1)
2339 if mask[i:].count(0):
2340 j = i + mask[i:].index(0)
2341 else:
2342 j = len(mask)
2343 segments.append([i, j])
2344 i = j
2345 return segments
2346
2347 def _get_ordinate_label(self):
2348 fu = "("+self.get_fluxunit()+")"
2349 import re
2350 lbl = "Intensity"
2351 if re.match(".K.", fu):
2352 lbl = "Brightness Temperature "+ fu
2353 elif re.match(".Jy.", fu):
2354 lbl = "Flux density "+ fu
2355 return lbl
2356
2357 def _check_ifs(self):
2358 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2359 nchans = filter(lambda t: t > 0, nchans)
2360 return (sum(nchans)/len(nchans) == nchans[0])
2361
    def _fill(self, names, unit, average, getpt, antenna):
        """
        Import one or more data files into this scantable, replacing its
        contents (re-initialises self via Scantable.__init__).
        Parameters:
            names:     list of file names; environment variables and '~'
                       are expanded and each file must exist on disk
            unit:      if not None, flux unit to set after filling
            average:   if True, time-average integrations within each scan
            getpt:     forwarded to the filler's _open -- presumably
                       controls reading of pointing data; TODO confirm
                       against stfiller
            antenna:   antenna selection forwarded to the filler's _open
        """
        import os
        from asap._asap import stfiller
        first = True
        fullnames = []
        # validate and expand all file names before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    # verbose mode logs the error and bails out quietly
                    asaplog.push(msg)
                    #print asaplog.pop().strip()
                    print_log( 'ERROR' )
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage flag: 1 -> disk-based table, 0 -> memory table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = stfiller(tbl)
            rx = rcParams['scantable.reference']
            r._setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r._open(name, antenna, -1, -1, getpt)
            r._read()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge this file's data with what has been read so far
                tbl = self._math._merge([self, tbl])
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r._close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
2404
2405 def __getitem__(self, key):
2406 if key < 0:
2407 key += self.nrow()
2408 if key >= self.nrow():
2409 raise IndexError("Row index out of range.")
2410 return self._getspectrum(key)
2411
2412 def __setitem__(self, key, value):
2413 if key < 0:
2414 key += self.nrow()
2415 if key >= self.nrow():
2416 raise IndexError("Row index out of range.")
2417 if not hasattr(value, "__len__") or \
2418 len(value) > self.nchan(self.getif(key)):
2419 raise ValueError("Spectrum length doesn't match.")
2420 return self._setspectrum(value, key)
2421
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
2424
2425 def __iter__(self):
2426 for i in range(len(self)):
2427 yield self[i]
Note: See TracBrowser for help on using the repository browser.