source: trunk/python/scantable.py@ 1843

Last change on this file since 1843 was 1843, checked in by Malte Marquarding, 14 years ago

Ticket #192: enabling new filler. Get rid of unused asapreader.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.7 KB
1import os
2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
7from asap.env import is_casapy
8from asap._asap import Scantable
9from asap._asap import filler
10from asap.parameters import rcParams
11from asap.logging import asaplog, print_log, print_log_dec
12from asap.selector import selector
13from asap.linecatalog import linecatalog
14from asap.coordinate import coordinate
15from asap.utils import _n_bools, mask_not, mask_and, mask_or
16
17
18def preserve_selection(func):
19 @wraps_dec(func)
20 def wrap(obj, *args, **kw):
21 basesel = obj.get_selection()
22 val = func(obj, *args, **kw)
23 obj.set_selection(basesel)
24 return val
25 return wrap
26
27
28def is_scantable(filename):
29 return (os.path.isdir(filename)
30 and not os.path.exists(filename+'/table.f1')
31 and os.path.exists(filename+'/table.info'))
32
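# Illustrative usage of is_scantable (a sketch; the file path is hypothetical
# and the import path assumes this module is reachable as asap.scantable):
#
#     from asap.scantable import is_scantable, scantable
#     if is_scantable('/data/orion.asap'):
#         s = scantable('/data/orion.asap', average=False)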
33
34class scantable(Scantable):
35 """
36 The ASAP container for scans
37 """
38
39 @print_log_dec
40 def __init__(self, filename, average=None, unit=None, getpt=None,
41 antenna=None, parallactify=None):
42 """
43 Create a scantable from a saved one or make a reference
44 Parameters:
45 filename: the name of an asap table on disk
46 or
47 the name of a rpfits/sdfits/ms file
48 (integrations within scans are auto averaged
49 and the whole file is read)
50 or
51 [advanced] a reference to an existing
52 scantable
53 average: average all integrations within a scan on read.
54 The default (True) is taken from .asaprc.
55 unit: brightness unit; must be consistent with K or Jy.
56 Over-rides the default selected by the filler
57 (input rpfits/sdfits/ms) or replaces the value
58 in existing scantables
59 getpt: for MeasurementSet input data only:
60 If True, all pointing data are filled.
61 The default is True. Setting this to False can make
62 loading the MS data faster in some cases.
63 antenna: Antenna selection. integer (id) or string (name
64 or id).
65 parallactify: Indicate that the data have been parallactified.
66 Default is taken from the .asaprc file.
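 Example (an illustrative sketch; the file names are hypothetical):
     s = scantable('2008-03-12_0945.rpf', average=True)   # fill from rpfits
     t = scantable('orion.asap', average=False)           # open a saved asap table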
67 """
68 if average is None:
69 average = rcParams['scantable.autoaverage']
70 if getpt is None:
71 getpt = True
72 if antenna is not None:
73 asaplog.push("Antenna selection currently unsupported."
74 "Using '0'")
75 print_log('WARN')
76 if antenna is None:
77 antenna = ''
78 elif type(antenna) == int:
79 antenna = '%s' % antenna
80 elif type(antenna) == list:
81 tmpstr = ''
82 for i in range( len(antenna) ):
83 if type(antenna[i]) == int:
84 tmpstr = tmpstr + ('%s,'%(antenna[i]))
85 elif type(antenna[i]) == str:
86 tmpstr=tmpstr+antenna[i]+','
87 else:
88 asaplog.push('Bad antenna selection.')
89 print_log('ERROR')
90 return
91 antenna = tmpstr.rstrip(',')
92 parallactify = parallactify or rcParams['scantable.parallactify']
93 varlist = vars()
94 from asap._asap import stmath
95 self._math = stmath( rcParams['insitu'] )
96 if isinstance(filename, Scantable):
97 Scantable.__init__(self, filename)
98 else:
99 if isinstance(filename, str):
100 filename = os.path.expandvars(filename)
101 filename = os.path.expanduser(filename)
102 if not os.path.exists(filename):
103 s = "File '%s' not found." % (filename)
104 if rcParams['verbose']:
105 asaplog.push(s)
106 print_log('ERROR')
107 return
108 raise IOError(s)
109 if is_scantable(filename):
110 ondisk = rcParams['scantable.storage'] == 'disk'
111 Scantable.__init__(self, filename, ondisk)
112 if unit is not None:
113 self.set_fluxunit(unit)
114 # do not reset to the default freqframe
115 #self.set_freqframe(rcParams['scantable.freqframe'])
116 elif os.path.isdir(filename) \
117 and not os.path.exists(filename+'/table.f1'):
118 msg = "The given file '%s'is not a valid " \
119 "asap table." % (filename)
120 if rcParams['verbose']:
121 #print msg
122 asaplog.push( msg )
123 print_log( 'ERROR' )
124 return
125 else:
126 raise IOError(msg)
127 else:
128 self._fill([filename], unit, average, getpt, antenna)
129 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
130 and isinstance(filename[-1], str):
131 self._fill(filename, unit, average, getpt, antenna)
132 self.parallactify(parallactify)
133 self._add_history("scantable", varlist)
134 print_log()
135
136 @print_log_dec
137 def save(self, name=None, format=None, overwrite=False):
138 """
139 Store the scantable on disk. This can be an asap (aips++) Table,
140 SDFITS or MS2 format.
141 Parameters:
142 name: the name of the outputfile. For format "ASCII"
143 this is the root file name (data in 'name'.txt
144 and header in 'name'_header.txt)
145 format: an optional file format. Default is ASAP.
146 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
147 'SDFITS' (save as SDFITS file)
148 'ASCII' (saves as ascii text file)
149 'MS2' (saves as an aips++
150 MeasurementSet V2)
151 'FITS' (save as image FITS - not
152 readable by class)
153 'CLASS' (save as FITS readable by CLASS)
154 overwrite: If the file should be overwritten if it exists.
155 The default False is to return with warning
156 without writing the output. USE WITH CARE.
157 Example:
158 scan.save('myscan.asap')
159 scan.save('myscan.sdfits', 'SDFITS')
160 """
161 from os import path
162 format = format or rcParams['scantable.save']
163 suffix = '.'+format.lower()
164 if name is None or name == "":
165 name = 'scantable'+suffix
166 msg = "No filename given. Using default name %s..." % name
167 asaplog.push(msg)
168 name = path.expandvars(name)
169 if path.isfile(name) or path.isdir(name):
170 if not overwrite:
171 msg = "File %s exists." % name
172 if rcParams['verbose']:
173 #print msg
174 asaplog.push( msg )
175 print_log( 'ERROR' )
176 return
177 else:
178 raise IOError(msg)
179 format2 = format.upper()
180 if format2 == 'ASAP':
181 self._save(name)
182 else:
183 from asap._asap import stwriter as stw
184 writer = stw(format2)
185 writer.write(self, name)
186 print_log()
187 return
188
189 def copy(self):
190 """
191 Return a copy of this scantable.
192 Note:
193 This makes a full (deep) copy. scan2 = scan1 makes a reference.
194 Parameters:
195 none
196 Example:
197 copiedscan = scan.copy()
198 """
199 sd = scantable(Scantable._copy(self))
200 return sd
201
202 def drop_scan(self, scanid=None):
203 """
204 Return a new scantable where the specified scan number(s) has(have)
205 been dropped.
206 Parameters:
207 scanid: a (list of) scan number(s)
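 Example (illustrative; the scan numbers are arbitrary):
     cleaned = scan.drop_scan([2, 3])   # new scantable without scans 2 and 3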
208 """
209 from asap import _is_sequence_or_number as _is_valid
210 from asap import _to_list
211 from asap import unique
212 if not _is_valid(scanid):
213 if rcParams['verbose']:
214 #print "Please specify a scanno to drop from the scantable"
215 asaplog.push( 'Please specify a scanno to drop from the scantable' )
216 print_log( 'ERROR' )
217 return
218 else:
219 raise RuntimeError("No scan given")
220 try:
221 scanid = _to_list(scanid)
222 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
223 for sid in scanid: allscans.remove(sid)
224 if len(allscans) == 0:
225 raise ValueError("Can't remove all scans")
226 except ValueError:
227 if rcParams['verbose']:
228 #print "Couldn't find any match."
229 print_log()
230 asaplog.push( "Couldn't find any match." )
231 print_log( 'ERROR' )
232 return
233 else: raise
234 try:
235 sel = selector(scans=allscans)
236 return self._select_copy(sel)
237 except RuntimeError:
238 if rcParams['verbose']:
239 #print "Couldn't find any match."
240 print_log()
241 asaplog.push( "Couldn't find any match." )
242 print_log( 'ERROR' )
243 else:
244 raise
245
246 def _select_copy(self, selection):
247 orig = self.get_selection()
248 self.set_selection(orig+selection)
249 cp = self.copy()
250 self.set_selection(orig)
251 return cp
252
253 def get_scan(self, scanid=None):
254 """
255 Return a specific scan (by scanno) or collection of scans (by
256 source name) in a new scantable.
257 Note:
258 See scantable.drop_scan() for the inverse operation.
259 Parameters:
260 scanid: a (list of) scanno or a source name, unix-style
261 patterns are accepted for source name matching, e.g.
262 '*_R' gets all 'ref' scans
263 Example:
264 # get all scans containing the source '323p459'
265 newscan = scan.get_scan('323p459')
266 # get all 'off' scans
267 refscans = scan.get_scan('*_R')
268 # get a subset of scans by scanno (as listed in scan.summary())
269 newscan = scan.get_scan([0, 2, 7, 10])
270 """
271 if scanid is None:
272 if rcParams['verbose']:
273 #print "Please specify a scan no or name to " \
274 # "retrieve from the scantable"
275 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
276 print_log( 'ERROR' )
277 return
278 else:
279 raise RuntimeError("No scan given")
280
281 try:
282 bsel = self.get_selection()
283 sel = selector()
284 if type(scanid) is str:
285 sel.set_name(scanid)
286 return self._select_copy(sel)
287 elif type(scanid) is int:
288 sel.set_scans([scanid])
289 return self._select_copy(sel)
290 elif type(scanid) is list:
291 sel.set_scans(scanid)
292 return self._select_copy(sel)
293 else:
294 msg = "Illegal scanid type, use 'int' or 'list' of ints."
295 if rcParams['verbose']:
296 #print msg
297 asaplog.push( msg )
298 print_log( 'ERROR' )
299 else:
300 raise TypeError(msg)
301 except RuntimeError:
302 if rcParams['verbose']:
303 #print "Couldn't find any match."
304 print_log()
305 asaplog.push( "Couldn't find any match." )
306 print_log( 'ERROR' )
307 else: raise
308
309 def __str__(self):
310 return Scantable._summary(self, True)
311
312 def summary(self, filename=None):
313 """
314 Print a summary of the contents of this scantable.
315 Parameters:
316 filename: the name of a file to write the output to
317 Default - no file output
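 Example (illustrative; the output file name is hypothetical):
     scan.summary()                      # print/pager the summary
     scan.summary('obs_summary.txt')     # also write it to a text file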
318 """
319 info = Scantable._summary(self, True)
320 if filename is not None:
321 if filename == "":
322 filename = 'scantable_summary.txt'
323 from os.path import expandvars, isdir
324 filename = expandvars(filename)
325 if not isdir(filename):
326 data = open(filename, 'w')
327 data.write(info)
328 data.close()
329 else:
330 msg = "Illegal file name '%s'." % (filename)
331 if rcParams['verbose']:
332 #print msg
333 asaplog.push( msg )
334 print_log( 'ERROR' )
335 else:
336 raise IOError(msg)
337 if rcParams['verbose']:
338 try:
339 from IPython.genutils import page as pager
340 except ImportError:
341 from pydoc import pager
342 pager(info)
343 else:
344 return info
345
346 def get_spectrum(self, rowno):
347 """Return the spectrum for the current row in the scantable as a list.
348 Parameters:
349 rowno: the row number to retrieve the spectrum from
350 """
351 return self._getspectrum(rowno)
352
353 def get_mask(self, rowno):
354 """Return the mask for the current row in the scantable as a list.
355 Parameters:
356 rowno: the row number to retrieve the mask from
357 """
358 return self._getmask(rowno)
359
360 def set_spectrum(self, spec, rowno):
361 """Return the spectrum for the current row in the scantable as a list.
362 Parameters:
363 spec: the spectrum
364 rowno: the row number to set the spectrum for
365 """
366 assert(len(spec) == self.nchan())
367 return self._setspectrum(spec, rowno)
368
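# Illustrative get/modify/set round trip (a sketch; row 0 and the scaling
# factor are arbitrary):
#
#     spec = scan.get_spectrum(0)            # spectrum of row 0 as a list
#     spec = [2.0 * v for v in spec]         # e.g. scale by 2
#     scan.set_spectrum(spec, 0)             # write it back to row 0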
369 def get_coordinate(self, rowno):
370 """Return the (spectral) coordinate for a a given 'rowno'.
371 NOTE:
372 * This coordinate is only valid until a scantable method modifies
373 the frequency axis.
374 * This coordinate contains the original frequency set-up,
375 NOT the new frame. The conversions however are done using the user
376 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
377 use scantable.freq_align first. Without it there is no closure,
378 i.e.
379 c = myscan.get_coordinate(0)
380 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
381
382 Parameters:
383 rowno: the row number for the spectral coordinate
384
385 """
386 return coordinate(Scantable.get_coordinate(self, rowno))
387
388 def get_selection(self):
389 """
390 Get the selection object currently set on this scantable.
391 Parameters:
392 none
393 Example:
394 sel = scan.get_selection()
395 sel.set_ifs(0) # select IF 0
396 scan.set_selection(sel) # apply modified selection
397 """
398 return selector(self._getselection())
399
400 def set_selection(self, selection=None, **kw):
401 """
402 Select a subset of the data. All following operations on this scantable
403 are only applied to this selection.
404 Parameters:
405 selection: a selector object (default unset the selection),
406
407 or
408
409 any combination of
410 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
411
412 Examples:
413 sel = selector() # create a selection object
414 sel.set_scans([0, 3]) # select SCANNO 0 and 3
415 scan.set_selection(sel) # set the selection
416 scan.summary() # will only print the summary of scanno 0 and 3
417 scan.set_selection() # unset the selection
418 # or the equivalent
419 scan.set_selection(scans=[0,3])
420 scan.summary() # will only print the summary of scanno 0 and 3
421 scan.set_selection() # unset the selection
422 """
423 if selection is None:
424 # reset
425 if len(kw) == 0:
426 selection = selector()
427 else:
428 # try keywords
429 for k in kw:
430 if k not in selector.fields:
431 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
432 selection = selector(**kw)
433 self._setselection(selection)
434
435 def get_row(self, row=0, insitu=None):
436 """
437 Select a row in the scantable.
438 Return a scantable with single row.
439 Parameters:
440 row: row no of integration, default is 0.
441 insitu: if False a new scantable is returned.
442 Otherwise, the scaling is done in-situ
443 The default is taken from .asaprc (False)
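 Example (illustrative; the row number is arbitrary):
     row10 = scan.get_row(10, insitu=False)   # a new scantable holding row 10 only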
444 """
445 if insitu is None: insitu = rcParams['insitu']
446 if not insitu:
447 workscan = self.copy()
448 else:
449 workscan = self
450 # Select a row
451 sel=selector()
452 sel.set_scans([workscan.getscan(row)])
453 sel.set_cycles([workscan.getcycle(row)])
454 sel.set_beams([workscan.getbeam(row)])
455 sel.set_ifs([workscan.getif(row)])
456 sel.set_polarisations([workscan.getpol(row)])
457 sel.set_name(workscan._getsourcename(row))
458 workscan.set_selection(sel)
459 if not workscan.nrow() == 1:
460 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
461 raise RuntimeError(msg)
462 del sel
463 if insitu:
464 self._assign(workscan)
465 else:
466 return workscan
467
468 #def stats(self, stat='stddev', mask=None):
469 def stats(self, stat='stddev', mask=None, form='3.3f'):
470 """
471 Determine the specified statistic of the current beam/if/pol
472 Takes a 'mask' as an optional parameter to specify which
473 channels should be excluded.
474 Parameters:
475 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
476 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
477 mask: an optional mask specifying where the statistic
478 should be determined.
479 form: format string to print statistic values
480 Example:
481 scan.set_unit('channel')
482 msk = scan.create_mask([100, 200], [500, 600])
483 scan.stats(stat='mean', mask=msk)
484 """
485 mask = mask or []
486 if not self._check_ifs():
487 raise ValueError("Cannot apply mask as the IFs have different "
488 "number of channels. Please use setselection() "
489 "to select individual IFs")
490 rtnabc = False
491 if stat.lower().endswith('_abc'): rtnabc = True
492 getchan = False
493 if stat.lower().startswith('min') or stat.lower().startswith('max'):
494 chan = self._math._minmaxchan(self, mask, stat)
495 getchan = True
496 statvals = []
497 if not rtnabc: statvals = self._math._stats(self, mask, stat)
498
499 #def cb(i):
500 # return statvals[i]
501
502 #return self._row_callback(cb, stat)
503
504 label=stat
505 #callback=cb
506 out = ""
507 #outvec = []
508 sep = '-'*50
509 for i in range(self.nrow()):
510 refstr = ''
511 statunit= ''
512 if getchan:
513 qx, qy = self.chan2data(rowno=i, chan=chan[i])
514 if rtnabc:
515 statvals.append(qx['value'])
516 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
517 statunit= '['+qx['unit']+']'
518 else:
519 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
520
521 tm = self._gettime(i)
522 src = self._getsourcename(i)
523 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
524 out += 'Time[%s]:\n' % (tm)
525 if self.nbeam(-1) > 1:
526 out += ' Beam[%d] ' % (self.getbeam(i))
527 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
528 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
529 #outvec.append(callback(i))
530 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
531 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
532 out += sep+"\n"
533
534 if rcParams['verbose']:
535 import os
536 if os.environ.has_key( 'USER' ):
537 usr=os.environ['USER']
538 else:
539 import commands
540 usr=commands.getoutput( 'whoami' )
541 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
542 f=open(tmpfile,'w')
543 print >> f, sep
544 print >> f, ' %s %s' % (label, statunit)
545 print >> f, sep
546 print >> f, out
547 f.close()
548 f=open(tmpfile,'r')
549 x=f.readlines()
550 f.close()
551 blanc=''
552 asaplog.push(blanc.join(x), False)
553 #for xx in x:
554 # asaplog.push( xx, False )
555 print_log()
556 return statvals
557
558 def chan2data(self, rowno=0, chan=0):
559 """
560 Returns channel/frequency/velocity and spectral value
561 at an arbitrary row and channel in the scantable.
562 Parameters:
563 rowno: a row number in the scantable. Default is the
564 first row, i.e. rowno=0
565 chan: a channel in the scantable. Default is the first
566 channel, i.e. chan=0
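 Example (an illustrative sketch; row and channel are arbitrary):
     qx, qy = scan.chan2data(rowno=0, chan=100)
     # qx -> {'unit': 'channel', 'value': 100.0}
     # qy -> {'unit': 'K', 'value': ...}   (units depend on the current settings)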
567 """
568 if isinstance(rowno, int) and isinstance(chan, int):
569 qx = {'unit': self.get_unit(),
570 'value': self._getabcissa(rowno)[chan]}
571 qy = {'unit': self.get_fluxunit(),
572 'value': self._getspectrum(rowno)[chan]}
573 return qx, qy
574
575 def stddev(self, mask=None):
576 """
577 Determine the standard deviation of the current beam/if/pol
578 Takes a 'mask' as an optional parameter to specify which
579 channels should be excluded.
580 Parameters:
581 mask: an optional mask specifying where the standard
582 deviation should be determined.
583
584 Example:
585 scan.set_unit('channel')
586 msk = scan.create_mask([100, 200], [500, 600])
587 scan.stddev(mask=msk)
588 """
589 return self.stats(stat='stddev', mask=mask)
590
591
592 def get_column_names(self):
593 """
594 Return a list of column names, which can be used for selection.
595 """
596 return list(Scantable.get_column_names(self))
597
598 def get_tsys(self, row=-1):
599 """
600 Return the System temperatures.
601 Returns:
602 a list of Tsys values for the current selection
603 """
604 if row > -1:
605 return self._get_column(self._gettsys, row)
606 return self._row_callback(self._gettsys, "Tsys")
607
608
609 def get_weather(self, row=-1):
610 values = self._get_column(self._get_weather, row)
611 if row > -1:
612 return {'temperature': values[0],
613 'pressure': values[1], 'humidity' : values[2],
614 'windspeed' : values[3], 'windaz' : values[4]
615 }
616 else:
617 out = []
618 for r in values:
619
620 out.append({'temperature': r[0],
621 'pressure': r[1], 'humidity' : r[2],
622 'windspeed' : r[3], 'windaz' : r[4]
623 })
624 return out
625
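# get_weather returns one dictionary per row, or a single dictionary when a
# row number is given. An illustrative sketch of the returned keys:
#
#     w = scan.get_weather(row=0)
#     # w -> {'temperature': ..., 'pressure': ..., 'humidity': ...,
#     #       'windspeed': ..., 'windaz': ...}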
626 def _row_callback(self, callback, label):
627 out = ""
628 outvec = []
629 sep = '-'*50
630 for i in range(self.nrow()):
631 tm = self._gettime(i)
632 src = self._getsourcename(i)
633 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
634 out += 'Time[%s]:\n' % (tm)
635 if self.nbeam(-1) > 1:
636 out += ' Beam[%d] ' % (self.getbeam(i))
637 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
638 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
639 outvec.append(callback(i))
640 out += '= %3.3f\n' % (outvec[i])
641 out += sep+'\n'
642 if rcParams['verbose']:
643 asaplog.push(sep)
644 asaplog.push(" %s" % (label))
645 asaplog.push(sep)
646 asaplog.push(out)
647 print_log()
648 return outvec
649
650 def _get_column(self, callback, row=-1):
651 """
652 """
653 if row == -1:
654 return [callback(i) for i in range(self.nrow())]
655 else:
656 if 0 <= row < self.nrow():
657 return callback(row)
658
659
660 def get_time(self, row=-1, asdatetime=False):
661 """
662 Get a list of time stamps for the observations.
663 Return a datetime object for each integration time stamp in the scantable.
664 Parameters:
665 row: row no of integration. Default -1 return all rows
666 asdatetime: return values as datetime objects rather than strings
667 Example:
668 none
669 """
670 from time import strptime
671 from datetime import datetime
672 times = self._get_column(self._gettime, row)
673 if not asdatetime:
674 return times
675 format = "%Y/%m/%d/%H:%M:%S"
676 if isinstance(times, list):
677 return [datetime(*strptime(i, format)[:6]) for i in times]
678 else:
679 return datetime(*strptime(times, format)[:6])
680
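# Illustrative use of get_time (a sketch):
#
#     stamps = scan.get_time()                     # all rows, as strings
#     dt0 = scan.get_time(row=0, asdatetime=True)  # a datetime.datetime object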
681
682 def get_inttime(self, row=-1):
683 """
684 Get a list of integration times for the observations.
685 Return a time in seconds for each integration in the scantable.
686 Parameters:
687 row: row no of integration. Default -1 return all rows.
688 Example:
689 none
690 """
691 return self._get_column(self._getinttime, row)
692
693
694 def get_sourcename(self, row=-1):
695 """
696 Get a list of source names for the observations.
697 Return a string for each integration in the scantable.
698 Parameters:
699 row: row no of integration. Default -1 return all rows.
700 Example:
701 none
702 """
703 return self._get_column(self._getsourcename, row)
704
705 def get_elevation(self, row=-1):
706 """
707 Get a list of elevations for the observations.
708 Return a float for each integration in the scantable.
709 Parameters:
710 row: row no of integration. Default -1 return all rows.
711 Example:
712 none
713 """
714 return self._get_column(self._getelevation, row)
715
716 def get_azimuth(self, row=-1):
717 """
718 Get a list of azimuths for the observations.
719 Return a float for each integration in the scantable.
720 Parameters:
721 row: row no of integration. Default -1 return all rows.
722 Example:
723 none
724 """
725 return self._get_column(self._getazimuth, row)
726
727 def get_parangle(self, row=-1):
728 """
729 Get a list of parallactic angles for the observations.
730 Return a float for each integration in the scantable.
731 Parameters:
732 row: row no of integration. Default -1 return all rows.
733 Example:
734 none
735 """
736 return self._get_column(self._getparangle, row)
737
738 def get_direction(self, row=-1):
739 """
740 Get a list of Positions on the sky (direction) for the observations.
741 Return a string for each integration in the scantable.
742 Parameters:
743 row: row no of integration. Default -1 return all rows
744 Example:
745 none
746 """
747 return self._get_column(self._getdirection, row)
748
749 def get_directionval(self, row=-1):
750 """
751 Get a list of Positions on the sky (direction) for the observations.
752 Return a float for each integration in the scantable.
753 Parameters:
754 row: row no of integration. Default -1 return all rows
755 Example:
756 none
757 """
758 return self._get_column(self._getdirectionvec, row)
759
760 @print_log_dec
761 def set_unit(self, unit='channel'):
762 """
763 Set the unit for all following operations on this scantable
764 Parameters:
765 unit: optional unit, default is 'channel'
766 one of '*Hz', 'km/s', 'channel', ''
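 Example (illustrative):
     scan.set_unit('km/s')   # subsequent masks/plots use velocity
     scan.set_unit('GHz')    # or frequency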
767 """
768 varlist = vars()
769 if unit in ['', 'pixel', 'channel']:
770 unit = ''
771 inf = list(self._getcoordinfo())
772 inf[0] = unit
773 self._setcoordinfo(inf)
774 self._add_history("set_unit", varlist)
775
776 @print_log_dec
777 def set_instrument(self, instr):
778 """
779 Set the instrument for subsequent processing.
780 Parameters:
781 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
782 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
783 """
784 self._setInstrument(instr)
785 self._add_history("set_instrument", vars())
786 print_log()
787
788 @print_log_dec
789 def set_feedtype(self, feedtype):
790 """
791 Overwrite the feed type, which might not be set correctly.
792 Parameters:
793 feedtype: 'linear' or 'circular'
794 """
795 self._setfeedtype(feedtype)
796 self._add_history("set_feedtype", vars())
797 print_log()
798
799 @print_log_dec
800 def set_doppler(self, doppler='RADIO'):
801 """
802 Set the doppler for all following operations on this scantable.
803 Parameters:
804 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
805 """
806 varlist = vars()
807 inf = list(self._getcoordinfo())
808 inf[2] = doppler
809 self._setcoordinfo(inf)
810 self._add_history("set_doppler", vars())
811 print_log()
812
813 @print_log_dec
814 def set_freqframe(self, frame=None):
815 """
816 Set the frame type of the Spectral Axis.
817 Parameters:
818 frame: an optional frame type, default 'LSRK'. Valid frames are:
819 'TOPO', 'LSRD', 'LSRK', 'BARY',
820 'GEO', 'GALACTO', 'LGROUP', 'CMB'
821 Examples:
822 scan.set_freqframe('BARY')
823 """
824 frame = frame or rcParams['scantable.freqframe']
825 varlist = vars()
826 # "REST" is not implemented in casacore
827 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
828 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
829 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
830 'GEO', 'GALACTO', 'LGROUP', 'CMB']
831
832 if frame in valid:
833 inf = list(self._getcoordinfo())
834 inf[1] = frame
835 self._setcoordinfo(inf)
836 self._add_history("set_freqframe", varlist)
837 else:
838 msg = "Please specify a valid freq type. Valid types are:\n", valid
839 if rcParams['verbose']:
840 #print msg
841 asaplog.push( msg )
842 print_log( 'ERROR' )
843 else:
844 raise TypeError(msg)
845 print_log()
846
847 def set_dirframe(self, frame=""):
848 """
849 Set the frame type of the Direction on the sky.
850 Parameters:
851 frame: an optional frame type, default ''. Valid frames are:
852 'J2000', 'B1950', 'GALACTIC'
853 Examples:
854 scan.set_dirframe('GALACTIC')
855 """
856 varlist = vars()
857 try:
858 Scantable.set_dirframe(self, frame)
859 except RuntimeError, msg:
860 if rcParams['verbose']:
861 #print msg
862 print_log()
863 asaplog.push( str(msg) )
864 print_log( 'ERROR' )
865 else:
866 raise
867 self._add_history("set_dirframe", varlist)
868
869 def get_unit(self):
870 """
871 Get the default unit set in this scantable
872 Returns:
873 A unit string
874 """
875 inf = self._getcoordinfo()
876 unit = inf[0]
877 if unit == '': unit = 'channel'
878 return unit
879
880 def get_abcissa(self, rowno=0):
881 """
882 Get the abcissa in the current coordinate setup for the currently
883 selected Beam/IF/Pol
884 Parameters:
885 rowno: an optional row number in the scantable. Default is the
886 first row, i.e. rowno=0
887 Returns:
888 The abcissa values and the format string (as a dictionary)
889 """
890 abc = self._getabcissa(rowno)
891 lbl = self._getabcissalabel(rowno)
892 print_log()
893 return abc, lbl
894
895 def flag(self, mask=None, unflag=False):
896 """
897 Flag the selected data using an optional channel mask.
898 Parameters:
899 mask: an optional channel mask, created with create_mask. Default
900 (no mask) is all channels.
901 unflag: if True, unflag the data
902 """
903 varlist = vars()
904 mask = mask or []
905 try:
906 self._flag(mask, unflag)
907 except RuntimeError, msg:
908 if rcParams['verbose']:
909 #print msg
910 print_log()
911 asaplog.push( str(msg) )
912 print_log( 'ERROR' )
913 return
914 else: raise
915 self._add_history("flag", varlist)
916
917 def flag_row(self, rows=[], unflag=False):
918 """
919 Flag the selected data in row-based manner.
920 Parameters:
921 rows: list of row numbers to be flagged. Default is no row
922 (must be explicitly specified to execute row-based flagging).
923 unflag: if True, unflag the data.
924 """
925 varlist = vars()
926 try:
927 self._flag_row(rows, unflag)
928 except RuntimeError, msg:
929 if rcParams['verbose']:
930 print_log()
931 asaplog.push( str(msg) )
932 print_log('ERROR')
933 return
934 else: raise
935 self._add_history("flag_row", varlist)
936
937 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
938 """
939 Flag the selected data outside a specified range (channel-based)
940 Parameters:
941 uthres: upper threshold.
942 dthres: lower threshold
943 clipoutside: True for flagging data outside the range [dthres:uthres].
944 False for flagging data inside the range.
945 unflag : if True, unflag the data.
946 """
947 varlist = vars()
948 try:
949 self._clip(uthres, dthres, clipoutside, unflag)
950 except RuntimeError, msg:
951 if rcParams['verbose']:
952 print_log()
953 asaplog.push(str(msg))
954 print_log('ERROR')
955 return
956 else: raise
957 self._add_history("clip", varlist)
958
959 @print_log_dec
960 def lag_flag(self, start, end, unit="MHz", insitu=None):
961 #def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
962 """
963 Flag the data in 'lag' space by providing a frequency to remove.
964 Flagged data in the scantable gets interpolated over the region.
965 No taper is applied.
966 Parameters:
967 start: the start frequency (really a period within the
968 bandwidth) or period to remove
969 end: the end frequency or period to remove
970 unit: the frequency unit (default "MHz") or "" for
971 explicit lag channels
972 Notes:
973 It is recommended to flag edges of the band or strong
974 signals beforehand.
975 """
976 if insitu is None: insitu = rcParams['insitu']
977 self._math._setinsitu(insitu)
978 varlist = vars()
979 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
980 if not (unit == "" or base.has_key(unit)):
981 raise ValueError("%s is not a valid unit." % unit)
982 try:
983 if unit == "":
984 s = scantable(self._math._lag_flag(self, start, end, "lags"))
985 else:
986 s = scantable(self._math._lag_flag(self, start*base[unit],
987 end*base[unit], "frequency"))
988 except RuntimeError, msg:
989 if rcParams['verbose']:
990 #print msg
991 print_log()
992 asaplog.push( str(msg) )
993 print_log( 'ERROR' )
994 return
995 else: raise
996 s._add_history("lag_flag", varlist)
997 print_log()
998 if insitu:
999 self._assign(s)
1000 else:
1001 return s
1002
1003 @print_log_dec
1004 def create_mask(self, *args, **kwargs):
1005 """
1006 Compute and return a mask based on [min, max] windows.
1007 The specified windows are to be INCLUDED, when the mask is
1008 applied.
1009 Parameters:
1010 [min, max], [min2, max2], ...
1011 Pairs of start/end points (inclusive) specifying the regions
1012 to be masked
1013 invert: optional argument. If specified as True,
1014 return an inverted mask, i.e. the regions
1015 specified are EXCLUDED
1016 row: create the mask using the specified row for
1017 unit conversions, default is row=0
1018 only necessary if frequency varies over rows.
1019 Example:
1020 scan.set_unit('channel')
1021 a)
1022 msk = scan.create_mask([400, 500], [800, 900])
1023 # masks everything outside 400 and 500
1024 # and 800 and 900 in the unit 'channel'
1025
1026 b)
1027 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1028 # masks the regions between 400 and 500
1029 # and 800 and 900 in the unit 'channel'
1030 c)
1031 mask only channel 400
1032 msk = scan.create_mask([400])
1033 """
1034 row = kwargs.get("row", 0)
1035 data = self._getabcissa(row)
1036 u = self._getcoordinfo()[0]
1037 if rcParams['verbose']:
1038 if u == "": u = "channel"
1039 msg = "The current mask window unit is %s" % u
1040 i = self._check_ifs()
1041 if not i:
1042 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1043 asaplog.push(msg)
1044 n = self.nchan()
1045 msk = _n_bools(n, False)
1046 # test if args is a 'list' or a 'normal *args - UGLY!!!
1047
1048 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1049 and args or args[0]
1050 for window in ws:
1051 if len(window) == 1:
1052 window = [window[0], window[0]]
1053 if len(window) == 0 or len(window) > 2:
1054 raise ValueError("A window needs to be defined as [start(, end)]")
1055 if window[0] > window[1]:
1056 tmp = window[0]
1057 window[0] = window[1]
1058 window[1] = tmp
1059 for i in range(n):
1060 if data[i] >= window[0] and data[i] <= window[1]:
1061 msk[i] = True
1062 if kwargs.has_key('invert'):
1063 if kwargs.get('invert'):
1064 msk = mask_not(msk)
1065 print_log()
1066 return msk
1067
1068 def get_masklist(self, mask=None, row=0):
1069 """
1070 Compute and return a list of mask windows, [min, max].
1071 Parameters:
1072 mask: channel mask, created with create_mask.
1073 row: calculate the masklist using the specified row
1074 for unit conversions, default is row=0
1075 only necessary if frequency varies over rows.
1076 Returns:
1077 [min, max], [min2, max2], ...
1078 Pairs of start/end points (inclusive) specifying
1079 the masked regions
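 Example (an illustrative sketch; the channel ranges are arbitrary and the
 returned values are in the current unit):
     msk = scan.create_mask([400, 500], [800, 900])
     print scan.get_masklist(msk)   # e.g. [[400.0, 500.0], [800.0, 900.0]]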
1080 """
1081 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1082 raise TypeError("The mask should be list or tuple.")
1083 if len(mask) < 2:
1084 raise TypeError("The mask should contain more than 1 element.")
1085 if self.nchan() != len(mask):
1086 msg = "Number of channels in scantable != number of mask elements"
1087 raise TypeError(msg)
1088 data = self._getabcissa(row)
1089 u = self._getcoordinfo()[0]
1090 if rcParams['verbose']:
1091 if u == "": u = "channel"
1092 msg = "The current mask window unit is %s" % u
1093 i = self._check_ifs()
1094 if not i:
1095 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1096 asaplog.push(msg)
1097 masklist=[]
1098 ist, ien = None, None
1099 ist, ien=self.get_mask_indices(mask)
1100 if ist is not None and ien is not None:
1101 for i in xrange(len(ist)):
1102 range=[data[ist[i]],data[ien[i]]]
1103 range.sort()
1104 masklist.append([range[0],range[1]])
1105 return masklist
1106
1107 def get_mask_indices(self, mask=None):
1108 """
1109 Compute and Return lists of mask start indices and mask end indices.
1110 Parameters:
1111 mask: channel mask, created with create_mask.
1112 Returns:
1113 List of mask start indices and that of mask end indices,
1114 i.e., [istart1,istart2,....], [iend1,iend2,....].
1115 """
1116 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1117 raise TypeError("The mask should be list or tuple.")
1118 if len(mask) < 2:
1119 raise TypeError("The mask should contain more than 1 element.")
1120 istart=[]
1121 iend=[]
1122 if mask[0]: istart.append(0)
1123 for i in range(len(mask)-1):
1124 if not mask[i] and mask[i+1]:
1125 istart.append(i+1)
1126 elif mask[i] and not mask[i+1]:
1127 iend.append(i)
1128 if mask[len(mask)-1]: iend.append(len(mask)-1)
1129 if len(istart) != len(iend):
1130 raise RuntimeError("Numbers of mask start != mask end.")
1131 for i in range(len(istart)):
1132 if istart[i] > iend[i]:
1133 raise RuntimeError("Mask start index > mask end index")
1134 break
1135 return istart,iend
1136
1137# def get_restfreqs(self):
1138# """
1139# Get the restfrequency(s) stored in this scantable.
1140# The return value(s) are always of unit 'Hz'
1141# Parameters:
1142# none
1143# Returns:
1144# a list of doubles
1145# """
1146# return list(self._getrestfreqs())
1147
1148 def get_restfreqs(self, ids=None):
1149 """
1150 Get the restfrequency(s) stored in this scantable.
1151 The return value(s) are always of unit 'Hz'
1152 Parameters:
1153 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1154 be retrieved
1155 Returns:
1156 dictionary containing ids and a list of doubles for each id
1157 """
1158 if ids is None:
1159 rfreqs={}
1160 idlist = self.getmolnos()
1161 for i in idlist:
1162 rfreqs[i]=list(self._getrestfreqs(i))
1163 return rfreqs
1164 else:
1165 if type(ids)==list or type(ids)==tuple:
1166 rfreqs={}
1167 for i in ids:
1168 rfreqs[i]=list(self._getrestfreqs(i))
1169 return rfreqs
1170 else:
1171 return list(self._getrestfreqs(ids))
1172 #return list(self._getrestfreqs(ids))
1173
1174 def set_restfreqs(self, freqs=None, unit='Hz'):
1175 """
1176 ********NEED TO BE UPDATED begin************
1177 Set or replace the rest frequency.
1178 If the 'freqs' argument holds a scalar,
1179 then that rest frequency will be applied to all the selected
1180 data. If the 'freqs' argument holds
1181 a vector, then it MUST be of equal or smaller length than
1182 the number of IFs (and the available restfrequencies will be
1183 replaced by this vector). In this case, *all* data have
1184 the restfrequency set per IF according
1185 to the corresponding value you give in the 'freqs' vector.
1186 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1187 IF 1 gets restfreq 2e9.
1188 ********NEED TO BE UPDATED end************
1189 You can also specify the frequencies via a linecatalog.
1190
1191 Parameters:
1192 freqs: list of rest frequency values or string identifiers
1193 unit: unit for rest frequency (default 'Hz')
1194
1195 Example:
1196 # set the given restfrequency for the all currently selected IFs
1197 scan.set_restfreqs(freqs=1.4e9)
1198 # set multiple restfrequencies to all the selected data
1199 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1200 # If the number of IFs in the data is >= 2, IF0 gets the first
1201 # value, IF1 the second, etc. NOTE that freqs needs to be
1202 # specified in list of list (e.g. [[],[],...] ).
1203 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
1204 #set the given restfrequency for the whole table (by name)
1205 scan.set_restfreqs(freqs="OH1667")
1206
1207 Note:
1208 To do more sophisticated rest frequency setting, e.g. on a
1209 source and IF basis, use scantable.set_selection() before using
1210 this function.
1211 # provided your scantable is called scan
1212 selection = selector()
1213 selection.set_name("ORION*")
1214 selection.set_ifs([1])
1215 scan.set_selection(selection)
1216 scan.set_restfreqs(freqs=86.6e9)
1217
1218 """
1219 varlist = vars()
1220 from asap import linecatalog
1221 # simple value
1222 if isinstance(freqs, int) or isinstance(freqs, float):
1223 # TT mod
1224 #self._setrestfreqs(freqs, "",unit)
1225 self._setrestfreqs([freqs], [""],unit)
1226 # list of values
1227 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1228 # list values are scalars
1229 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1230 self._setrestfreqs(freqs, [""], unit)
1231 # list values are tuples, (value, name)
1232 elif isinstance(freqs[-1], dict):
1233 #sel = selector()
1234 #savesel = self._getselection()
1235 #iflist = self.getifnos()
1236 #for i in xrange(len(freqs)):
1237 # sel.set_ifs(iflist[i])
1238 # self._setselection(sel)
1239 # self._setrestfreqs(freqs[i], "",unit)
1240 #self._setselection(savesel)
1241 self._setrestfreqs(freqs["value"],
1242 freqs["name"], unit)
1243 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1244 sel = selector()
1245 savesel = self._getselection()
1246 iflist = self.getifnos()
1247 if len(freqs)>len(iflist):
1248 raise ValueError("number of elements in the list of lists exceeds the number of selected IFs")
1249 for i in xrange(len(freqs)):
1250 sel.set_ifs(iflist[i])
1251 self._setselection(sel)
1252 self._setrestfreqs(freqs[i], [""], unit)
1253 self._setselection(savesel)
1254 # freqs are to be taken from a linecatalog
1255 elif isinstance(freqs, linecatalog):
1256 sel = selector()
1257 savesel = self._getselection()
iflist = self.getifnos()
1258 for i in xrange(freqs.nrow()):
1259 sel.set_ifs(iflist[i])
1260 self._setselection(sel)
1261 self._setrestfreqs(freqs.get_frequency(i),
1262 freqs.get_name(i), "MHz")
1263 # ensure that we are not iterating past nIF
1264 if i == self.nif()-1: break
1265 self._setselection(savesel)
1266 else:
1267 return
1268 self._add_history("set_restfreqs", varlist)
1269
1270 def shift_refpix(self, delta):
1271 """
1272 Shift the reference pixel of the Spectral Coordinate by an
1273 integer amount.
1274 Parameters:
1275 delta: the amount to shift by
1276 Note:
1277 Be careful using this with broadband data.
1278 """
1279 Scantable.shift_refpix(self, delta)
1280
1281 def history(self, filename=None):
1282 """
1283 Print the history. Optionally to a file.
1284 Parameters:
1285 filename: The name of the file to save the history to.
1286 """
1287 hist = list(self._gethistory())
1288 out = "-"*80
1289 for h in hist:
1290 if h.startswith("---"):
1291 out += "\n"+h
1292 else:
1293 items = h.split("##")
1294 date = items[0]
1295 func = items[1]
1296 items = items[2:]
1297 out += "\n"+date+"\n"
1298 out += "Function: %s\n Parameters:" % (func)
1299 for i in items:
1300 s = i.split("=")
1301 out += "\n %s = %s" % (s[0], s[1])
1302 out += "\n"+"-"*80
1303 if filename is not None:
1304 if filename == "":
1305 filename = 'scantable_history.txt'
1306 import os
1307 filename = os.path.expandvars(os.path.expanduser(filename))
1308 if not os.path.isdir(filename):
1309 data = open(filename, 'w')
1310 data.write(out)
1311 data.close()
1312 else:
1313 msg = "Illegal file name '%s'." % (filename)
1314 if rcParams['verbose']:
1315 #print msg
1316 asaplog.push( msg )
1317 print_log( 'ERROR' )
1318 else:
1319 raise IOError(msg)
1320 if rcParams['verbose']:
1321 try:
1322 from IPython.genutils import page as pager
1323 except ImportError:
1324 from pydoc import pager
1325 pager(out)
1326 else:
1327 return out
1328 return
1329 #
1330 # Maths business
1331 #
1332 @print_log_dec
1333 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1334 """
1335 Return the (time) weighted average of a scan.
1336 Note:
1337 in channels only - align if necessary
1338 Parameters:
1339 mask: an optional mask (only used for 'var' and 'tsys'
1340 weighting)
1341 scanav: True averages each scan separately
1342 False (default) averages all scans together,
1343 weight: Weighting scheme.
1344 'none' (mean no weight)
1345 'var' (1/var(spec) weighted)
1346 'tsys' (1/Tsys**2 weighted)
1347 'tint' (integration time weighted)
1348 'tintsys' (Tint/Tsys**2)
1349 'median' ( median averaging)
1350 The default is 'tint'
1351 align: align the spectra in velocity before averaging. It takes
1352 the time of the first spectrum as reference time.
1353 Example:
1354 # time average the scantable without using a mask
1355 newscan = scan.average_time()
1356 """
1357 varlist = vars()
1358 weight = weight or 'TINT'
1359 mask = mask or ()
1360 scanav = (scanav and 'SCAN') or 'NONE'
1361 scan = (self, )
1362 try:
1363 if align:
1364 scan = (self.freq_align(insitu=False), )
1365 s = None
1366 if weight.upper() == 'MEDIAN':
1367 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1368 scanav))
1369 else:
1370 s = scantable(self._math._average(scan, mask, weight.upper(),
1371 scanav))
1372 except RuntimeError, msg:
1373 if rcParams['verbose']:
1374 #print msg
1375 print_log()
1376 asaplog.push( str(msg) )
1377 print_log( 'ERROR' )
1378 return
1379 else: raise
1380 s._add_history("average_time", varlist)
1381 print_log()
1382 return s
1383
1384 @print_log_dec
1385 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1386 """
1387 Return a scan where all spectra are converted to either
1388 Jansky or Kelvin depending upon the flux units of the scan table.
1389 By default the function tries to look the values up internally.
1390 If it can't find them (or if you want to over-ride), you must
1391 specify EITHER jyperk OR eta (and D which it will try to look up
1392 also if you don't set it). jyperk takes precedence if you set both.
1393 Parameters:
1394 jyperk: the Jy / K conversion factor
1395 eta: the aperture efficiency
1396 d: the geometric diameter (metres)
1397 insitu: if False a new scantable is returned.
1398 Otherwise, the scaling is done in-situ
1399 The default is taken from .asaprc (False)
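 Example (illustrative; the Jy/K factor shown is arbitrary):
     jyscan = scan.convert_flux(jyperk=12.7, insitu=False)
     scan.convert_flux(insitu=True)   # let the values be looked up internally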
1400 """
1401 if insitu is None: insitu = rcParams['insitu']
1402 self._math._setinsitu(insitu)
1403 varlist = vars()
1404 jyperk = jyperk or -1.0
1405 d = d or -1.0
1406 eta = eta or -1.0
1407 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1408 s._add_history("convert_flux", varlist)
1409 print_log()
1410 if insitu: self._assign(s)
1411 else: return s
1412
1413 @print_log_dec
1414 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1415 """
1416 Return a scan after applying a gain-elevation correction.
1417 The correction can be made via either a polynomial or a
1418 table-based interpolation (and extrapolation if necessary).
1419 You specify polynomial coefficients, an ascii table or neither.
1420 If you specify neither, then a polynomial correction will be made
1421 with built in coefficients known for certain telescopes (an error
1422 will occur if the instrument is not known).
1423 The data and Tsys are *divided* by the scaling factors.
1424 Parameters:
1425 poly: Polynomial coefficients (default None) to compute a
1426 gain-elevation correction as a function of
1427 elevation (in degrees).
1428 filename: The name of an ascii file holding correction factors.
1429 The first row of the ascii file must give the column
1430 names and these MUST include columns
1431 "ELEVATION" (degrees) and "FACTOR" (multiply data
1432 by this) somewhere.
1433 The second row must give the data type of the
1434 column. Use 'R' for Real and 'I' for Integer.
1435 An example file would be
1436 (actual factors are arbitrary) :
1437
1438 TIME ELEVATION FACTOR
1439 R R R
1440 0.1 0 0.8
1441 0.2 20 0.85
1442 0.3 40 0.9
1443 0.4 60 0.85
1444 0.5 80 0.8
1445 0.6 90 0.75
1446 method: Interpolation method when correcting from a table.
1447 Values are "nearest", "linear" (default), "cubic"
1448 and "spline"
1449 insitu: if False a new scantable is returned.
1450 Otherwise, the scaling is done in-situ
1451 The default is taken from .asaprc (False)
1452 """
1453
1454 if insitu is None: insitu = rcParams['insitu']
1455 self._math._setinsitu(insitu)
1456 varlist = vars()
1457 poly = poly or ()
1458 from os.path import expandvars
1459 filename = expandvars(filename)
1460 s = scantable(self._math._gainel(self, poly, filename, method))
1461 s._add_history("gain_el", varlist)
1462 print_log()
1463 if insitu:
1464 self._assign(s)
1465 else:
1466 return s
1467
1468 @print_log_dec
1469 def freq_align(self, reftime=None, method='cubic', insitu=None):
1470 """
1471 Return a scan where all rows have been aligned in frequency/velocity.
1472 The alignment frequency frame (e.g. LSRK) is that set by function
1473 set_freqframe.
1474 Parameters:
1475 reftime: reference time to align at. By default, the time of
1476 the first row of data is used.
1477 method: Interpolation method for regridding the spectra.
1478 Choose from "nearest", "linear", "cubic" (default)
1479 and "spline"
1480 insitu: if False a new scantable is returned.
1481 Otherwise, the scaling is done in-situ
1482 The default is taken from .asaprc (False)
1483 """
1484 if insitu is None: insitu = rcParams["insitu"]
1485 self._math._setinsitu(insitu)
1486 varlist = vars()
1487 reftime = reftime or ""
1488 s = scantable(self._math._freq_align(self, reftime, method))
1489 s._add_history("freq_align", varlist)
1490 print_log()
1491 if insitu: self._assign(s)
1492 else: return s
1493
1494 @print_log_dec
1495 def opacity(self, tau=None, insitu=None):
1496 """
1497 Apply an opacity correction. The data
1498 and Tsys are multiplied by the correction factor.
1499 Parameters:
1500 tau: (list of) opacity from which the correction factor is
1501 exp(tau*ZD)
1502 where ZD is the zenith-distance.
1503 If a list is provided, it has to be of length nIF,
1504 nIF*nPol or 1 and in order of IF/POL, e.g.
1505 [opif0pol0, opif0pol1, opif1pol0 ...]
1506 if tau is `None` the opacities are determined from a
1507 model.
1508 insitu: if False a new scantable is returned.
1509 Otherwise, the scaling is done in-situ
1510 The default is taken from .asaprc (False)
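 Example (illustrative; the opacity values are arbitrary):
     corrected = scan.opacity(tau=[0.083, 0.083], insitu=False)
     scan.opacity(insitu=True)   # tau is None: opacities from a model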
1511 """
1512 if insitu is None: insitu = rcParams['insitu']
1513 self._math._setinsitu(insitu)
1514 varlist = vars()
1515 if not hasattr(tau, "__len__"):
1516 tau = [tau]
1517 s = scantable(self._math._opacity(self, tau))
1518 s._add_history("opacity", varlist)
1519 print_log()
1520 if insitu: self._assign(s)
1521 else: return s
1522
1523 @print_log_dec
1524 def bin(self, width=5, insitu=None):
1525 """
1526 Return a scan where all spectra have been binned up.
1527 Parameters:
1528 width: The bin width (default=5) in pixels
1529 insitu: if False a new scantable is returned.
1530 Otherwise, the scaling is done in-situ
1531 The default is taken from .asaprc (False)
1532 """
1533 if insitu is None: insitu = rcParams['insitu']
1534 self._math._setinsitu(insitu)
1535 varlist = vars()
1536 s = scantable(self._math._bin(self, width))
1537 s._add_history("bin", varlist)
1538 print_log()
1539 if insitu:
1540 self._assign(s)
1541 else:
1542 return s
1543
1544 @print_log_dec
1545 def resample(self, width=5, method='cubic', insitu=None):
1546 """
1547 Return a scan where all spectra have been binned up.
1548
1549 Parameters:
1550 width: The bin width (default=5) in pixels
1551 method: Interpolation method for the resampling.
1552 Values are "nearest", "linear", "cubic" (default)
1553 and "spline"
1554 insitu: if False a new scantable is returned.
1555 Otherwise, the scaling is done in-situ
1556 The default is taken from .asaprc (False)
1557 """
1558 if insitu is None: insitu = rcParams['insitu']
1559 self._math._setinsitu(insitu)
1560 varlist = vars()
1561 s = scantable(self._math._resample(self, method, width))
1562 s._add_history("resample", varlist)
1563 print_log()
1564 if insitu: self._assign(s)
1565 else: return s
1566
1567 @print_log_dec
1568 def average_pol(self, mask=None, weight='none'):
1569 """
1570 Average the Polarisations together.
1571 Parameters:
1572 mask: An optional mask defining the region, where the
1573 averaging will be applied. The output will have all
1574 specified points masked.
1575 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1576 weighted), or 'tsys' (1/Tsys**2 weighted)
1577 """
1578 varlist = vars()
1579 mask = mask or ()
1580 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1581 s._add_history("average_pol", varlist)
1582 print_log()
1583 return s
1584
1585 @print_log_dec
1586 def average_beam(self, mask=None, weight='none'):
1587 """
1588 Average the Beams together.
1589 Parameters:
1590 mask: An optional mask defining the region, where the
1591 averaging will be applied. The output will have all
1592 specified points masked.
1593 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1594 weighted), or 'tsys' (1/Tsys**2 weighted)
1595 """
1596 varlist = vars()
1597 mask = mask or ()
1598 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1599 s._add_history("average_beam", varlist)
1600 print_log()
1601 return s
1602
1603 def parallactify(self, pflag):
1604 """
1605 Set a flag to indicate whether this data should be treated as having
1606 been 'parallactified' (total phase == 0.0)
1607 Parameters:
1608 pflag: Bool indicating whether to turn this on (True) or
1609 off (False)
1610 """
1611 varlist = vars()
1612 self._parallactify(pflag)
1613 self._add_history("parallactify", varlist)
1614
1615 @print_log_dec
1616 def convert_pol(self, poltype=None):
1617 """
1618 Convert the data to a different polarisation type.
1619 Note that you will need cross-polarisation terms for most conversions.
1620 Parameters:
1621 poltype: The new polarisation type. Valid types are:
1622 "linear", "circular", "stokes" and "linpol"
1623 """
1624 varlist = vars()
1625 try:
1626 s = scantable(self._math._convertpol(self, poltype))
1627 except RuntimeError, msg:
1628 if rcParams['verbose']:
1629 #print msg
1630 print_log()
1631 asaplog.push( str(msg) )
1632 print_log( 'ERROR' )
1633 return
1634 else:
1635 raise
1636 s._add_history("convert_pol", varlist)
1637 print_log()
1638 return s
1639
1640 @print_log_dec
1641 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1642 """
1643 Smooth the spectrum by the specified kernel (conserving flux).
1644 Parameters:
1645 kernel: The type of smoothing kernel. Select from
1646 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1647 or 'poly'
1648 width: The width of the kernel in pixels. For hanning this is
1649 ignored, otherwise it defaults to 5 pixels.
1650 For 'gaussian' it is the Full Width Half
1651 Maximum. For 'boxcar' it is the full width.
1652 For 'rmedian' and 'poly' it is the half width.
1653 order: Optional parameter for 'poly' kernel (default is 2), to
1654 specify the order of the polynomial. Ignored by all other
1655 kernels.
1656 plot: plot the original and the smoothed spectra.
1657 In this case each individual fit has to be approved
1658 by typing 'y' or 'n'
1659 insitu: if False a new scantable is returned.
1660 Otherwise, the scaling is done in-situ
1661 The default is taken from .asaprc (False)
1662 Example:
1663 none
1664 """
1665 if insitu is None: insitu = rcParams['insitu']
1666 self._math._setinsitu(insitu)
1667 varlist = vars()
1668
1669 if plot: orgscan = self.copy()
1670
1671 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1672 s._add_history("smooth", varlist)
1673
1674 if plot:
1675 if rcParams['plotter.gui']:
1676 from asap.asaplotgui import asaplotgui as asaplot
1677 else:
1678 from asap.asaplot import asaplot
1679 self._p=asaplot()
1680 self._p.set_panels()
1681 ylab=s._get_ordinate_label()
1682 #self._p.palette(0,["#777777","red"])
1683 for r in xrange(s.nrow()):
1684 xsm=s._getabcissa(r)
1685 ysm=s._getspectrum(r)
1686 xorg=orgscan._getabcissa(r)
1687 yorg=orgscan._getspectrum(r)
1688 self._p.clear()
1689 self._p.hold()
1690 self._p.set_axes('ylabel',ylab)
1691 self._p.set_axes('xlabel',s._getabcissalabel(r))
1692 self._p.set_axes('title',s._getsourcename(r))
1693 self._p.set_line(label='Original',color="#777777")
1694 self._p.plot(xorg,yorg)
1695 self._p.set_line(label='Smoothed',color="red")
1696 self._p.plot(xsm,ysm)
1697 ### Ugly part for legend
1698 for i in [0,1]:
1699 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1700 self._p.release()
1701 ### Ugly part for legend
1702 self._p.subplots[0]['lines']=[]
1703 res = raw_input("Accept smoothing ([y]/n): ")
1704 if res.upper() == 'N':
1705 s._setspectrum(yorg, r)
1706 self._p.unmap()
1707 self._p = None
1708 del orgscan
1709
1710 print_log()
1711 if insitu: self._assign(s)
1712 else: return s
1713
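# Illustrative smoothing calls (a sketch; the kernel widths are arbitrary):
#
#     hs = scan.smooth(kernel='hanning', insitu=False)
#     bs = scan.smooth(kernel='boxcar', width=10, insitu=False)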
1714 @print_log_dec
1715 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1716 insitu=None):
1717 """
1718 Return a scan which has been baselined (all rows) by a polynomial.
1719 Parameters:
1720 mask: an optional mask
1721 order: the order of the polynomial (default is 0)
1722 plot: plot the fit and the residual. In this case each
1723 individual fit has to be approved by typing 'y'
1724 or 'n'
1725 uselin: use linear polynomial fit
1726 insitu: if False a new scantable is returned.
1727 Otherwise, the scaling is done in-situ
1728 The default is taken from .asaprc (False)
1729 Example:
1730 # return a scan baselined by a third order polynomial,
1731 # not using a mask
1732 bscan = scan.poly_baseline(order=3)
1733 """
1734 if insitu is None: insitu = rcParams['insitu']
1735 if not insitu:
1736 workscan = self.copy()
1737 else:
1738 workscan = self
1739 varlist = vars()
1740 if mask is None:
1741 mask = [True for i in xrange(self.nchan(-1))]
1742
1743 from asap.asapfitter import fitter
1744 try:
1745 f = fitter()
1746 if uselin:
1747 f.set_function(lpoly=order)
1748 else:
1749 f.set_function(poly=order)
1750
1751 rows = range(workscan.nrow())
1752 if len(rows) > 0:
1753 self.blpars = []
1754
1755 for r in rows:
1756 # take into account flagtra info (CAS-1434)
1757 flagtra = workscan._getmask(r)
1758 actualmask = mask[:]
1759 if len(actualmask) == 0:
1760 actualmask = list(flagtra[:])
1761 else:
1762 if len(actualmask) != len(flagtra):
1763 raise RuntimeError, "Mask and flagtra have different length"
1764 else:
1765 for i in range(0, len(actualmask)):
1766 actualmask[i] = actualmask[i] and flagtra[i]
1767 f.set_scan(workscan, actualmask)
1768 f.x = workscan._getabcissa(r)
1769 f.y = workscan._getspectrum(r)
1770 f.data = None
1771 f.fit()
1772 if plot:
1773 f.plot(residual=True)
1774 x = raw_input("Accept fit ( [y]/n ): ")
1775 if x.upper() == 'N':
1776 self.blpars.append(None)
1777 continue
1778 workscan._setspectrum(f.fitter.getresidual(), r)
1779 self.blpars.append(f.get_parameters())
1780
1781 if plot:
1782 f._p.unmap()
1783 f._p = None
1784 workscan._add_history("poly_baseline", varlist)
1785 print_log()
1786 if insitu: self._assign(workscan)
1787 else: return workscan
1788 except RuntimeError:
1789 msg = "The fit failed, possibly because it didn't converge."
1790 if rcParams['verbose']:
1791 #print msg
1792 print_log()
1793 asaplog.push( str(msg) )
1794 print_log( 'ERROR' )
1795 return
1796 else:
1797 raise RuntimeError(msg)
1798
1799
1800 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1801 threshold=3, chan_avg_limit=1, plot=False,
1802 insitu=None):
1803 """
1804 Return a scan which has been baselined (all rows) by a polynomial.
1805 Spectral lines are detected first using linefinder and masked out
1806 to avoid them affecting the baseline solution.
1807
1808 Parameters:
1809 mask: an optional mask retrieved from scantable
1810 edge: an optional number of channels to drop at
1811 the edge of the spectrum. If only one value is
1812 specified, the same number will be dropped from
1813 both sides of the spectrum. Default is to keep
1814 all channels. Nested tuples represent individual
1815 edge selection for different IFs (the number of
1816 spectral channels can differ between IFs)
1817 order: the order of the polynomial (default is 0)
1818 threshold: the threshold used by the line finder. It is better
1819 to keep it large, as only strong lines affect the
1820 baseline solution.
1821 chan_avg_limit:
1822 a maximum number of consecutive spectral channels to
1823 average during the search of weak and broad lines.
1824 The default is no averaging (and no search for weak
1825 lines). If such lines can affect the fitted baseline
1826 (e.g. a high order polynomial is fitted), increase this
1827 parameter (usually values up to 8 are reasonable). Most
1828 users of this method should find the default value
1829 sufficient.
1830 plot: plot the fit and the residual. In this case
1831 each individual fit has to be approved by typing
1832 'y' or 'n'
1833 insitu: if False a new scantable is returned.
1834 Otherwise, the baseline subtraction is done in-situ
1835 The default is taken from .asaprc (False)
1836
1837 Example:
1838 scan2=scan.auto_poly_baseline(order=7)
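# a further sketch (values are illustrative): drop 30 edge channels
# on both sides of every IF and fit a second-order baseline
scan3 = scan.auto_poly_baseline(edge=(30, 30), order=2, threshold=5)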
1839 """
1840 if insitu is None: insitu = rcParams['insitu']
1841 varlist = vars()
1842 from asap.asapfitter import fitter
1843 from asap.asaplinefind import linefinder
1844 from asap import _is_sequence_or_number as _is_valid
1845
1846 # check whether edge is set up for each IF individually
1847 individualedge = False
1848 if len(edge) > 1:
1849 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1850 individualedge = True
1851
1852 if not _is_valid(edge, int) and not individualedge:
1853 raise ValueError, "Parameter 'edge' has to be an integer or a \
1854 pair of integers specified as a tuple. Nested tuples are allowed \
1855 to make individual selection for different IFs."
1856
1857 curedge = (0, 0)
1858 if individualedge:
1859 for edgepar in edge:
1860 if not _is_valid(edgepar, int):
1861 raise ValueError, "Each element of the 'edge' tuple has \
1862 to be a pair of integers or an integer."
1863 else:
1864 curedge = edge
1865
1866 # setup fitter
1867 f = fitter()
1868 f.set_function(poly=order)
1869
1870 # setup line finder
1871 fl = linefinder()
1872 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1873
1874 if not insitu:
1875 workscan = self.copy()
1876 else:
1877 workscan = self
1878
1879 fl.set_scan(workscan)
1880
1881 rows = range(workscan.nrow())
1882 # Save parameters of baseline fits & masklists as a class attribute.
1883 # NOTICE: It does not reflect changes in scantable!
1884 if len(rows) > 0:
1885 self.blpars=[]
1886 self.masklists=[]
1887 asaplog.push("Processing:")
1888 for r in rows:
1889 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1890 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1891 workscan.getpol(r), workscan.getcycle(r))
1892 asaplog.push(msg, False)
1893
1894 # figure out edge parameter
1895 if individualedge:
1896 if len(edge) <= workscan.getif(r):
1897 raise RuntimeError, "Number of edge elements appears to " \
1898 "be less than the number of IFs"
1899 curedge = edge[workscan.getif(r)]
1900
1901 # take into account flagtra info (CAS-1434)
1902 flagtra = workscan._getmask(r)
1903 actualmask = mask[:]
1904 if len(actualmask) == 0:
1905 actualmask = list(flagtra[:])
1906 else:
1907 if len(actualmask) != len(flagtra):
1908 raise RuntimeError, "Mask and flagtra have different lengths"
1909 else:
1910 for i in range(0, len(actualmask)):
1911 actualmask[i] = actualmask[i] and flagtra[i]
1912
1913 # setup line finder
1914 fl.find_lines(r, actualmask, curedge)
1915 outmask=fl.get_mask()
1916 f.set_scan(workscan, fl.get_mask())
1917 f.x = workscan._getabcissa(r)
1918 f.y = workscan._getspectrum(r)
1919 f.data = None
1920 f.fit()
1921
1922 # Show mask list
1923 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1924 msg = "mask range: "+str(masklist)
1925 asaplog.push(msg, False)
1926
1927 if plot:
1928 f.plot(residual=True)
1929 x = raw_input("Accept fit ( [y]/n ): ")
1930 if x.upper() == 'N':
1931 self.blpars.append(None)
1932 self.masklists.append(None)
1933 continue
1934
1935 workscan._setspectrum(f.fitter.getresidual(), r)
1936 self.blpars.append(f.get_parameters())
1937 self.masklists.append(masklist)
1938 if plot:
1939 f._p.unmap()
1940 f._p = None
1941 workscan._add_history("auto_poly_baseline", varlist)
1942 if insitu:
1943 self._assign(workscan)
1944 else:
1945 return workscan
1946
1947 @print_log_dec
1948 def rotate_linpolphase(self, angle):
1949 """
1950 Rotate the phase of the complex polarization O=Q+iU correlation.
1951 This is always done in situ in the raw data. So if you call this
1952 function more than once then each call rotates the phase further.
1953 Parameters:
1954 angle: The angle (degrees) to rotate (add) by.
1955 Examples:
1956 scan.rotate_linpolphase(2.3)
1957 """
1958 varlist = vars()
1959 self._math._rotate_linpolphase(self, angle)
1960 self._add_history("rotate_linpolphase", varlist)
1961 print_log()
1962 return
1963
1964 @print_log_dec
1965 def rotate_xyphase(self, angle):
1966 """
1967 Rotate the phase of the XY correlation. This is always done in situ
1968 in the data. So if you call this function more than once
1969 then each call rotates the phase further.
1970 Parameters:
1971 angle: The angle (degrees) to rotate (add) by.
1972 Examples:
1973 scan.rotate_xyphase(2.3)
1974 """
1975 varlist = vars()
1976 self._math._rotate_xyphase(self, angle)
1977 self._add_history("rotate_xyphase", varlist)
1978 print_log()
1979 return
1980
1981 @print_log_dec
1982 def swap_linears(self):
1983 """
1984 Swap the linear polarisations XX and YY, or more generally the
1985 first two polarisations, as this also works for circulars.
1986 """
1987 varlist = vars()
1988 self._math._swap_linears(self)
1989 self._add_history("swap_linears", varlist)
1990 print_log()
1991 return
1992
1993 @print_log_dec
1994 def invert_phase(self):
1995 """
1996 Invert the phase of the complex polarisation
1997 """
1998 varlist = vars()
1999 self._math._invert_phase(self)
2000 self._add_history("invert_phase", varlist)
2001 print_log()
2002 return
2003
2004 @print_log_dec
2005 def add(self, offset, insitu=None):
2006 """
2007 Return a scan where all spectra have the offset added
2008 Parameters:
2009 offset: the offset
2010 insitu: if False a new scantable is returned.
2011 Otherwise, the addition is done in-situ
2012 The default is taken from .asaprc (False)
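Example (a usage sketch; the offset value is illustrative):
# add a constant 1.5 to every spectrum and return a new scantable
sadd = scan.add(1.5, insitu=False)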
2013 """
2014 if insitu is None: insitu = rcParams['insitu']
2015 self._math._setinsitu(insitu)
2016 varlist = vars()
2017 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2018 s._add_history("add", varlist)
2019 print_log()
2020 if insitu:
2021 self._assign(s)
2022 else:
2023 return s
2024
2025 @print_log_dec
2026 def scale(self, factor, tsys=True, insitu=None):
2027 """
2028 Return a scan where all spectra are scaled by the given 'factor'
2029 Parameters:
2030 factor: the scaling factor (float or 1D float list)
2031 insitu: if False a new scantable is returned.
2032 Otherwise, the scaling is done in-situ
2033 The default is taken from .asaprc (False)
2034 tsys: if True (default) then apply the operation to Tsys
2035 as well as the data
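Example (a usage sketch; values are illustrative):
# multiply all spectra (and Tsys) by 10
sscaled = scan.scale(10.0, tsys=True, insitu=False)
# a 1D list of factors applies one factor per spectral channel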
2036 """
2037 if insitu is None: insitu = rcParams['insitu']
2038 self._math._setinsitu(insitu)
2039 varlist = vars()
2040 s = None
2041 import numpy
2042 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2043 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2044 from asapmath import _array2dOp
2045 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2046 else:
2047 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2048 else:
2049 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2050 s._add_history("scale", varlist)
2051 print_log()
2052 if insitu:
2053 self._assign(s)
2054 else:
2055 return s
2056
2057 def set_sourcetype(self, match, matchtype="pattern",
2058 sourcetype="reference"):
2059 """
2060 Set the type of the source to be a source or reference scan,
2061 using the provided pattern:
2062 Parameters:
2063 match: a Unix style pattern, regular expression or selector
2064 matchtype: 'pattern' (default) UNIX style pattern or
2065 'regex' regular expression
2066 sourcetype: the type of the source to use (source/reference)
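Example (the pattern shown is illustrative):
# mark every scan whose source name ends in '_R' as a reference scan
scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")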
2067 """
2068 varlist = vars()
2069 basesel = self.get_selection()
2070 stype = -1
2071 if sourcetype.lower().startswith("r"):
2072 stype = 1
2073 elif sourcetype.lower().startswith("s"):
2074 stype = 0
2075 else:
2076 raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2077 if matchtype.lower().startswith("p"):
2078 matchtype = "pattern"
2079 elif matchtype.lower().startswith("r"):
2080 matchtype = "regex"
2081 else:
2082 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2083 sel = selector()
2084 if isinstance(match, selector):
2085 sel = match
2086 else:
2087 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2088 self.set_selection(basesel+sel)
2089 self._setsourcetype(stype)
2090 self.set_selection(basesel)
2091 self._add_history("set_sourcetype", varlist)
2092
2093 @print_log_dec
2094 def auto_quotient(self, preserve=True, mode='paired', verify=False):
2095 """
2096 This function allows you to build quotients automatically.
2097 It assumes the observation has the same number of
2098 "ons" and "offs".
2099 Parameters:
2100 preserve: you can preserve (default) the continuum or
2101 remove it. The equations used are
2102 preserve: Output = Toff * (on/off) - Toff
2103 remove: Output = Toff * (on/off) - Ton
2104 mode: the on/off detection mode
2105 'paired' (default)
2106 identifies 'off' scans by the
2107 trailing '_R' (Mopra/Parkes) or
2108 '_e'/'_w' (Tid) and matches
2109 on/off pairs from the observing pattern
2110 'time'
2111 finds the closest off in time
2112
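Example (a usage sketch):
# form quotients from paired on/off scans, keeping the continuum
q = scan.auto_quotient(preserve=True, mode='paired')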
2113 """
2114 modes = ["time", "paired"]
2115 if not mode in modes:
2116 msg = "Please provide a valid mode. Valid modes are %s" % (modes)
2117 raise ValueError(msg)
2118 varlist = vars()
2119 s = None
2120 if mode.lower() == "paired":
2121 basesel = self.get_selection()
2122 sel = selector()+basesel
2123 sel.set_query("SRCTYPE==1")
2124 self.set_selection(sel)
2125 offs = self.copy()
2126 sel.set_query("SRCTYPE==0")
2127 self.set_selection(sel)
2128 ons = self.copy()
2129 s = scantable(self._math._quotient(ons, offs, preserve))
2130 self.set_selection(basesel)
2131 elif mode.lower() == "time":
2132 s = scantable(self._math._auto_quotient(self, mode, preserve))
2133 s._add_history("auto_quotient", varlist)
2134 print_log()
2135 return s
2136
2137 @print_log_dec
2138 def mx_quotient(self, mask = None, weight='median', preserve=True):
2139 """
2140 Form a quotient using "off" beams when observing in "MX" mode.
2141 Parameters:
2142 mask: an optional mask to be used when weight == 'stddev'
2143 weight: How to average the off beams. Default is 'median'.
2144 preserve: you can preserve (default) the continuum or
2145 remove it. The equations used are
2146 preserve: Output = Toff * (on/off) - Toff
2147 remove: Output = Toff * (on/off) - Ton
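Example (a usage sketch using the defaults):
# median-average the off beams and keep the continuum
q = scan.mx_quotient(weight='median', preserve=True)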
2148 """
2149 mask = mask or ()
2150 varlist = vars()
2151 on = scantable(self._math._mx_extract(self, 'on'))
2152 preoff = scantable(self._math._mx_extract(self, 'off'))
2153 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2154 from asapmath import quotient
2155 q = quotient(on, off, preserve)
2156 q._add_history("mx_quotient", varlist)
2157 print_log()
2158 return q
2159
2160 @print_log_dec
2161 def freq_switch(self, insitu=None):
2162 """
2163 Apply frequency switching to the data.
2164 Parameters:
2165 insitu: if False a new scantable is returned.
2166 Otherwise, the switching is done in-situ
2167 The default is taken from .asaprc (False)
2168 Example:
2169 none
2170 """
2171 if insitu is None: insitu = rcParams['insitu']
2172 self._math._setinsitu(insitu)
2173 varlist = vars()
2174 s = scantable(self._math._freqswitch(self))
2175 s._add_history("freq_switch", varlist)
2176 print_log()
2177 if insitu: self._assign(s)
2178 else: return s
2179
2180 @print_log_dec
2181 def recalc_azel(self):
2182 """
2183 Recalculate the azimuth and elevation for each position.
2184 Parameters:
2185 none
2186 Example:
2187 """
2188 varlist = vars()
2189 self._recalcazel()
2190 self._add_history("recalc_azel", varlist)
2191 print_log()
2192 return
2193
2194 @print_log_dec
2195 def __add__(self, other):
2196 varlist = vars()
2197 s = None
2198 if isinstance(other, scantable):
2199 s = scantable(self._math._binaryop(self, other, "ADD"))
2200 elif isinstance(other, float):
2201 s = scantable(self._math._unaryop(self, other, "ADD", False))
2202 else:
2203 raise TypeError("Other input is not a scantable or float value")
2204 s._add_history("operator +", varlist)
2205 return s
2206
2207 @print_log_dec
2208 def __sub__(self, other):
2209 """
2210 implicit on all axes and on Tsys
2211 """
2212 varlist = vars()
2213 s = None
2214 if isinstance(other, scantable):
2215 s = scantable(self._math._binaryop(self, other, "SUB"))
2216 elif isinstance(other, float):
2217 s = scantable(self._math._unaryop(self, other, "SUB", False))
2218 else:
2219 raise TypeError("Other input is not a scantable or float value")
2220 s._add_history("operator -", varlist)
2221 return s
2222
2223 @print_log_dec
2224 def __mul__(self, other):
2225 """
2226 implicit on all axes and on Tsys
2227 """
2228 varlist = vars()
2229 s = None
2230 if isinstance(other, scantable):
2231 s = scantable(self._math._binaryop(self, other, "MUL"))
2232 elif isinstance(other, float):
2233 s = scantable(self._math._unaryop(self, other, "MUL", False))
2234 else:
2235 raise TypeError("Other input is not a scantable or float value")
2236 s._add_history("operator *", varlist)
2237 return s
2238
2239
2240 @print_log_dec
2241 def __div__(self, other):
2242 """
2243 implicit on all axes and on Tsys
2244 """
2245 varlist = vars()
2246 s = None
2247 if isinstance(other, scantable):
2248 s = scantable(self._math._binaryop(self, other, "DIV"))
2249 elif isinstance(other, float):
2250 if other == 0.0:
2251 raise ZeroDivisionError("Dividing by zero is not recommended")
2252 s = scantable(self._math._unaryop(self, other, "DIV", False))
2253 else:
2254 raise TypeError("Other input is not a scantable or float value")
2255 s._add_history("operator /", varlist)
2256 return s
2257
2258 def get_fit(self, row=0):
2259 """
2260 Print or return the stored fits for a row in the scantable
2261 Parameters:
2262 row: the row to which the fit has been applied.
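Example (illustrative):
# return the stored fit of row 0 as a dictionary; with
# rcParams['verbose'] enabled it is printed to the log instead
fit = scan.get_fit(0)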
2263 """
2264 if row >= self.nrow():
2265 return
2266 from asap.asapfit import asapfit
2267 fit = asapfit(self._getfit(row))
2268 if rcParams['verbose']:
2269 #print fit
2270 asaplog.push( '%s' %(fit) )
2271 print_log()
2272 return
2273 else:
2274 return fit.as_dict()
2275
2276 def flag_nans(self):
2277 """
2278 Utility function to flag NaN values in the scantable.
2279 """
2280 import numpy
2281 basesel = self.get_selection()
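# Work row by row: restrict the selection to a single row, test its
# spectrum for NaNs and flag the affected channels.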
2282 for i in range(self.nrow()):
2283 sel = self.get_row_selector(i)
2284 self.set_selection(basesel+sel)
2285 nans = numpy.isnan(self._getspectrum(0))
2286 if numpy.any(nans):
2287 bnans = [ bool(v) for v in nans]
2288 self.flag(bnans)
2289 self.set_selection(basesel)
2290
2291 def get_row_selector(self, rowno):
2292 return selector(beams=self.getbeam(rowno),
2293 ifs=self.getif(rowno),
2294 pols=self.getpol(rowno),
2295 scans=self.getscan(rowno),
2296 cycles=self.getcycle(rowno))
2297
2298 def _add_history(self, funcname, parameters):
2299 if not rcParams['scantable.history']:
2300 return
2301 # create date
2302 sep = "##"
2303 from datetime import datetime
2304 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2305 hist = dstr+sep
2306 hist += funcname+sep#cdate+sep
2307 if parameters.has_key('self'): del parameters['self']
2308 for k, v in parameters.iteritems():
2309 if type(v) is dict:
2310 for k2, v2 in v.iteritems():
2311 hist += k2
2312 hist += "="
2313 if isinstance(v2, scantable):
2314 hist += 'scantable'
2315 elif k2 == 'mask':
2316 if isinstance(v2, list) or isinstance(v2, tuple):
2317 hist += str(self._zip_mask(v2))
2318 else:
2319 hist += str(v2)
2320 else:
2321 hist += str(v2)
2322 else:
2323 hist += k
2324 hist += "="
2325 if isinstance(v, scantable):
2326 hist += 'scantable'
2327 elif k == 'mask':
2328 if isinstance(v, list) or isinstance(v, tuple):
2329 hist += str(self._zip_mask(v))
2330 else:
2331 hist += str(v)
2332 else:
2333 hist += str(v)
2334 hist += sep
2335 hist = hist[:-2] # remove trailing '##'
2336 self._addhistory(hist)
2337
2338
2339 def _zip_mask(self, mask):
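# Collapse a boolean channel mask into a list of [start, end) index
# segments, e.g. [False, True, True, False, True] -> [[1, 3], [4, 5]].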
2340 mask = list(mask)
2341 i = 0
2342 segments = []
2343 while mask[i:].count(1):
2344 i += mask[i:].index(1)
2345 if mask[i:].count(0):
2346 j = i + mask[i:].index(0)
2347 else:
2348 j = len(mask)
2349 segments.append([i, j])
2350 i = j
2351 return segments
2352
2353 def _get_ordinate_label(self):
2354 fu = "("+self.get_fluxunit()+")"
2355 import re
2356 lbl = "Intensity"
2357 if re.match(".K.", fu):
2358 lbl = "Brightness Temperature "+ fu
2359 elif re.match(".Jy.", fu):
2360 lbl = "Flux density "+ fu
2361 return lbl
2362
2363 def _check_ifs(self):
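# Return True if all IFs that contain data have the same number of
# channels (i.e. the mean channel count equals the first one).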
2364 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2365 nchans = filter(lambda t: t > 0, nchans)
2366 return (sum(nchans)/len(nchans) == nchans[0])
2367
2368 def _fill(self, names, unit, average, getpt, antenna):
2369 first = True
2370 fullnames = []
2371 for name in names:
2372 name = os.path.expandvars(name)
2373 name = os.path.expanduser(name)
2374 if not os.path.exists(name):
2375 msg = "File '%s' does not exist" % (name)
2376 if rcParams['verbose']:
2377 asaplog.push(msg)
2378 print_log( 'ERROR' )
2379 return
2380 raise IOError(msg)
2381 fullnames.append(name)
2382 if average:
2383 asaplog.push('Auto averaging integrations')
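# Storage type for the new table: 1 -> disk-based table, 0 -> in memory.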
2384 stype = int(rcParams['scantable.storage'].lower() == 'disk')
2385 for name in fullnames:
2386 tbl = Scantable(stype)
2387 r = filler(tbl)
2388 rx = rcParams['scantable.reference']
2389 r.setreferenceexpr(rx)
2390 msg = "Importing %s..." % (name)
2391 asaplog.push(msg, False)
2392 print_log()
2393 r.open(name)# antenna, -1, -1, getpt)
2394 r.fill()
2395 if average:
2396 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2397 if not first:
2398 tbl = self._math._merge([self, tbl])
2399 Scantable.__init__(self, tbl)
2400 r.close()
2401 del r, tbl
2402 first = False
2403 if unit is not None:
2404 self.set_fluxunit(unit)
2405 if not is_casapy():
2406 self.set_freqframe(rcParams['scantable.freqframe'])
2407
2408 def __getitem__(self, key):
2409 if key < 0:
2410 key += self.nrow()
2411 if key >= self.nrow():
2412 raise IndexError("Row index out of range.")
2413 return self._getspectrum(key)
2414
2415 def __setitem__(self, key, value):
2416 if key < 0:
2417 key += self.nrow()
2418 if key >= self.nrow():
2419 raise IndexError("Row index out of range.")
2420 if not hasattr(value, "__len__") or \
2421 len(value) > self.nchan(self.getif(key)):
2422 raise ValueError("Spectrum length doesn't match.")
2423 return self._setspectrum(value, key)
2424
2425 def __len__(self):
2426 return self.nrow()
2427
2428 def __iter__(self):
2429 for i in range(len(self)):
2430 yield self[i]