source: trunk/python/scantable.py@ 1845

Last change on this file since 1845 was 1845, checked in by Malte Marquarding, 15 years ago

Changed set_restfreqs to support the old behaviour: if a list of floats/ints is given, the items are applied one per IF. Also fixed some broken code for the other cases.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 91.3 KB
1import os
2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
7from asap.env import is_casapy
8from asap._asap import Scantable
9from asap._asap import filler
10from asap.parameters import rcParams
11from asap.logging import asaplog, print_log, print_log_dec
12from asap.selector import selector
13from asap.linecatalog import linecatalog
14from asap.coordinate import coordinate
15from asap.utils import _n_bools, mask_not, mask_and, mask_or
16
17
18def preserve_selection(func):
19 @wraps_dec(func)
20 def wrap(obj, *args, **kw):
21 basesel = obj.get_selection()
22 val = func(obj, *args, **kw)
23 obj.set_selection(basesel)
24 return val
25 return wrap
26
27
28def is_scantable(filename):
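    """Return True if the given path looks like an ASAP scantable on disk,
    i.e. a directory containing 'table.info' but no 'table.f1'."""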
29 return (os.path.isdir(filename)
30 and not os.path.exists(filename+'/table.f1')
31 and os.path.exists(filename+'/table.info'))
32
33
34class scantable(Scantable):
35 """
36 The ASAP container for scans
37 """
38
39 @print_log_dec
40 def __init__(self, filename, average=None, unit=None, getpt=None,
41 antenna=None, parallactify=None):
42 """
43 Create a scantable from a saved one or make a reference
44 Parameters:
45 filename: the name of an asap table on disk
46 or
47 the name of a rpfits/sdfits/ms file
48 (integrations within scans are auto averaged
49 and the whole file is read)
50 or
51 [advanced] a reference to an existing
52 scantable
53         average:     average all integrations within a scan on read.
54 The default (True) is taken from .asaprc.
55 unit: brightness unit; must be consistent with K or Jy.
56 Over-rides the default selected by the filler
57 (input rpfits/sdfits/ms) or replaces the value
58 in existing scantables
59 getpt: for MeasurementSet input data only:
60 If True, all pointing data are filled.
61                      The default is True; setting this to False can make
62                      loading the MS data faster in some cases.
63 antenna: Antenna selection. integer (id) or string (name
64 or id).
65         parallactify: Indicate that the data had been parallactified.
66                       Default is taken from the .asaprc file.
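        Example:
            # minimal sketch (the file name is illustrative): create a
            # scantable from an rpfits file, auto-averaging integrations
            # within each scan
            s = scantable('myobs.rpf', average=True)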
67 """
68 if average is None:
69 average = rcParams['scantable.autoaverage']
70 if getpt is None:
71 getpt = True
72 if antenna is not None:
73 asaplog.push("Antenna selection currently unsupported."
74 "Using '0'")
75 print_log('WARN')
76 if antenna is None:
77 antenna = ''
78 elif type(antenna) == int:
79 antenna = '%s' % antenna
80 elif type(antenna) == list:
81 tmpstr = ''
82 for i in range( len(antenna) ):
83 if type(antenna[i]) == int:
84 tmpstr = tmpstr + ('%s,'%(antenna[i]))
85 elif type(antenna[i]) == str:
86 tmpstr=tmpstr+antenna[i]+','
87 else:
88 asaplog.push('Bad antenna selection.')
89 print_log('ERROR')
90 return
91 antenna = tmpstr.rstrip(',')
92 parallactify = parallactify or rcParams['scantable.parallactify']
93 varlist = vars()
94 from asap._asap import stmath
95 self._math = stmath( rcParams['insitu'] )
96 if isinstance(filename, Scantable):
97 Scantable.__init__(self, filename)
98 else:
99 if isinstance(filename, str):
100 filename = os.path.expandvars(filename)
101 filename = os.path.expanduser(filename)
102 if not os.path.exists(filename):
103 s = "File '%s' not found." % (filename)
104 if rcParams['verbose']:
105 asaplog.push(s)
106 print_log('ERROR')
107 return
108 raise IOError(s)
109 if is_scantable(filename):
110 ondisk = rcParams['scantable.storage'] == 'disk'
111 Scantable.__init__(self, filename, ondisk)
112 if unit is not None:
113 self.set_fluxunit(unit)
114 # do not reset to the default freqframe
115 #self.set_freqframe(rcParams['scantable.freqframe'])
116 elif os.path.isdir(filename) \
117 and not os.path.exists(filename+'/table.f1'):
118                 msg = "The given file '%s' is not a valid " \
119 "asap table." % (filename)
120 if rcParams['verbose']:
121 #print msg
122 asaplog.push( msg )
123 print_log( 'ERROR' )
124 return
125 else:
126 raise IOError(msg)
127 else:
128 self._fill([filename], unit, average, getpt, antenna)
129 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
130 and isinstance(filename[-1], str):
131 self._fill(filename, unit, average, getpt, antenna)
132 self.parallactify(parallactify)
133 self._add_history("scantable", varlist)
134 print_log()
135
136 @print_log_dec
137 def save(self, name=None, format=None, overwrite=False):
138 """
139 Store the scantable on disk. This can be an asap (aips++) Table,
140 SDFITS or MS2 format.
141 Parameters:
142         name:        the name of the output file. For format "ASCII"
143 this is the root file name (data in 'name'.txt
144 and header in 'name'_header.txt)
145 format: an optional file format. Default is ASAP.
146 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
147 'SDFITS' (save as SDFITS file)
148 'ASCII' (saves as ascii text file)
149 'MS2' (saves as an aips++
150 MeasurementSet V2)
151 'FITS' (save as image FITS - not
152 readable by class)
153 'CLASS' (save as FITS readable by CLASS)
154 overwrite: If the file should be overwritten if it exists.
155 The default False is to return with warning
156 without writing the output. USE WITH CARE.
157 Example:
158 scan.save('myscan.asap')
159 scan.save('myscan.sdfits', 'SDFITS')
160 """
161 from os import path
162 format = format or rcParams['scantable.save']
163 suffix = '.'+format.lower()
164 if name is None or name == "":
165 name = 'scantable'+suffix
166 msg = "No filename given. Using default name %s..." % name
167 asaplog.push(msg)
168 name = path.expandvars(name)
169 if path.isfile(name) or path.isdir(name):
170 if not overwrite:
171 msg = "File %s exists." % name
172 if rcParams['verbose']:
173 #print msg
174 asaplog.push( msg )
175 print_log( 'ERROR' )
176 return
177 else:
178 raise IOError(msg)
179 format2 = format.upper()
180 if format2 == 'ASAP':
181 self._save(name)
182 else:
183 from asap._asap import stwriter as stw
184 writer = stw(format2)
185 writer.write(self, name)
186 print_log()
187 return
188
189 def copy(self):
190 """
191 Return a copy of this scantable.
192 Note:
193 This makes a full (deep) copy. scan2 = scan1 makes a reference.
194 Parameters:
195 none
196 Example:
197 copiedscan = scan.copy()
198 """
199 sd = scantable(Scantable._copy(self))
200 return sd
201
202 def drop_scan(self, scanid=None):
203 """
204 Return a new scantable where the specified scan number(s) has(have)
205 been dropped.
206 Parameters:
207 scanid: a (list of) scan number(s)
208 """
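        # usage sketch (illustrative, for a scantable 'scan'):
        #   trimmed = scan.drop_scan([2, 3])   # drop scans 2 and 3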
209 from asap import _is_sequence_or_number as _is_valid
210 from asap import _to_list
211 from asap import unique
212 if not _is_valid(scanid):
213 if rcParams['verbose']:
214 #print "Please specify a scanno to drop from the scantable"
215 asaplog.push( 'Please specify a scanno to drop from the scantable' )
216 print_log( 'ERROR' )
217 return
218 else:
219 raise RuntimeError("No scan given")
220 try:
221 scanid = _to_list(scanid)
222 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
223 for sid in scanid: allscans.remove(sid)
224 if len(allscans) == 0:
225 raise ValueError("Can't remove all scans")
226 except ValueError:
227 if rcParams['verbose']:
228 #print "Couldn't find any match."
229 print_log()
230 asaplog.push( "Couldn't find any match." )
231 print_log( 'ERROR' )
232 return
233 else: raise
234 try:
235 sel = selector(scans=allscans)
236 return self._select_copy(sel)
237 except RuntimeError:
238 if rcParams['verbose']:
239 #print "Couldn't find any match."
240 print_log()
241 asaplog.push( "Couldn't find any match." )
242 print_log( 'ERROR' )
243 else:
244 raise
245
246 def _select_copy(self, selection):
247 orig = self.get_selection()
248 self.set_selection(orig+selection)
249 cp = self.copy()
250 self.set_selection(orig)
251 return cp
252
253 def get_scan(self, scanid=None):
254 """
255 Return a specific scan (by scanno) or collection of scans (by
256 source name) in a new scantable.
257 Note:
258 See scantable.drop_scan() for the inverse operation.
259 Parameters:
260 scanid: a (list of) scanno or a source name, unix-style
261 patterns are accepted for source name matching, e.g.
262                     '*_R' gets all 'ref' scans
263 Example:
264 # get all scans containing the source '323p459'
265 newscan = scan.get_scan('323p459')
266 # get all 'off' scans
267 refscans = scan.get_scan('*_R')
268             # get a subset of scans by scanno (as listed in scan.summary())
269 newscan = scan.get_scan([0, 2, 7, 10])
270 """
271 if scanid is None:
272 if rcParams['verbose']:
273 #print "Please specify a scan no or name to " \
274 # "retrieve from the scantable"
275 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
276 print_log( 'ERROR' )
277 return
278 else:
279 raise RuntimeError("No scan given")
280
281 try:
282 bsel = self.get_selection()
283 sel = selector()
284 if type(scanid) is str:
285 sel.set_name(scanid)
286 return self._select_copy(sel)
287 elif type(scanid) is int:
288 sel.set_scans([scanid])
289 return self._select_copy(sel)
290 elif type(scanid) is list:
291 sel.set_scans(scanid)
292 return self._select_copy(sel)
293 else:
294                 msg = "Illegal scanid type, use 'int' or 'list' of ints."
295 if rcParams['verbose']:
296 #print msg
297 asaplog.push( msg )
298 print_log( 'ERROR' )
299 else:
300 raise TypeError(msg)
301 except RuntimeError:
302 if rcParams['verbose']:
303 #print "Couldn't find any match."
304 print_log()
305 asaplog.push( "Couldn't find any match." )
306 print_log( 'ERROR' )
307 else: raise
308
309 def __str__(self):
310 return Scantable._summary(self, True)
311
312 def summary(self, filename=None):
313 """
314 Print a summary of the contents of this scantable.
315 Parameters:
316             filename:    the name of a file to write the output to
317 Default - no file output
318 """
319 info = Scantable._summary(self, True)
320 if filename is not None:
321             if filename == "":
322 filename = 'scantable_summary.txt'
323 from os.path import expandvars, isdir
324 filename = expandvars(filename)
325 if not isdir(filename):
326 data = open(filename, 'w')
327 data.write(info)
328 data.close()
329 else:
330 msg = "Illegal file name '%s'." % (filename)
331 if rcParams['verbose']:
332 #print msg
333 asaplog.push( msg )
334 print_log( 'ERROR' )
335 else:
336 raise IOError(msg)
337 if rcParams['verbose']:
338 try:
339 from IPython.genutils import page as pager
340 except ImportError:
341 from pydoc import pager
342 pager(info)
343 else:
344 return info
345
346 def get_spectrum(self, rowno):
347         """Return the spectrum for the specified row in the scantable as a list.
348 Parameters:
349 rowno: the row number to retrieve the spectrum from
350 """
351 return self._getspectrum(rowno)
352
353 def get_mask(self, rowno):
354         """Return the mask for the specified row in the scantable as a list.
355 Parameters:
356 rowno: the row number to retrieve the mask from
357 """
358 return self._getmask(rowno)
359
360 def set_spectrum(self, spec, rowno):
361         """Set the spectrum for the specified row in the scantable.
362 Parameters:
363 spec: the spectrum
364 rowno: the row number to set the spectrum for
365 """
366 assert(len(spec) == self.nchan())
367 return self._setspectrum(spec, rowno)
368
369 def get_coordinate(self, rowno):
370         """Return the (spectral) coordinate for a given 'rowno'.
371 NOTE:
372 * This coordinate is only valid until a scantable method modifies
373 the frequency axis.
374             * This coordinate contains the original frequency set-up,
375 NOT the new frame. The conversions however are done using the user
376 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
377 use scantable.freq_align first. Without it there is no closure,
378 i.e.
379 c = myscan.get_coordinate(0)
380 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
381
382 Parameters:
383 rowno: the row number for the spectral coordinate
384
385 """
386 return coordinate(Scantable.get_coordinate(self, rowno))
387
388 def get_selection(self):
389 """
390 Get the selection object currently set on this scantable.
391 Parameters:
392 none
393 Example:
394 sel = scan.get_selection()
395 sel.set_ifs(0) # select IF 0
396 scan.set_selection(sel) # apply modified selection
397 """
398 return selector(self._getselection())
399
400 def set_selection(self, selection=None, **kw):
401 """
402 Select a subset of the data. All following operations on this scantable
403         are only applied to this selection.
404 Parameters:
405 selection: a selector object (default unset the selection),
406
407 or
408
409 any combination of
410 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
411
412 Examples:
413 sel = selector() # create a selection object
414             sel.set_scans([0, 3])    # select SCANNO 0 and 3
415 scan.set_selection(sel) # set the selection
416             scan.summary()           # will only print summary of scanno 0 and 3
417 scan.set_selection() # unset the selection
418 # or the equivalent
419 scan.set_selection(scans=[0,3])
420             scan.summary()           # will only print summary of scanno 0 and 3
421 scan.set_selection() # unset the selection
422 """
423 if selection is None:
424 # reset
425 if len(kw) == 0:
426 selection = selector()
427 else:
428 # try keywords
429 for k in kw:
430 if k not in selector.fields:
431 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
432 selection = selector(**kw)
433 self._setselection(selection)
434
435 def get_row(self, row=0, insitu=None):
436 """
437 Select a row in the scantable.
438 Return a scantable with single row.
439 Parameters:
440 row: row no of integration, default is 0.
441 insitu: if False a new scantable is returned.
442 Otherwise, the scaling is done in-situ
443 The default is taken from .asaprc (False)
444 """
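        # usage sketch (illustrative, for a scantable 'scan'):
        #   row3 = scan.get_row(3, insitu=False)   # row 3 as a new single-row scantable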
445 if insitu is None: insitu = rcParams['insitu']
446 if not insitu:
447 workscan = self.copy()
448 else:
449 workscan = self
450 # Select a row
451 sel=selector()
452 sel.set_scans([workscan.getscan(row)])
453 sel.set_cycles([workscan.getcycle(row)])
454 sel.set_beams([workscan.getbeam(row)])
455 sel.set_ifs([workscan.getif(row)])
456 sel.set_polarisations([workscan.getpol(row)])
457 sel.set_name(workscan._getsourcename(row))
458 workscan.set_selection(sel)
459 if not workscan.nrow() == 1:
460             msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
461 raise RuntimeError(msg)
462 del sel
463 if insitu:
464 self._assign(workscan)
465 else:
466 return workscan
467
468 #def stats(self, stat='stddev', mask=None):
469 def stats(self, stat='stddev', mask=None, form='3.3f'):
470 """
471 Determine the specified statistic of the current beam/if/pol
472 Takes a 'mask' as an optional parameter to specify which
473 channels should be excluded.
474 Parameters:
475 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
476 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
477 mask: an optional mask specifying where the statistic
478 should be determined.
479 form: format string to print statistic values
480 Example:
481 scan.set_unit('channel')
482 msk = scan.create_mask([100, 200], [500, 600])
483             scan.stats(stat='mean', mask=msk)
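            # illustrative: abscissa position of the maximum within the mask
            scan.stats(stat='max_abc', mask=msk)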
484 """
485 mask = mask or []
486 if not self._check_ifs():
487 raise ValueError("Cannot apply mask as the IFs have different "
488                              "number of channels. Please use set_selection() "
489 "to select individual IFs")
490 rtnabc = False
491 if stat.lower().endswith('_abc'): rtnabc = True
492 getchan = False
493 if stat.lower().startswith('min') or stat.lower().startswith('max'):
494 chan = self._math._minmaxchan(self, mask, stat)
495 getchan = True
496 statvals = []
497 if not rtnabc: statvals = self._math._stats(self, mask, stat)
498
499 #def cb(i):
500 # return statvals[i]
501
502 #return self._row_callback(cb, stat)
503
504 label=stat
505 #callback=cb
506 out = ""
507 #outvec = []
508 sep = '-'*50
509 for i in range(self.nrow()):
510 refstr = ''
511 statunit= ''
512 if getchan:
513 qx, qy = self.chan2data(rowno=i, chan=chan[i])
514 if rtnabc:
515 statvals.append(qx['value'])
516 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
517 statunit= '['+qx['unit']+']'
518 else:
519 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
520
521 tm = self._gettime(i)
522 src = self._getsourcename(i)
523 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
524 out += 'Time[%s]:\n' % (tm)
525 if self.nbeam(-1) > 1:
526 out += ' Beam[%d] ' % (self.getbeam(i))
527 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
528 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
529 #outvec.append(callback(i))
530 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
531 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
532 out += sep+"\n"
533
534 if rcParams['verbose']:
535 import os
536 if os.environ.has_key( 'USER' ):
537 usr=os.environ['USER']
538 else:
539 import commands
540 usr=commands.getoutput( 'whoami' )
541 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
542 f=open(tmpfile,'w')
543 print >> f, sep
544 print >> f, ' %s %s' % (label, statunit)
545 print >> f, sep
546 print >> f, out
547 f.close()
548 f=open(tmpfile,'r')
549 x=f.readlines()
550 f.close()
551 blanc=''
552 asaplog.push(blanc.join(x), False)
553 #for xx in x:
554 # asaplog.push( xx, False )
555 print_log()
556 return statvals
557
558 def chan2data(self, rowno=0, chan=0):
559 """
560 Returns channel/frequency/velocity and spectral value
561 at an arbitrary row and channel in the scantable.
562 Parameters:
563 rowno: a row number in the scantable. Default is the
564 first row, i.e. rowno=0
565 chan: a channel in the scantable. Default is the first
566                    channel, i.e. chan=0
567 """
568 if isinstance(rowno, int) and isinstance(chan, int):
569 qx = {'unit': self.get_unit(),
570 'value': self._getabcissa(rowno)[chan]}
571 qy = {'unit': self.get_fluxunit(),
572 'value': self._getspectrum(rowno)[chan]}
573 return qx, qy
574
575 def stddev(self, mask=None):
576 """
577 Determine the standard deviation of the current beam/if/pol
578 Takes a 'mask' as an optional parameter to specify which
579 channels should be excluded.
580 Parameters:
581 mask: an optional mask specifying where the standard
582 deviation should be determined.
583
584 Example:
585 scan.set_unit('channel')
586 msk = scan.create_mask([100, 200], [500, 600])
587             scan.stddev(mask=msk)
588 """
589         return self.stats(stat='stddev', mask=mask)
590
591
592 def get_column_names(self):
593 """
594 Return a list of column names, which can be used for selection.
595 """
596 return list(Scantable.get_column_names(self))
597
598 def get_tsys(self, row=-1):
599 """
600 Return the System temperatures.
601 Returns:
602 a list of Tsys values for the current selection
603 """
604 if row > -1:
605 return self._get_column(self._gettsys, row)
606 return self._row_callback(self._gettsys, "Tsys")
607
608
609 def get_weather(self, row=-1):
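        """
        Get the weather information for the observations.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Returns:
            a dictionary with the keys 'temperature', 'pressure',
            'humidity', 'windspeed' and 'windaz' for a single row,
            or a list of such dictionaries for all rows.
        """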
610 values = self._get_column(self._get_weather, row)
611 if row > -1:
612 return {'temperature': values[0],
613 'pressure': values[1], 'humidity' : values[2],
614 'windspeed' : values[3], 'windaz' : values[4]
615 }
616 else:
617 out = []
618 for r in values:
619
620 out.append({'temperature': r[0],
621 'pressure': r[1], 'humidity' : r[2],
622 'windspeed' : r[3], 'windaz' : r[4]
623 })
624 return out
625
626 def _row_callback(self, callback, label):
627 out = ""
628 outvec = []
629 sep = '-'*50
630 for i in range(self.nrow()):
631 tm = self._gettime(i)
632 src = self._getsourcename(i)
633 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
634 out += 'Time[%s]:\n' % (tm)
635 if self.nbeam(-1) > 1:
636 out += ' Beam[%d] ' % (self.getbeam(i))
637 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
638 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
639 outvec.append(callback(i))
640 out += '= %3.3f\n' % (outvec[i])
641 out += sep+'\n'
642 if rcParams['verbose']:
643 asaplog.push(sep)
644 asaplog.push(" %s" % (label))
645 asaplog.push(sep)
646 asaplog.push(out)
647 print_log()
648 return outvec
649
650 def _get_column(self, callback, row=-1):
651         """
652         Return callback(row) for a single row, or a list of callback(i)
653         for all rows if row == -1.
654         """
653 if row == -1:
654 return [callback(i) for i in range(self.nrow())]
655 else:
656 if 0 <= row < self.nrow():
657 return callback(row)
658
659
660 def get_time(self, row=-1, asdatetime=False):
661 """
662 Get a list of time stamps for the observations.
663 Return a datetime object for each integration time stamp in the scantable.
664 Parameters:
665 row: row no of integration. Default -1 return all rows
666 asdatetime: return values as datetime objects rather than strings
667 Example:
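            # illustrative: all rows as strings, or one row as a datetime object
            times = scan.get_time()
            t0 = scan.get_time(row=0, asdatetime=True)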
669 """
670 from time import strptime
671 from datetime import datetime
672 times = self._get_column(self._gettime, row)
673 if not asdatetime:
674 return times
675 format = "%Y/%m/%d/%H:%M:%S"
676 if isinstance(times, list):
677 return [datetime(*strptime(i, format)[:6]) for i in times]
678 else:
679 return datetime(*strptime(times, format)[:6])
680
681
682 def get_inttime(self, row=-1):
683 """
684 Get a list of integration times for the observations.
685 Return a time in seconds for each integration in the scantable.
686 Parameters:
687 row: row no of integration. Default -1 return all rows.
688 Example:
689 none
690 """
691 return self._get_column(self._getinttime, row)
692
693
694 def get_sourcename(self, row=-1):
695 """
696         Get a list of source names for the observations.
697 Return a string for each integration in the scantable.
698 Parameters:
699 row: row no of integration. Default -1 return all rows.
700 Example:
701 none
702 """
703 return self._get_column(self._getsourcename, row)
704
705 def get_elevation(self, row=-1):
706 """
707 Get a list of elevations for the observations.
708 Return a float for each integration in the scantable.
709 Parameters:
710 row: row no of integration. Default -1 return all rows.
711 Example:
712 none
713 """
714 return self._get_column(self._getelevation, row)
715
716 def get_azimuth(self, row=-1):
717 """
718 Get a list of azimuths for the observations.
719 Return a float for each integration in the scantable.
720 Parameters:
721 row: row no of integration. Default -1 return all rows.
722 Example:
723 none
724 """
725 return self._get_column(self._getazimuth, row)
726
727 def get_parangle(self, row=-1):
728 """
729 Get a list of parallactic angles for the observations.
730 Return a float for each integration in the scantable.
731 Parameters:
732 row: row no of integration. Default -1 return all rows.
733 Example:
734 none
735 """
736 return self._get_column(self._getparangle, row)
737
738 def get_direction(self, row=-1):
739 """
740 Get a list of Positions on the sky (direction) for the observations.
741 Return a string for each integration in the scantable.
742 Parameters:
743 row: row no of integration. Default -1 return all rows
744 Example:
745 none
746 """
747 return self._get_column(self._getdirection, row)
748
749 def get_directionval(self, row=-1):
750 """
751 Get a list of Positions on the sky (direction) for the observations.
752 Return a float for each integration in the scantable.
753 Parameters:
754 row: row no of integration. Default -1 return all rows
755 Example:
756 none
757 """
758 return self._get_column(self._getdirectionvec, row)
759
760 @print_log_dec
761 def set_unit(self, unit='channel'):
762 """
763 Set the unit for all following operations on this scantable
764 Parameters:
765 unit: optional unit, default is 'channel'
766 one of '*Hz', 'km/s', 'channel', ''
767 """
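        # usage sketch (illustrative): work in velocity units
        #   scan.set_unit('km/s')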
768 varlist = vars()
769 if unit in ['', 'pixel', 'channel']:
770 unit = ''
771 inf = list(self._getcoordinfo())
772 inf[0] = unit
773 self._setcoordinfo(inf)
774 self._add_history("set_unit", varlist)
775
776 @print_log_dec
777 def set_instrument(self, instr):
778 """
779 Set the instrument for subsequent processing.
780 Parameters:
781 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
782 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
783 """
784 self._setInstrument(instr)
785         self._add_history("set_instrument", vars())
786 print_log()
787
788 @print_log_dec
789 def set_feedtype(self, feedtype):
790 """
791 Overwrite the feed type, which might not be set correctly.
792 Parameters:
793 feedtype: 'linear' or 'circular'
794 """
795 self._setfeedtype(feedtype)
796 self._add_history("set_feedtype", vars())
797 print_log()
798
799 @print_log_dec
800 def set_doppler(self, doppler='RADIO'):
801 """
802 Set the doppler for all following operations on this scantable.
803 Parameters:
804 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
805 """
806 varlist = vars()
807 inf = list(self._getcoordinfo())
808 inf[2] = doppler
809 self._setcoordinfo(inf)
810 self._add_history("set_doppler", vars())
811 print_log()
812
813 @print_log_dec
814 def set_freqframe(self, frame=None):
815 """
816 Set the frame type of the Spectral Axis.
817 Parameters:
818 frame: an optional frame type, default 'LSRK'. Valid frames are:
819 'TOPO', 'LSRD', 'LSRK', 'BARY',
820 'GEO', 'GALACTO', 'LGROUP', 'CMB'
821 Examples:
822 scan.set_freqframe('BARY')
823 """
824 frame = frame or rcParams['scantable.freqframe']
825 varlist = vars()
826 # "REST" is not implemented in casacore
827 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
828 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
829 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
830 'GEO', 'GALACTO', 'LGROUP', 'CMB']
831
832 if frame in valid:
833 inf = list(self._getcoordinfo())
834 inf[1] = frame
835 self._setcoordinfo(inf)
836 self._add_history("set_freqframe", varlist)
837 else:
838             msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
839 if rcParams['verbose']:
840 #print msg
841 asaplog.push( msg )
842 print_log( 'ERROR' )
843 else:
844 raise TypeError(msg)
845 print_log()
846
847 def set_dirframe(self, frame=""):
848 """
849 Set the frame type of the Direction on the sky.
850 Parameters:
851 frame: an optional frame type, default ''. Valid frames are:
852 'J2000', 'B1950', 'GALACTIC'
853 Examples:
854 scan.set_dirframe('GALACTIC')
855 """
856 varlist = vars()
857 try:
858 Scantable.set_dirframe(self, frame)
859 except RuntimeError, msg:
860 if rcParams['verbose']:
861 #print msg
862 print_log()
863 asaplog.push( str(msg) )
864 print_log( 'ERROR' )
865 else:
866 raise
867 self._add_history("set_dirframe", varlist)
868
869 def get_unit(self):
870 """
871 Get the default unit set in this scantable
872 Returns:
873 A unit string
874 """
875 inf = self._getcoordinfo()
876 unit = inf[0]
877 if unit == '': unit = 'channel'
878 return unit
879
880 def get_abcissa(self, rowno=0):
881 """
882 Get the abcissa in the current coordinate setup for the currently
883 selected Beam/IF/Pol
884 Parameters:
885 rowno: an optional row number in the scantable. Default is the
886 first row, i.e. rowno=0
887 Returns:
888 The abcissa values and the format string (as a dictionary)
889 """
890 abc = self._getabcissa(rowno)
891 lbl = self._getabcissalabel(rowno)
892 print_log()
893 return abc, lbl
894
895 def flag(self, mask=None, unflag=False):
896 """
897 Flag the selected data using an optional channel mask.
898 Parameters:
899 mask: an optional channel mask, created with create_mask. Default
900 (no mask) is all channels.
901 unflag: if True, unflag the data
902 """
903 varlist = vars()
904 mask = mask or []
905 try:
906 self._flag(mask, unflag)
907 except RuntimeError, msg:
908 if rcParams['verbose']:
909 #print msg
910 print_log()
911 asaplog.push( str(msg) )
912 print_log( 'ERROR' )
913 return
914 else: raise
915 self._add_history("flag", varlist)
916
917 def flag_row(self, rows=[], unflag=False):
918 """
919 Flag the selected data in row-based manner.
920 Parameters:
921 rows: list of row numbers to be flagged. Default is no row
922 (must be explicitly specified to execute row-based flagging).
923 unflag: if True, unflag the data.
924 """
925 varlist = vars()
926 try:
927 self._flag_row(rows, unflag)
928 except RuntimeError, msg:
929 if rcParams['verbose']:
930 print_log()
931 asaplog.push( str(msg) )
932 print_log('ERROR')
933 return
934 else: raise
935 self._add_history("flag_row", varlist)
936
937 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
938 """
939         Flag the selected data outside a specified range (channel-based).
940 Parameters:
941 uthres: upper threshold.
942 dthres: lower threshold
943 clipoutside: True for flagging data outside the range [dthres:uthres].
944                          False for flagging data inside the range.
945 unflag : if True, unflag the data.
946 """
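        # usage sketch (thresholds are illustrative): flag data outside [-5, 5]
        #   scan.clip(uthres=5.0, dthres=-5.0)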
947 varlist = vars()
948 try:
949 self._clip(uthres, dthres, clipoutside, unflag)
950 except RuntimeError, msg:
951 if rcParams['verbose']:
952 print_log()
953 asaplog.push(str(msg))
954 print_log('ERROR')
955 return
956 else: raise
957 self._add_history("clip", varlist)
958
959 @print_log_dec
960 def lag_flag(self, start, end, unit="MHz", insitu=None):
961 #def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
962 """
963 Flag the data in 'lag' space by providing a frequency to remove.
964 Flagged data in the scantable gets interpolated over the region.
965 No taper is applied.
966 Parameters:
967 start: the start frequency (really a period within the
968 bandwidth) or period to remove
969 end: the end frequency or period to remove
970 unit: the frequency unit (default "MHz") or "" for
971 explicit lag channels
972 Notes:
973 It is recommended to flag edges of the band or strong
974 signals beforehand.
975 """
976 if insitu is None: insitu = rcParams['insitu']
977 self._math._setinsitu(insitu)
978 varlist = vars()
979 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
980 if not (unit == "" or base.has_key(unit)):
981 raise ValueError("%s is not a valid unit." % unit)
982 try:
983 if unit == "":
984 s = scantable(self._math._lag_flag(self, start, end, "lags"))
985 else:
986 s = scantable(self._math._lag_flag(self, start*base[unit],
987 end*base[unit], "frequency"))
988 except RuntimeError, msg:
989 if rcParams['verbose']:
990 #print msg
991 print_log()
992 asaplog.push( str(msg) )
993 print_log( 'ERROR' )
994 return
995 else: raise
996 s._add_history("lag_flag", varlist)
997 print_log()
998 if insitu:
999 self._assign(s)
1000 else:
1001 return s
1002
1003 @print_log_dec
1004 def create_mask(self, *args, **kwargs):
1005 """
1006 Compute and return a mask based on [min, max] windows.
1007 The specified windows are to be INCLUDED, when the mask is
1008 applied.
1009 Parameters:
1010 [min, max], [min2, max2], ...
1011             Pairs of start/end points (inclusive) specifying the regions
1012 to be masked
1013 invert: optional argument. If specified as True,
1014 return an inverted mask, i.e. the regions
1015 specified are EXCLUDED
1016 row: create the mask using the specified row for
1017 unit conversions, default is row=0
1018 only necessary if frequency varies over rows.
1019 Example:
1020 scan.set_unit('channel')
1021 a)
1022 msk = scan.create_mask([400, 500], [800, 900])
1023 # masks everything outside 400 and 500
1024 # and 800 and 900 in the unit 'channel'
1025
1026 b)
1027 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1028 # masks the regions between 400 and 500
1029 # and 800 and 900 in the unit 'channel'
1030 c)
1031 mask only channel 400
1032 msk = scan.create_mask([400])
1033 """
1034 row = kwargs.get("row", 0)
1035 data = self._getabcissa(row)
1036 u = self._getcoordinfo()[0]
1037 if rcParams['verbose']:
1038 if u == "": u = "channel"
1039 msg = "The current mask window unit is %s" % u
1040 i = self._check_ifs()
1041 if not i:
1042 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1043 asaplog.push(msg)
1044 n = self.nchan()
1045 msk = _n_bools(n, False)
1046 # test if args is a 'list' or a 'normal *args - UGLY!!!
1047
1048 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1049 and args or args[0]
1050 for window in ws:
1051 if len(window) == 1:
1052 window = [window[0], window[0]]
1053 if len(window) == 0 or len(window) > 2:
1054 raise ValueError("A window needs to be defined as [start(, end)]")
1055 if window[0] > window[1]:
1056 tmp = window[0]
1057 window[0] = window[1]
1058 window[1] = tmp
1059 for i in range(n):
1060 if data[i] >= window[0] and data[i] <= window[1]:
1061 msk[i] = True
1062 if kwargs.has_key('invert'):
1063 if kwargs.get('invert'):
1064 msk = mask_not(msk)
1065 print_log()
1066 return msk
1067
1068 def get_masklist(self, mask=None, row=0):
1069 """
1070 Compute and return a list of mask windows, [min, max].
1071 Parameters:
1072 mask: channel mask, created with create_mask.
1073             row:    calculate the masklist using the specified row
1074 for unit conversions, default is row=0
1075 only necessary if frequency varies over rows.
1076 Returns:
1077 [min, max], [min2, max2], ...
1078             Pairs of start/end points (inclusive) specifying
1079 the masked regions
1080 """
1081 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1082 raise TypeError("The mask should be list or tuple.")
1083 if len(mask) < 2:
1084             raise TypeError("The mask should contain at least 2 elements")
1085 if self.nchan() != len(mask):
1086 msg = "Number of channels in scantable != number of mask elements"
1087 raise TypeError(msg)
1088 data = self._getabcissa(row)
1089 u = self._getcoordinfo()[0]
1090 if rcParams['verbose']:
1091 if u == "": u = "channel"
1092 msg = "The current mask window unit is %s" % u
1093 i = self._check_ifs()
1094 if not i:
1095 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1096 asaplog.push(msg)
1097 masklist=[]
1098 ist, ien = None, None
1099 ist, ien=self.get_mask_indices(mask)
1100 if ist is not None and ien is not None:
1101 for i in xrange(len(ist)):
1102                 limits = [data[ist[i]], data[ien[i]]]
1103                 limits.sort()
1104                 masklist.append([limits[0], limits[1]])
1105 return masklist
1106
1107 def get_mask_indices(self, mask=None):
1108 """
1109 Compute and Return lists of mask start indices and mask end indices.
1110 Parameters:
1111 mask: channel mask, created with create_mask.
1112 Returns:
1113 List of mask start indices and that of mask end indices,
1114 i.e., [istart1,istart2,....], [iend1,iend2,....].
1115 """
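        # usage sketch (illustrative, for a scantable 'scan'):
        #   msk = scan.create_mask([400, 500], [800, 900])
        #   istart, iend = scan.get_mask_indices(msk)  # -> [400, 800], [500, 900]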
1116 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1117 raise TypeError("The mask should be list or tuple.")
1118 if len(mask) < 2:
1119             raise TypeError("The mask should contain at least 2 elements")
1120 istart=[]
1121 iend=[]
1122 if mask[0]: istart.append(0)
1123 for i in range(len(mask)-1):
1124 if not mask[i] and mask[i+1]:
1125 istart.append(i+1)
1126 elif mask[i] and not mask[i+1]:
1127 iend.append(i)
1128 if mask[len(mask)-1]: iend.append(len(mask)-1)
1129 if len(istart) != len(iend):
1130 raise RuntimeError("Numbers of mask start != mask end.")
1131 for i in range(len(istart)):
1132 if istart[i] > iend[i]:
1133 raise RuntimeError("Mask start index > mask end index")
1134 break
1135 return istart,iend
1136
1137# def get_restfreqs(self):
1138# """
1139# Get the restfrequency(s) stored in this scantable.
1140# The return value(s) are always of unit 'Hz'
1141# Parameters:
1142# none
1143# Returns:
1144# a list of doubles
1145# """
1146# return list(self._getrestfreqs())
1147
1148 def get_restfreqs(self, ids=None):
1149 """
1150 Get the restfrequency(s) stored in this scantable.
1151 The return value(s) are always of unit 'Hz'
1152 Parameters:
1153 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1154 be retrieved
1155 Returns:
1156 dictionary containing ids and a list of doubles for each id
1157 """
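        # usage sketch (values are illustrative, for a scantable 'scan'):
        #   scan.get_restfreqs()       # -> e.g. {0: [1.420406e9], 1: [1.667359e9]}
        #   scan.get_restfreqs(ids=0)  # -> e.g. [1.420406e9]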
1158 if ids is None:
1159 rfreqs={}
1160 idlist = self.getmolnos()
1161 for i in idlist:
1162 rfreqs[i]=list(self._getrestfreqs(i))
1163 return rfreqs
1164 else:
1165 if type(ids)==list or type(ids)==tuple:
1166 rfreqs={}
1167 for i in ids:
1168 rfreqs[i]=list(self._getrestfreqs(i))
1169 return rfreqs
1170 else:
1171 return list(self._getrestfreqs(ids))
1172 #return list(self._getrestfreqs(ids))
1173
1174 def set_restfreqs(self, freqs=None, unit='Hz'):
1175 """
1176         Set or replace the rest frequency(s).
1177         If the 'freqs' argument holds a scalar, that rest frequency is
1178         applied to all the data in the current selection.
1179         If 'freqs' holds a list of scalars, it must be of equal or smaller
1180         length than the number of selected IFs; the values are then applied
1181         one per IF in order, e.g. 'freqs=[1e9, 2e9]' gives IF 0 the rest
1182         frequency 1e9 and IF 1 the rest frequency 2e9.
1183         A list of lists (or tuples) sets multiple rest frequencies per IF,
1184         one inner list per selected IF, and a list of dicts of the form
1185         {'value': ..., 'name': ...} sets rest frequencies with line names.
1189 You can also specify the frequencies via a linecatalog.
1190
1191 Parameters:
1192             freqs:   list of rest frequency values or string identifiers
1193 unit: unit for rest frequency (default 'Hz')
1194
1195 Example:
1196 # set the given restfrequency for the all currently selected IFs
1197 scan.set_restfreqs(freqs=1.4e9)
1198 # set restfrequencies for the n IFs (n > 1) in the order of the
1199             # list, i.e.
1200             # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1201             # len(list_of_restfreqs) == nIF
1202             # for nIF == 1 the following will set multiple rest frequencies
1203             # for that IF
1204 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1205             # set multiple restfrequencies per IF, as a list of lists where
1206             # the outer list has nIF elements and the inner lists are of
1207             # arbitrary length
1207 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
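            # set rest frequencies together with line names, as a list of
            # dicts (the name and value here are illustrative)
            scan.set_restfreqs(freqs=[{'name': 'NH3(1,1)', 'value': 23.694e9}])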
1208
1209
1210 Note:
1211             To do more sophisticated rest frequency setting, e.g. on a
1212 source and IF basis, use scantable.set_selection() before using
1213 this function.
1214 # provided your scantable is called scan
1215 selection = selector()
1216 selection.set_name("ORION*")
1217 selection.set_ifs([1])
1218 scan.set_selection(selection)
1219 scan.set_restfreqs(freqs=86.6e9)
1220
1221 """
1222 varlist = vars()
1223 from asap import linecatalog
1224 # simple value
1225 if isinstance(freqs, int) or isinstance(freqs, float):
1226 self._setrestfreqs([freqs], [""], unit)
1227 # list of values
1228 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1229 # list values are scalars
1230 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1231 if len(freqs) == 1:
1232 self._setrestfreqs(freqs, [""], unit)
1233 else:
1234                     # allow the 'old' mode of setting multiple IFs
1235 sel = selector()
1236 savesel = self._getselection()
1237 iflist = self.getifnos()
1238                     if len(freqs) > len(iflist):
1239                         raise ValueError("number of elements in the list "
1240                                          "exceeds the current IF selections")
1242 for i, fval in enumerate(freqs):
1243 sel.set_ifs(iflist[i])
1244 self._setselection(sel)
1245 self._setrestfreqs([fval], [""], unit)
1246 self._setselection(savesel)
1247
1248 # list values are dict, {'value'=, 'name'=)
1249 elif isinstance(freqs[-1], dict):
1250 values = []
1251 names = []
1252 for d in freqs:
1253 values.append(d["value"])
1254 names.append(d["name"])
1255 self._setrestfreqs(values, names, unit)
1256 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1257 sel = selector()
1258 savesel = self._getselection()
1259 iflist = self.getifnos()
1260                 if len(freqs) > len(iflist):
1261                     raise ValueError("number of elements in list of lists exceeds"
1262                                      " the current IF selections")
1263 for i, fval in enumerate(freqs):
1264 sel.set_ifs(iflist[i])
1265 self._setselection(sel)
1266 self._setrestfreqs(fval, [""], unit)
1267 self._setselection(savesel)
1268 # freqs are to be taken from a linecatalog
1269 elif isinstance(freqs, linecatalog):
1270 sel = selector()
1271 savesel = self._getselection()
            iflist = self.getifnos()
1272             for i in xrange(freqs.nrow()):
1273 sel.set_ifs(iflist[i])
1274 self._setselection(sel)
1275 self._setrestfreqs([freqs.get_frequency(i)],
1276 [freqs.get_name(i)], "MHz")
1277 # ensure that we are not iterating past nIF
1278 if i == self.nif()-1: break
1279 self._setselection(savesel)
1280 else:
1281 return
1282 self._add_history("set_restfreqs", varlist)
1283
1284 def shift_refpix(self, delta):
1285 """
1286 Shift the reference pixel of the Spectra Coordinate by an
1287 integer amount.
1288 Parameters:
1289 delta: the amount to shift by
1290 Note:
1291 Be careful using this with broadband data.
1292 """
1293 Scantable.shift_refpix(self, delta)
1294
1295 def history(self, filename=None):
1296 """
1297 Print the history. Optionally to a file.
1298 Parameters:
1299 filename: The name of the file to save the history to.
1300 """
1301 hist = list(self._gethistory())
1302 out = "-"*80
1303 for h in hist:
1304 if h.startswith("---"):
1305 out += "\n"+h
1306 else:
1307 items = h.split("##")
1308 date = items[0]
1309 func = items[1]
1310 items = items[2:]
1311 out += "\n"+date+"\n"
1312 out += "Function: %s\n Parameters:" % (func)
1313 for i in items:
1314 s = i.split("=")
1315 out += "\n %s = %s" % (s[0], s[1])
1316 out += "\n"+"-"*80
1317 if filename is not None:
1318             if filename == "":
1319 filename = 'scantable_history.txt'
1320 import os
1321 filename = os.path.expandvars(os.path.expanduser(filename))
1322 if not os.path.isdir(filename):
1323 data = open(filename, 'w')
1324 data.write(out)
1325 data.close()
1326 else:
1327 msg = "Illegal file name '%s'." % (filename)
1328 if rcParams['verbose']:
1329 #print msg
1330 asaplog.push( msg )
1331 print_log( 'ERROR' )
1332 else:
1333 raise IOError(msg)
1334 if rcParams['verbose']:
1335 try:
1336 from IPython.genutils import page as pager
1337 except ImportError:
1338 from pydoc import pager
1339 pager(out)
1340 else:
1341 return out
1342 return
1343 #
1344 # Maths business
1345 #
1346 @print_log_dec
1347 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1348 """
1349 Return the (time) weighted average of a scan.
1350 Note:
1351 in channels only - align if necessary
1352 Parameters:
1353 mask: an optional mask (only used for 'var' and 'tsys'
1354 weighting)
1355 scanav: True averages each scan separately
1356 False (default) averages all scans together,
1357 weight: Weighting scheme.
1358 'none' (mean no weight)
1359 'var' (1/var(spec) weighted)
1360 'tsys' (1/Tsys**2 weighted)
1361 'tint' (integration time weighted)
1362 'tintsys' (Tint/Tsys**2)
1363 'median' ( median averaging)
1364 The default is 'tint'
1365 align: align the spectra in velocity before averaging. It takes
1366 the time of the first spectrum as reference time.
1367 Example:
1368 # time average the scantable without using a mask
1369 newscan = scan.average_time()
1370 """
1371 varlist = vars()
1372 weight = weight or 'TINT'
1373 mask = mask or ()
1374 scanav = (scanav and 'SCAN') or 'NONE'
1375 scan = (self, )
1376 try:
1377 if align:
1378 scan = (self.freq_align(insitu=False), )
1379 s = None
1380 if weight.upper() == 'MEDIAN':
1381 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1382 scanav))
1383 else:
1384 s = scantable(self._math._average(scan, mask, weight.upper(),
1385 scanav))
1386 except RuntimeError, msg:
1387 if rcParams['verbose']:
1388 #print msg
1389 print_log()
1390 asaplog.push( str(msg) )
1391 print_log( 'ERROR' )
1392 return
1393 else: raise
1394 s._add_history("average_time", varlist)
1395 print_log()
1396 return s
1397
1398 @print_log_dec
1399 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1400 """
1401 Return a scan where all spectra are converted to either
1402 Jansky or Kelvin depending upon the flux units of the scan table.
1403 By default the function tries to look the values up internally.
1404 If it can't find them (or if you want to over-ride), you must
1405 specify EITHER jyperk OR eta (and D which it will try to look up
1406 also if you don't set it). jyperk takes precedence if you set both.
1407 Parameters:
1408 jyperk: the Jy / K conversion factor
1409 eta: the aperture efficiency
1410             d:           the geometric diameter (metres)
1411 insitu: if False a new scantable is returned.
1412 Otherwise, the scaling is done in-situ
1413 The default is taken from .asaprc (False)
1414 """
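        # usage sketch (the Jy/K factor is illustrative):
        #   jyscan = scan.convert_flux(jyperk=12.7, insitu=False)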
1415 if insitu is None: insitu = rcParams['insitu']
1416 self._math._setinsitu(insitu)
1417 varlist = vars()
1418 jyperk = jyperk or -1.0
1419 d = d or -1.0
1420 eta = eta or -1.0
1421 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1422 s._add_history("convert_flux", varlist)
1423 print_log()
1424 if insitu: self._assign(s)
1425 else: return s
1426
1427 @print_log_dec
1428 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1429 """
1430 Return a scan after applying a gain-elevation correction.
1431 The correction can be made via either a polynomial or a
1432 table-based interpolation (and extrapolation if necessary).
1433 You specify polynomial coefficients, an ascii table or neither.
1434 If you specify neither, then a polynomial correction will be made
1435 with built in coefficients known for certain telescopes (an error
1436 will occur if the instrument is not known).
1437 The data and Tsys are *divided* by the scaling factors.
1438 Parameters:
1439 poly: Polynomial coefficients (default None) to compute a
1440 gain-elevation correction as a function of
1441 elevation (in degrees).
1442 filename: The name of an ascii file holding correction factors.
1443 The first row of the ascii file must give the column
1444 names and these MUST include columns
1445 "ELEVATION" (degrees) and "FACTOR" (multiply data
1446 by this) somewhere.
1447 The second row must give the data type of the
1448 column. Use 'R' for Real and 'I' for Integer.
1449 An example file would be
1450 (actual factors are arbitrary) :
1451
1452 TIME ELEVATION FACTOR
1453 R R R
1454 0.1 0 0.8
1455 0.2 20 0.85
1456 0.3 40 0.9
1457 0.4 60 0.85
1458 0.5 80 0.8
1459 0.6 90 0.75
1460 method: Interpolation method when correcting from a table.
1461 Values are "nearest", "linear" (default), "cubic"
1462 and "spline"
1463 insitu: if False a new scantable is returned.
1464 Otherwise, the scaling is done in-situ
1465 The default is taken from .asaprc (False)
1466 """
1467
1468 if insitu is None: insitu = rcParams['insitu']
1469 self._math._setinsitu(insitu)
1470 varlist = vars()
1471 poly = poly or ()
1472 from os.path import expandvars
1473 filename = expandvars(filename)
1474 s = scantable(self._math._gainel(self, poly, filename, method))
1475 s._add_history("gain_el", varlist)
1476 print_log()
1477 if insitu:
1478 self._assign(s)
1479 else:
1480 return s
1481
1482 @print_log_dec
1483 def freq_align(self, reftime=None, method='cubic', insitu=None):
1484 """
1485 Return a scan where all rows have been aligned in frequency/velocity.
1486 The alignment frequency frame (e.g. LSRK) is that set by function
1487 set_freqframe.
1488 Parameters:
1489 reftime: reference time to align at. By default, the time of
1490 the first row of data is used.
1491 method: Interpolation method for regridding the spectra.
1492 Choose from "nearest", "linear", "cubic" (default)
1493 and "spline"
1494 insitu: if False a new scantable is returned.
1495 Otherwise, the scaling is done in-situ
1496 The default is taken from .asaprc (False)
1497 """
1498 if insitu is None: insitu = rcParams["insitu"]
1499 self._math._setinsitu(insitu)
1500 varlist = vars()
1501 reftime = reftime or ""
1502 s = scantable(self._math._freq_align(self, reftime, method))
1503 s._add_history("freq_align", varlist)
1504 print_log()
1505 if insitu: self._assign(s)
1506 else: return s
1507
1508 @print_log_dec
1509 def opacity(self, tau=None, insitu=None):
1510 """
1511 Apply an opacity correction. The data
1512 and Tsys are multiplied by the correction factor.
1513 Parameters:
1514 tau: (list of) opacity from which the correction factor is
1515 exp(tau*ZD)
1516 where ZD is the zenith-distance.
1517 If a list is provided, it has to be of length nIF,
1518 nIF*nPol or 1 and in order of IF/POL, e.g.
1519 [opif0pol0, opif0pol1, opif1pol0 ...]
1520 if tau is `None` the opacities are determined from a
1521 model.
1522 insitu: if False a new scantable is returned.
1523 Otherwise, the scaling is done in-situ
1524 The default is taken from .asaprc (False)
1525 """
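        # usage sketch (the opacity value is illustrative):
        #   corrected = scan.opacity(tau=0.083, insitu=False)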
1526 if insitu is None: insitu = rcParams['insitu']
1527 self._math._setinsitu(insitu)
1528 varlist = vars()
1529 if not hasattr(tau, "__len__"):
1530 tau = [tau]
1531 s = scantable(self._math._opacity(self, tau))
1532 s._add_history("opacity", varlist)
1533 print_log()
1534 if insitu: self._assign(s)
1535 else: return s
1536
1537 @print_log_dec
1538 def bin(self, width=5, insitu=None):
1539 """
1540 Return a scan where all spectra have been binned up.
1541 Parameters:
1542 width: The bin width (default=5) in pixels
1543 insitu: if False a new scantable is returned.
1544 Otherwise, the scaling is done in-situ
1545 The default is taken from .asaprc (False)
1546 """
1547 if insitu is None: insitu = rcParams['insitu']
1548 self._math._setinsitu(insitu)
1549 varlist = vars()
1550 s = scantable(self._math._bin(self, width))
1551 s._add_history("bin", varlist)
1552 print_log()
1553 if insitu:
1554 self._assign(s)
1555 else:
1556 return s
1557
1558 @print_log_dec
1559 def resample(self, width=5, method='cubic', insitu=None):
1560 """
1561         Return a scan where all spectra have been resampled.
1562
1563 Parameters:
1564 width: The bin width (default=5) in pixels
1565             method:      Interpolation method used for the resampling.
1566 Values are "nearest", "linear", "cubic" (default)
1567 and "spline"
1568 insitu: if False a new scantable is returned.
1569 Otherwise, the scaling is done in-situ
1570 The default is taken from .asaprc (False)
1571 """
1572 if insitu is None: insitu = rcParams['insitu']
1573 self._math._setinsitu(insitu)
1574 varlist = vars()
1575 s = scantable(self._math._resample(self, method, width))
1576 s._add_history("resample", varlist)
1577 print_log()
1578 if insitu: self._assign(s)
1579 else: return s
1580
1581 @print_log_dec
1582 def average_pol(self, mask=None, weight='none'):
1583 """
1584 Average the Polarisations together.
1585 Parameters:
1586 mask: An optional mask defining the region, where the
1587 averaging will be applied. The output will have all
1588 specified points masked.
1589 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1590 weighted), or 'tsys' (1/Tsys**2 weighted)
1591 """
1592 varlist = vars()
1593 mask = mask or ()
1594 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1595 s._add_history("average_pol", varlist)
1596 print_log()
1597 return s
1598
1599 @print_log_dec
1600 def average_beam(self, mask=None, weight='none'):
1601 """
1602 Average the Beams together.
1603 Parameters:
1604 mask: An optional mask defining the region, where the
1605 averaging will be applied. The output will have all
1606 specified points masked.
1607 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1608 weighted), or 'tsys' (1/Tsys**2 weighted)
1609 """
1610 varlist = vars()
1611 mask = mask or ()
1612 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1613 s._add_history("average_beam", varlist)
1614 print_log()
1615 return s
1616
1617 def parallactify(self, pflag):
1618 """
1619 Set a flag to indicate whether this data should be treated as having
1620 been 'parallactified' (total phase == 0.0)
1621 Parameters:
1622 pflag: Bool indicating whether to turn this on (True) or
1623 off (False)
1624 """
1625 varlist = vars()
1626 self._parallactify(pflag)
1627 self._add_history("parallactify", varlist)
1628
1629 @print_log_dec
1630 def convert_pol(self, poltype=None):
1631 """
1632 Convert the data to a different polarisation type.
1633 Note that you will need cross-polarisation terms for most conversions.
1634 Parameters:
1635 poltype: The new polarisation type. Valid types are:
1636 "linear", "circular", "stokes" and "linpol"
1637 """
1638 varlist = vars()
1639 try:
1640 s = scantable(self._math._convertpol(self, poltype))
1641 except RuntimeError, msg:
1642 if rcParams['verbose']:
1643 #print msg
1644 print_log()
1645 asaplog.push( str(msg) )
1646 print_log( 'ERROR' )
1647 return
1648 else:
1649 raise
1650 s._add_history("convert_pol", varlist)
1651 print_log()
1652 return s
1653
1654 @print_log_dec
1655 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1656 """
1657 Smooth the spectrum by the specified kernel (conserving flux).
1658 Parameters:
1659 kernel: The type of smoothing kernel. Select from
1660 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1661 or 'poly'
1662 width: The width of the kernel in pixels. For hanning this is
1663                     ignored otherwise it defaults to 5 pixels.
1664 For 'gaussian' it is the Full Width Half
1665 Maximum. For 'boxcar' it is the full width.
1666 For 'rmedian' and 'poly' it is the half width.
1667 order: Optional parameter for 'poly' kernel (default is 2), to
1668                     specify the order of the polynomial. Ignored by all other
1669 kernels.
1670 plot: plot the original and the smoothed spectra.
1671                     In this each individual fit has to be approved, by
1672 typing 'y' or 'n'
1673 insitu: if False a new scantable is returned.
1674 Otherwise, the scaling is done in-situ
1675 The default is taken from .asaprc (False)
1676 Example:
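            # illustrative: 10-channel boxcar smooth, returning a new scantable
            smoothed = scan.smooth(kernel='boxcar', width=10, insitu=False)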
1678 """
1679 if insitu is None: insitu = rcParams['insitu']
1680 self._math._setinsitu(insitu)
1681 varlist = vars()
1682
1683 if plot: orgscan = self.copy()
1684
1685 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1686 s._add_history("smooth", varlist)
1687
1688 if plot:
1689 if rcParams['plotter.gui']:
1690 from asap.asaplotgui import asaplotgui as asaplot
1691 else:
1692 from asap.asaplot import asaplot
1693 self._p=asaplot()
1694 self._p.set_panels()
1695 ylab=s._get_ordinate_label()
1696 #self._p.palette(0,["#777777","red"])
1697 for r in xrange(s.nrow()):
1698 xsm=s._getabcissa(r)
1699 ysm=s._getspectrum(r)
1700 xorg=orgscan._getabcissa(r)
1701 yorg=orgscan._getspectrum(r)
1702 self._p.clear()
1703 self._p.hold()
1704 self._p.set_axes('ylabel',ylab)
1705 self._p.set_axes('xlabel',s._getabcissalabel(r))
1706 self._p.set_axes('title',s._getsourcename(r))
1707 self._p.set_line(label='Original',color="#777777")
1708 self._p.plot(xorg,yorg)
1709 self._p.set_line(label='Smoothed',color="red")
1710 self._p.plot(xsm,ysm)
1711 ### Ugly part for legend
1712 for i in [0,1]:
1713 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1714 self._p.release()
1715 ### Ugly part for legend
1716 self._p.subplots[0]['lines']=[]
1717 res = raw_input("Accept smoothing ([y]/n): ")
1718 if res.upper() == 'N':
1719 s._setspectrum(yorg, r)
1720 self._p.unmap()
1721 self._p = None
1722 del orgscan
1723
1724 print_log()
1725 if insitu: self._assign(s)
1726 else: return s
1727
1728 @print_log_dec
1729 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1730 insitu=None):
1731 """
1732 Return a scan which has been baselined (all rows) by a polynomial.
1733 Parameters:
1734 mask: an optional mask
1735 order: the order of the polynomial (default is 0)
1736 plot: plot the fit and the residual. In this each
1737                         individual fit has to be approved, by typing 'y'
1738 or 'n'
1739 uselin: use linear polynomial fit
1740 insitu: if False a new scantable is returned.
1741 Otherwise, the scaling is done in-situ
1742 The default is taken from .asaprc (False)
1743 Example:
1744 # return a scan baselined by a third order polynomial,
1745 # not using a mask
1746 bscan = scan.poly_baseline(order=3)
1747 """
1748 if insitu is None: insitu = rcParams['insitu']
1749 if not insitu:
1750 workscan = self.copy()
1751 else:
1752 workscan = self
1753 varlist = vars()
1754 if mask is None:
1755 mask = [True for i in xrange(self.nchan(-1))]
1756
1757 from asap.asapfitter import fitter
1758 try:
1759 f = fitter()
1760 if uselin:
1761 f.set_function(lpoly=order)
1762 else:
1763 f.set_function(poly=order)
1764
1765 rows = range(workscan.nrow())
1766 if len(rows) > 0:
1767 self.blpars = []
1768
1769 for r in rows:
1770 # take into account flagtra info (CAS-1434)
1771 flagtra = workscan._getmask(r)
1772 actualmask = mask[:]
1773 if len(actualmask) == 0:
1774 actualmask = list(flagtra[:])
1775 else:
1776 if len(actualmask) != len(flagtra):
1777 raise RuntimeError, "Mask and flagtra have different length"
1778 else:
1779 for i in range(0, len(actualmask)):
1780 actualmask[i] = actualmask[i] and flagtra[i]
1781 f.set_scan(workscan, actualmask)
1782 f.x = workscan._getabcissa(r)
1783 f.y = workscan._getspectrum(r)
1784 f.data = None
1785 f.fit()
1786 if plot:
1787 f.plot(residual=True)
1788 x = raw_input("Accept fit ( [y]/n ): ")
1789 if x.upper() == 'N':
1790 self.blpars.append(None)
1791 continue
1792 workscan._setspectrum(f.fitter.getresidual(), r)
1793 self.blpars.append(f.get_parameters())
1794
1795 if plot:
1796 f._p.unmap()
1797 f._p = None
1798 workscan._add_history("poly_baseline", varlist)
1799 print_log()
1800 if insitu: self._assign(workscan)
1801 else: return workscan
1802 except RuntimeError:
1803 msg = "The fit failed, possibly because it didn't converge."
1804 if rcParams['verbose']:
1805 #print msg
1806 print_log()
1807 asaplog.push( str(msg) )
1808 print_log( 'ERROR' )
1809 return
1810 else:
1811 raise RuntimeError(msg)
1812
1813
1814 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1815 threshold=3, chan_avg_limit=1, plot=False,
1816 insitu=None):
1817 """
1818 Return a scan which has been baselined (all rows) by a polynomial.
1819 Spectral lines are detected first using linefinder and masked out
1820 to avoid them affecting the baseline solution.
1821
1822 Parameters:
mask: an optional mask retrieved from the scantable
edge: an optional number of channels to drop at
the edge of the spectrum. If only one value is
specified, the same number will be dropped from
both sides of the spectrum. Default is to keep
all channels. Nested tuples represent individual
edge selection for different IFs (the number of
spectral channels may differ between IFs)
1831 order: the order of the polynomial (default is 0)
threshold: the threshold used by the line finder. It is better to
keep it large as only strong lines affect the
baseline solution.
chan_avg_limit:
the maximum number of consecutive spectral channels to
average during the search for weak and broad lines.
1838 The default is no averaging (and no search for weak
1839 lines). If such lines can affect the fitted baseline
1840 (e.g. a high order polynomial is fitted), increase this
1841 parameter (usually values up to 8 are reasonable). Most
1842 users of this method should find the default value
1843 sufficient.
plot: plot the fit and the residual. In this case each
individual fit has to be approved by typing 'y'
or 'n'
1847 insitu: if False a new scantable is returned.
1848 Otherwise, the scaling is done in-situ
1849 The default is taken from .asaprc (False)
1850
1851 Example:
1852 scan2=scan.auto_poly_baseline(order=7)
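# a hedged sketch with hypothetical per-IF edge values: drop 45
# channels at both ends of IF0 and 3/7 channels at the ends of IF1
scan3 = scan.auto_poly_baseline(order=7, edge=((45, 45), (3, 7)))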
1853 """
1854 if insitu is None: insitu = rcParams['insitu']
1855 varlist = vars()
1856 from asap.asapfitter import fitter
1857 from asap.asaplinefind import linefinder
1858 from asap import _is_sequence_or_number as _is_valid
1859
1860 # check whether edge is set up for each IF individually
individualedge = False
1862 if len(edge) > 1:
1863 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
individualedge = True
1865
1866 if not _is_valid(edge, int) and not individualedge:
1867 raise ValueError, "Parameter 'edge' has to be an integer or a \
1868 pair of integers specified as a tuple. Nested tuples are allowed \
1869 to make individual selection for different IFs."
1870
1871 curedge = (0, 0)
1872 if individualedge:
1873 for edgepar in edge:
1874 if not _is_valid(edgepar, int):
1875 raise ValueError, "Each element of the 'edge' tuple has \
1876 to be a pair of integers or an integer."
1877 else:
curedge = edge
1879
1880 # setup fitter
1881 f = fitter()
1882 f.set_function(poly=order)
1883
1884 # setup line finder
1885 fl = linefinder()
1886 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1887
1888 if not insitu:
1889 workscan = self.copy()
1890 else:
1891 workscan = self
1892
1893 fl.set_scan(workscan)
1894
1895 rows = range(workscan.nrow())
1896 # Save parameters of baseline fits & masklists as a class attribute.
1897 # NOTICE: It does not reflect changes in scantable!
1898 if len(rows) > 0:
1899 self.blpars=[]
1900 self.masklists=[]
1901 asaplog.push("Processing:")
1902 for r in rows:
1903 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1904 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1905 workscan.getpol(r), workscan.getcycle(r))
1906 asaplog.push(msg, False)
1907
1908 # figure out edge parameter
1909 if individualedge:
if len(edge) <= workscan.getif(r):
raise RuntimeError, "Number of edge elements appears to " \
"be less than the number of IFs"
1913 curedge = edge[workscan.getif(r)]
1914
1915 # take into account flagtra info (CAS-1434)
1916 flagtra = workscan._getmask(r)
1917 actualmask = mask[:]
1918 if len(actualmask) == 0:
1919 actualmask = list(flagtra[:])
1920 else:
1921 if len(actualmask) != len(flagtra):
raise RuntimeError, "Mask and flagtra have different lengths"
1923 else:
1924 for i in range(0, len(actualmask)):
1925 actualmask[i] = actualmask[i] and flagtra[i]
1926
1927 # setup line finder
1928 fl.find_lines(r, actualmask, curedge)
1929 outmask=fl.get_mask()
1930 f.set_scan(workscan, fl.get_mask())
1931 f.x = workscan._getabcissa(r)
1932 f.y = workscan._getspectrum(r)
1933 f.data = None
1934 f.fit()
1935
1936 # Show mask list
1937 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1938 msg = "mask range: "+str(masklist)
1939 asaplog.push(msg, False)
1940
1941 if plot:
1942 f.plot(residual=True)
1943 x = raw_input("Accept fit ( [y]/n ): ")
1944 if x.upper() == 'N':
1945 self.blpars.append(None)
1946 self.masklists.append(None)
1947 continue
1948
1949 workscan._setspectrum(f.fitter.getresidual(), r)
1950 self.blpars.append(f.get_parameters())
1951 self.masklists.append(masklist)
1952 if plot:
1953 f._p.unmap()
1954 f._p = None
1955 workscan._add_history("auto_poly_baseline", varlist)
1956 if insitu:
1957 self._assign(workscan)
1958 else:
1959 return workscan
1960
1961 @print_log_dec
1962 def rotate_linpolphase(self, angle):
1963 """
1964 Rotate the phase of the complex polarization O=Q+iU correlation.
1965 This is always done in situ in the raw data. So if you call this
1966 function more than once then each call rotates the phase further.
1967 Parameters:
1968 angle: The angle (degrees) to rotate (add) by.
1969 Examples:
1970 scan.rotate_linpolphase(2.3)
1971 """
1972 varlist = vars()
1973 self._math._rotate_linpolphase(self, angle)
1974 self._add_history("rotate_linpolphase", varlist)
1975 print_log()
1976 return
1977
1978 @print_log_dec
1979 def rotate_xyphase(self, angle):
1980 """
1981 Rotate the phase of the XY correlation. This is always done in situ
1982 in the data. So if you call this function more than once
1983 then each call rotates the phase further.
1984 Parameters:
1985 angle: The angle (degrees) to rotate (add) by.
1986 Examples:
1987 scan.rotate_xyphase(2.3)
1988 """
1989 varlist = vars()
1990 self._math._rotate_xyphase(self, angle)
1991 self._add_history("rotate_xyphase", varlist)
1992 print_log()
1993 return
1994
1995 @print_log_dec
1996 def swap_linears(self):
1997 """
Swap the linear polarisations XX and YY, or more generally the first
two polarisations, as this also works for circulars.
2000 """
2001 varlist = vars()
2002 self._math._swap_linears(self)
2003 self._add_history("swap_linears", varlist)
2004 print_log()
2005 return
2006
2007 @print_log_dec
2008 def invert_phase(self):
2009 """
2010 Invert the phase of the complex polarisation
2011 """
2012 varlist = vars()
2013 self._math._invert_phase(self)
2014 self._add_history("invert_phase", varlist)
2015 print_log()
2016 return
2017
2018 @print_log_dec
2019 def add(self, offset, insitu=None):
2020 """
2021 Return a scan where all spectra have the offset added
2022 Parameters:
2023 offset: the offset
2024 insitu: if False a new scantable is returned.
2025 Otherwise, the scaling is done in-situ
2026 The default is taken from .asaprc (False)
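Example (an illustrative sketch, assuming 'scan' is an existing scantable):
# add a constant offset of 0.5 to every spectrum, returning a new scan
shifted = scan.add(0.5, insitu=False)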
2027 """
2028 if insitu is None: insitu = rcParams['insitu']
2029 self._math._setinsitu(insitu)
2030 varlist = vars()
2031 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2032 s._add_history("add", varlist)
2033 print_log()
2034 if insitu:
2035 self._assign(s)
2036 else:
2037 return s
2038
2039 @print_log_dec
2040 def scale(self, factor, tsys=True, insitu=None):
2041 """
Return a scan where all spectra are scaled by the given 'factor'
2043 Parameters:
2044 factor: the scaling factor (float or 1D float list)
2045 insitu: if False a new scantable is returned.
2046 Otherwise, the scaling is done in-situ
2047 The default is taken from .asaprc (False)
2048 tsys: if True (default) then apply the operation to Tsys
2049 as well as the data
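Example (an illustrative sketch, assuming 'scan' is an existing scantable):
# multiply all spectra (and Tsys) by a constant factor
scan.scale(2.0)
# per-channel scaling with a list as long as the spectrum,
# leaving Tsys untouched and returning a new scantable
scaled = scan.scale([1.0]*scan.nchan(), tsys=False, insitu=False)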
2050 """
2051 if insitu is None: insitu = rcParams['insitu']
2052 self._math._setinsitu(insitu)
2053 varlist = vars()
2054 s = None
2055 import numpy
2056 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2057 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2058 from asapmath import _array2dOp
2059 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2060 else:
2061 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2062 else:
2063 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2064 s._add_history("scale", varlist)
2065 print_log()
2066 if insitu:
2067 self._assign(s)
2068 else:
2069 return s
2070
2071 def set_sourcetype(self, match, matchtype="pattern",
2072 sourcetype="reference"):
2073 """
Set the type of the source to be a source or reference scan
using the provided pattern.
2076 Parameters:
2077 match: a Unix style pattern, regular expression or selector
2078 matchtype: 'pattern' (default) UNIX style pattern or
2079 'regex' regular expression
2080 sourcetype: the type of the source to use (source/reference)
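Example (an illustrative sketch; the source name pattern is hypothetical):
# mark every scan whose source name ends in '_R' as a reference scan
scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")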
2081 """
2082 varlist = vars()
2083 basesel = self.get_selection()
2084 stype = -1
2085 if sourcetype.lower().startswith("r"):
2086 stype = 1
2087 elif sourcetype.lower().startswith("s"):
2088 stype = 0
2089 else:
raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2091 if matchtype.lower().startswith("p"):
2092 matchtype = "pattern"
2093 elif matchtype.lower().startswith("r"):
2094 matchtype = "regex"
2095 else:
2096 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2097 sel = selector()
2098 if isinstance(match, selector):
2099 sel = match
2100 else:
2101 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2102 self.set_selection(basesel+sel)
2103 self._setsourcetype(stype)
2104 self.set_selection(basesel)
2105 self._add_history("set_sourcetype", varlist)
2106
2107 @print_log_dec
2108 def auto_quotient(self, preserve=True, mode='paired', verify=False):
2109 """
This function builds quotients automatically.
It assumes the observation has the same number of
"ons" and "offs".
2113 Parameters:
2114 preserve: you can preserve (default) the continuum or
2115 remove it. The equations used are
2116 preserve: Output = Toff * (on/off) - Toff
2117 remove: Output = Toff * (on/off) - Ton
2118 mode: the on/off detection mode
2119 'paired' (default)
2120 identifies 'off' scans by the
2121 trailing '_R' (Mopra/Parkes) or
2122 '_e'/'_w' (Tid) and matches
2123 on/off pairs from the observing pattern
2124 'time'
2125 finds the closest off in time
2126
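Example (an illustrative sketch, assuming 'scan' contains matching
on/off scans):
q = scan.auto_quotient(preserve=True, mode='paired')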
2127 """
2128 modes = ["time", "paired"]
if mode not in modes:
msg = "please provide a valid mode. Valid modes are %s" % (modes)
2131 raise ValueError(msg)
2132 varlist = vars()
2133 s = None
2134 if mode.lower() == "paired":
2135 basesel = self.get_selection()
2136 sel = selector()+basesel
2137 sel.set_query("SRCTYPE==1")
2138 self.set_selection(sel)
2139 offs = self.copy()
2140 sel.set_query("SRCTYPE==0")
2141 self.set_selection(sel)
2142 ons = self.copy()
2143 s = scantable(self._math._quotient(ons, offs, preserve))
2144 self.set_selection(basesel)
2145 elif mode.lower() == "time":
2146 s = scantable(self._math._auto_quotient(self, mode, preserve))
2147 s._add_history("auto_quotient", varlist)
2148 print_log()
2149 return s
2150
2151 @print_log_dec
2152 def mx_quotient(self, mask = None, weight='median', preserve=True):
2153 """
2154 Form a quotient using "off" beams when observing in "MX" mode.
2155 Parameters:
2156 mask: an optional mask to be used when weight == 'stddev'
2157 weight: How to average the off beams. Default is 'median'.
2158 preserve: you can preserve (default) the continuum or
2159 remove it. The equations used are
2160 preserve: Output = Toff * (on/off) - Toff
2161 remove: Output = Toff * (on/off) - Ton
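Example (an illustrative sketch, assuming 'scan' was observed in MX mode):
q = scan.mx_quotient(weight='median', preserve=True)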
2162 """
2163 mask = mask or ()
2164 varlist = vars()
2165 on = scantable(self._math._mx_extract(self, 'on'))
2166 preoff = scantable(self._math._mx_extract(self, 'off'))
2167 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2168 from asapmath import quotient
2169 q = quotient(on, off, preserve)
2170 q._add_history("mx_quotient", varlist)
2171 print_log()
2172 return q
2173
2174 @print_log_dec
2175 def freq_switch(self, insitu=None):
2176 """
2177 Apply frequency switching to the data.
2178 Parameters:
2179 insitu: if False a new scantable is returned.
Otherwise, the switching is done in-situ
2181 The default is taken from .asaprc (False)
2182 Example:
2183 none
2184 """
2185 if insitu is None: insitu = rcParams['insitu']
2186 self._math._setinsitu(insitu)
2187 varlist = vars()
2188 s = scantable(self._math._freqswitch(self))
2189 s._add_history("freq_switch", varlist)
2190 print_log()
2191 if insitu: self._assign(s)
2192 else: return s
2193
2194 @print_log_dec
2195 def recalc_azel(self):
2196 """
2197 Recalculate the azimuth and elevation for each position.
2198 Parameters:
2199 none
2200 Example:
2201 """
2202 varlist = vars()
2203 self._recalcazel()
2204 self._add_history("recalc_azel", varlist)
2205 print_log()
2206 return
2207
2208 @print_log_dec
2209 def __add__(self, other):
2210 varlist = vars()
2211 s = None
2212 if isinstance(other, scantable):
2213 s = scantable(self._math._binaryop(self, other, "ADD"))
2214 elif isinstance(other, float):
2215 s = scantable(self._math._unaryop(self, other, "ADD", False))
2216 else:
2217 raise TypeError("Other input is not a scantable or float value")
2218 s._add_history("operator +", varlist)
2219 return s
2220
2221 @print_log_dec
2222 def __sub__(self, other):
2223 """
2224 implicit on all axes and on Tsys
2225 """
2226 varlist = vars()
2227 s = None
2228 if isinstance(other, scantable):
2229 s = scantable(self._math._binaryop(self, other, "SUB"))
2230 elif isinstance(other, float):
2231 s = scantable(self._math._unaryop(self, other, "SUB", False))
2232 else:
2233 raise TypeError("Other input is not a scantable or float value")
2234 s._add_history("operator -", varlist)
2235 return s
2236
2237 @print_log_dec
2238 def __mul__(self, other):
2239 """
2240 implicit on all axes and on Tsys
2241 """
2242 varlist = vars()
2243 s = None
2244 if isinstance(other, scantable):
2245 s = scantable(self._math._binaryop(self, other, "MUL"))
2246 elif isinstance(other, float):
2247 s = scantable(self._math._unaryop(self, other, "MUL", False))
2248 else:
2249 raise TypeError("Other input is not a scantable or float value")
2250 s._add_history("operator *", varlist)
2251 return s
2252
2253
2254 @print_log_dec
2255 def __div__(self, other):
2256 """
2257 implicit on all axes and on Tsys
2258 """
2259 varlist = vars()
2260 s = None
2261 if isinstance(other, scantable):
2262 s = scantable(self._math._binaryop(self, other, "DIV"))
2263 elif isinstance(other, float):
2264 if other == 0.0:
2265 raise ZeroDivisionError("Dividing by zero is not recommended")
2266 s = scantable(self._math._unaryop(self, other, "DIV", False))
2267 else:
2268 raise TypeError("Other input is not a scantable or float value")
2269 s._add_history("operator /", varlist)
2270 return s
2271
2272 def get_fit(self, row=0):
2273 """
2274 Print or return the stored fits for a row in the scantable
2275 Parameters:
row: the row to which the fit has been applied.
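Example (an illustrative sketch, assuming a fit has been stored for row 2):
# returns the fit parameters as a dict when rcParams['verbose'] is False
pars = scan.get_fit(row=2)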
2277 """
if row >= self.nrow():
2279 return
2280 from asap.asapfit import asapfit
2281 fit = asapfit(self._getfit(row))
2282 if rcParams['verbose']:
2283 #print fit
2284 asaplog.push( '%s' %(fit) )
2285 print_log()
2286 return
2287 else:
2288 return fit.as_dict()
2289
2290 def flag_nans(self):
2291 """
2292 Utility function to flag NaN values in the scantable.
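Example (an illustrative usage sketch):
scan.flag_nans()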
2293 """
2294 import numpy
2295 basesel = self.get_selection()
2296 for i in range(self.nrow()):
2297 sel = self.get_row_selector(i)
2298 self.set_selection(basesel+sel)
2299 nans = numpy.isnan(self._getspectrum(0))
2300 if numpy.any(nans):
2301 bnans = [ bool(v) for v in nans]
2302 self.flag(bnans)
2303 self.set_selection(basesel)
2304
2305 def get_row_selector(self, rowno):
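# Build a selector from the beam, IF, polarisation, scan and cycle ids
# of the given row, intended to single out just that row.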
2306 return selector(beams=self.getbeam(rowno),
2307 ifs=self.getif(rowno),
2308 pols=self.getpol(rowno),
2309 scans=self.getscan(rowno),
2310 cycles=self.getcycle(rowno))
2311
2312 def _add_history(self, funcname, parameters):
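# Record the call as a '##'-separated string of the form
# 'date##funcname##par=value##...' via the table history mechanism.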
2313 if not rcParams['scantable.history']:
2314 return
2315 # create date
2316 sep = "##"
2317 from datetime import datetime
2318 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2319 hist = dstr+sep
2320 hist += funcname+sep#cdate+sep
2321 if parameters.has_key('self'): del parameters['self']
2322 for k, v in parameters.iteritems():
2323 if type(v) is dict:
2324 for k2, v2 in v.iteritems():
2325 hist += k2
2326 hist += "="
2327 if isinstance(v2, scantable):
2328 hist += 'scantable'
2329 elif k2 == 'mask':
2330 if isinstance(v2, list) or isinstance(v2, tuple):
2331 hist += str(self._zip_mask(v2))
2332 else:
2333 hist += str(v2)
2334 else:
2335 hist += str(v2)
2336 else:
2337 hist += k
2338 hist += "="
2339 if isinstance(v, scantable):
2340 hist += 'scantable'
2341 elif k == 'mask':
2342 if isinstance(v, list) or isinstance(v, tuple):
2343 hist += str(self._zip_mask(v))
2344 else:
2345 hist += str(v)
2346 else:
2347 hist += str(v)
2348 hist += sep
2349 hist = hist[:-2] # remove trailing '##'
2350 self._addhistory(hist)
2351
2352
2353 def _zip_mask(self, mask):
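# Collapse a boolean channel mask into [start, stop) index segments
# covering the unmasked (True) runs, e.g. [0,1,1,0,1] -> [[1, 3], [4, 5]].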
2354 mask = list(mask)
2355 i = 0
2356 segments = []
2357 while mask[i:].count(1):
2358 i += mask[i:].index(1)
2359 if mask[i:].count(0):
2360 j = i + mask[i:].index(0)
2361 else:
2362 j = len(mask)
2363 segments.append([i, j])
2364 i = j
2365 return segments
2366
2367 def _get_ordinate_label(self):
2368 fu = "("+self.get_fluxunit()+")"
2369 import re
2370 lbl = "Intensity"
2371 if re.match(".K.", fu):
2372 lbl = "Brightness Temperature "+ fu
2373 elif re.match(".Jy.", fu):
2374 lbl = "Flux density "+ fu
2375 return lbl
2376
2377 def _check_ifs(self):
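# Rough consistency check: True if all IFs that contain channels appear
# to have the same channel count (compares the mean against the first).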
2378 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2379 nchans = filter(lambda t: t > 0, nchans)
2380 return (sum(nchans)/len(nchans) == nchans[0])
2381
2382 def _fill(self, names, unit, average, getpt, antenna):
2383 first = True
2384 fullnames = []
2385 for name in names:
2386 name = os.path.expandvars(name)
2387 name = os.path.expanduser(name)
2388 if not os.path.exists(name):
2389 msg = "File '%s' does not exists" % (name)
2390 if rcParams['verbose']:
2391 asaplog.push(msg)
2392 print_log( 'ERROR' )
2393 return
2394 raise IOError(msg)
2395 fullnames.append(name)
2396 if average:
2397 asaplog.push('Auto averaging integrations')
2398 stype = int(rcParams['scantable.storage'].lower() == 'disk')
2399 for name in fullnames:
2400 tbl = Scantable(stype)
2401 r = filler(tbl)
2402 rx = rcParams['scantable.reference']
2403 r.setreferenceexpr(rx)
2404 msg = "Importing %s..." % (name)
2405 asaplog.push(msg, False)
2406 print_log()
2407 r.open(name)# antenna, -1, -1, getpt)
2408 r.fill()
2409 if average:
2410 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2411 if not first:
2412 tbl = self._math._merge([self, tbl])
2413 Scantable.__init__(self, tbl)
2414 r.close()
2415 del r, tbl
2416 first = False
2417 if unit is not None:
2418 self.set_fluxunit(unit)
2419 if not is_casapy():
2420 self.set_freqframe(rcParams['scantable.freqframe'])
2421
2422 def __getitem__(self, key):
2423 if key < 0:
2424 key += self.nrow()
2425 if key >= self.nrow():
2426 raise IndexError("Row index out of range.")
2427 return self._getspectrum(key)
2428
2429 def __setitem__(self, key, value):
2430 if key < 0:
2431 key += self.nrow()
2432 if key >= self.nrow():
2433 raise IndexError("Row index out of range.")
2434 if not hasattr(value, "__len__") or \
2435 len(value) > self.nchan(self.getif(key)):
2436 raise ValueError("Spectrum length doesn't match.")
2437 return self._setspectrum(value, key)
2438
2439 def __len__(self):
2440 return self.nrow()
2441
2442 def __iter__(self):
2443 for i in range(len(self)):
2444 yield self[i]