source: trunk/python/scantable.py@ 1854

Last change on this file since 1854 was 1846, checked in by Malte Marquarding, 14 years ago

Documentation update to play nicer with sphinx

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 91.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, print_log, print_log_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or
18
19
def preserve_selection(func):
    """Decorator that saves the scantable's current selection before
    calling *func* and restores it afterwards.

    The restore now happens in a ``finally`` block, so the original
    selection is re-applied even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        basesel = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            # restore the caller's selection even on error
            obj.set_selection(basesel)
    return wrap
28
def is_scantable(filename):
    """Is the given file a scantable?

    A scantable is a directory containing a 'table.info' file but no
    'table.f1' file.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not os.path.isdir(filename):
        return False
    if os.path.exists(filename+'/table.f1'):
        return False
    return os.path.exists(filename+'/table.info')
40
41
42class scantable(Scantable):
43 """\
44 The ASAP container for scans (single-dish data).
45 """
46
    @print_log_dec
    def __init__(self, filename, average=None, unit=None, getpt=None,
                 antenna=None, parallactify=None):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        if getpt is None:
            getpt = True
        # antenna selection is not yet supported: warn the user, then
        # normalise whatever was passed into a comma-separated string
        if antenna is not None:
            asaplog.push("Antenna selection currently unsupported."
                         "Using '0'")
            print_log('WARN')
        if antenna is None:
            antenna = ''
        elif type(antenna) == int:
            antenna = '%s' % antenna
        elif type(antenna) == list:
            # mixed int/str entries are all rendered as strings
            tmpstr = ''
            for i in range( len(antenna) ):
                if type(antenna[i]) == int:
                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
                elif type(antenna[i]) == str:
                    tmpstr=tmpstr+antenna[i]+','
                else:
                    asaplog.push('Bad antenna selection.')
                    print_log('ERROR')
                    return
            antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot the call arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference construction from an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    if rcParams['verbose']:
                        asaplog.push(s)
                        print_log('ERROR')
                        return
                    raise IOError(s)
                if is_scantable(filename):
                    # open an existing asap table, either disk- or
                    # memory-backed depending on the rc setting
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif os.path.isdir(filename) \
                    and not os.path.exists(filename+'/table.f1'):
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    if rcParams['verbose']:
                        asaplog.push( msg )
                        print_log( 'ERROR' )
                        return
                    else:
                        raise IOError(msg)
                else:
                    # raw rpfits/sdfits/ms input: run through the filler
                    self._fill([filename], unit, average, getpt, antenna)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a sequence of file names: fill them all into one table
                self._fill(filename, unit, average, getpt, antenna)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
        print_log()
148
    @print_log_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
                         'SDFITS' (save as SDFITS file)
                         'ASCII' (saves as ascii text file)
                         'MS2' (saves as an aips++
                         MeasurementSet V2)
                         'FITS' (save as image FITS - not
                         readable by class)
                         'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the rc-configured default output format
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        # refuse to clobber an existing file unless overwrite was asked for
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                if rcParams['verbose']:
                    asaplog.push( msg )
                    print_log( 'ERROR' )
                    return
                else:
                    raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            # native format uses the built-in serialiser
            self._save(name)
        else:
            # every other format goes through the stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        print_log()
        return
205
206 def copy(self):
207 """Return a copy of this scantable.
208
209 *Note*:
210
211 This makes a full (deep) copy. scan2 = scan1 makes a reference.
212
213 Example::
214
215 copiedscan = scan.copy()
216
217 """
218 sd = scantable(Scantable._copy(self))
219 return sd
220
    def drop_scan(self, scanid=None):
        """\
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.

        Parameters:

            scanid:    a (list of) scan number(s)

        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            if rcParams['verbose']:
                asaplog.push( 'Please specify a scanno to drop from the scantable' )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError("No scan given")
        try:
            scanid = _to_list(scanid)
            # compute the set of scans that remain after dropping
            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
            for sid in scanid: allscans.remove(sid)
            if len(allscans) == 0:
                raise ValueError("Can't remove all scans")
        except ValueError:
            # a requested scanid is not present in the table
            if rcParams['verbose']:
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
                return
            else: raise
        try:
            # select the surviving scans and return a deep copy
            sel = selector(scans=allscans)
            return self._select_copy(sel)
        except RuntimeError:
            if rcParams['verbose']:
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
            else:
                raise
267
268 def _select_copy(self, selection):
269 orig = self.get_selection()
270 self.set_selection(orig+selection)
271 cp = self.copy()
272 self.set_selection(orig)
273 return cp
274
    def get_scan(self, scanid=None):
        """
        Return a specific scan (by scanno) or collection of scans (by
        source name) in a new scantable.

        *Note*:

            See scantable.drop_scan() for the inverse operation.

        Parameters:

            scanid:    a (list of) scanno or a source name, unix-style
                       patterns are accepted for source name matching, e.g.
                       '*_R' gets all 'ref' scans

        Example::

            # get all scans containing the source '323p459'
            newscan = scan.get_scan('323p459')
            # get all 'off' scans
            refscans = scan.get_scan('*_R')
            # get a subset of scans by scanno (as listed in scan.summary())
            newscan = scan.get_scan([0, 2, 7, 10])

        """
        if scanid is None:
            if rcParams['verbose']:
                asaplog.push( 'Please specify a scan no or name to retrieve'
                              ' from the scantable' )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError("No scan given")

        try:
            bsel = self.get_selection()
            sel = selector()
            # dispatch on the scanid type: source-name pattern,
            # single scan number, or list of scan numbers
            if type(scanid) is str:
                sel.set_name(scanid)
                return self._select_copy(sel)
            elif type(scanid) is int:
                sel.set_scans([scanid])
                return self._select_copy(sel)
            elif type(scanid) is list:
                sel.set_scans(scanid)
                return self._select_copy(sel)
            else:
                msg = "Illegal scanid type, use 'int' or 'list' if ints."
                if rcParams['verbose']:
                    asaplog.push( msg )
                    print_log( 'ERROR' )
                else:
                    raise TypeError(msg)
        except RuntimeError:
            if rcParams['verbose']:
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
            else: raise
338
339 def __str__(self):
340 return Scantable._summary(self, True)
341
342 def summary(self, filename=None):
343 """\
344 Print a summary of the contents of this scantable.
345
346 Parameters:
347
348 filename: the name of a file to write the putput to
349 Default - no file output
350
351 """
352 info = Scantable._summary(self, True)
353 if filename is not None:
354 if filename is "":
355 filename = 'scantable_summary.txt'
356 from os.path import expandvars, isdir
357 filename = expandvars(filename)
358 if not isdir(filename):
359 data = open(filename, 'w')
360 data.write(info)
361 data.close()
362 else:
363 msg = "Illegal file name '%s'." % (filename)
364 if rcParams['verbose']:
365 #print msg
366 asaplog.push( msg )
367 print_log( 'ERROR' )
368 else:
369 raise IOError(msg)
370 if rcParams['verbose']:
371 try:
372 from IPython.genutils import page as pager
373 except ImportError:
374 from pydoc import pager
375 pager(info)
376 else:
377 return info
378
379 def get_spectrum(self, rowno):
380 """Return the spectrum for the current row in the scantable as a list.
381
382 Parameters:
383
384 rowno: the row number to retrieve the spectrum from
385
386 """
387 return self._getspectrum(rowno)
388
389 def get_mask(self, rowno):
390 """Return the mask for the current row in the scantable as a list.
391
392 Parameters:
393
394 rowno: the row number to retrieve the mask from
395
396 """
397 return self._getmask(rowno)
398
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the given row in the scantable.

        Parameters:

             spec:    the new spectrum; its length must match the
                      current number of channels
             rowno:   the row number to set the spectrum for

        """
        # guard: spectrum length must equal the channel count
        # (note: 'assert' is stripped when python runs with -O)
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
410
411 def get_coordinate(self, rowno):
412 """Return the (spectral) coordinate for a a given 'rowno'.
413
414 *Note*:
415
416 * This coordinate is only valid until a scantable method modifies
417 the frequency axis.
418 * This coordinate does contain the original frequency set-up
419 NOT the new frame. The conversions however are done using the user
420 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
421 use scantable.freq_align first. Without it there is no closure,
422 i.e.::
423
424 c = myscan.get_coordinate(0)
425 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
426
427 Parameters:
428
429 rowno: the row number for the spectral coordinate
430
431 """
432 return coordinate(Scantable.get_coordinate(self, rowno))
433
434 def get_selection(self):
435 """\
436 Get the selection object currently set on this scantable.
437
438 Example::
439
440 sel = scan.get_selection()
441 sel.set_ifs(0) # select IF 0
442 scan.set_selection(sel) # apply modified selection
443
444 """
445 return selector(self._getselection())
446
    def set_selection(self, selection=None, **kw):
        """\
        Select a subset of the data. All following operations on this scantable
        are only applied to this selection.

        Parameters:

            selection:    a selector object (default unset the selection), or
                          any combination of "pols", "ifs", "beams", "scans",
                          "cycles", "name", "query"

        Examples::

            sel = selector()         # create a selection object
            sel.set_scans([0, 3])    # select SCANNO 0 and 3
            scan.set_selection(sel)  # set the selection
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection

        """
        if selection is None:
            # no selector object given: reset, or build one from keywords
            if len(kw) == 0:
                selection = selector()
            else:
                # validate the keywords before constructing the selector
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
482
483 def get_row(self, row=0, insitu=None):
484 """\
485 Select a row in the scantable.
486 Return a scantable with single row.
487
488 Parameters:
489
490 row: row no of integration, default is 0.
491 insitu: if False a new scantable is returned. Otherwise, the
492 scaling is done in-situ. The default is taken from .asaprc
493 (False)
494
495 """
496 if insitu is None: insitu = rcParams['insitu']
497 if not insitu:
498 workscan = self.copy()
499 else:
500 workscan = self
501 # Select a row
502 sel=selector()
503 sel.set_scans([workscan.getscan(row)])
504 sel.set_cycles([workscan.getcycle(row)])
505 sel.set_beams([workscan.getbeam(row)])
506 sel.set_ifs([workscan.getif(row)])
507 sel.set_polarisations([workscan.getpol(row)])
508 sel.set_name(workscan._getsourcename(row))
509 workscan.set_selection(sel)
510 if not workscan.nrow() == 1:
511 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
512 raise RuntimeError(msg)
513 del sel
514 if insitu:
515 self._assign(workscan)
516 else:
517 return workscan
518
    def stats(self, stat='stddev', mask=None, form='3.3f'):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
            mask:    an optional mask specifying where the statistic
                     should be determined.
            form:    format string to print statistic values

        Example::

            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=msk)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa (channel/freq/velocity)
        # of the extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # also locate the channel where the extremum occurs
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []
        if not rtnabc: statvals = self._math._stats(self, mask, stat)

        label=stat
        out = ""
        sep = '-'*50
        # build a per-row formatted report while collecting values
        for i in range(self.nrow()):
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            out +=  sep+"\n"

        if rcParams['verbose']:
            # write the report to a per-user temp file and feed it
            # back through the asap logger
            import os
            if os.environ.has_key( 'USER' ):
                usr=os.environ['USER']
            else:
                import commands
                usr=commands.getoutput( 'whoami' )
            tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
            f=open(tmpfile,'w')
            print >> f, sep
            print >> f, ' %s %s' % (label, statunit)
            print >> f, sep
            print >> f, out
            f.close()
            f=open(tmpfile,'r')
            x=f.readlines()
            f.close()
            blanc=''
            asaplog.push(blanc.join(x), False)
            print_log()
        return statvals
613
614 def chan2data(self, rowno=0, chan=0):
615 """\
616 Returns channel/frequency/velocity and spectral value
617 at an arbitrary row and channel in the scantable.
618
619 Parameters:
620
621 rowno: a row number in the scantable. Default is the
622 first row, i.e. rowno=0
623 chan: a channel in the scantable. Default is the first
624 channel, i.e. pos=0
625
626 """
627 if isinstance(rowno, int) and isinstance(chan, int):
628 qx = {'unit': self.get_unit(),
629 'value': self._getabcissa(rowno)[chan]}
630 qy = {'unit': self.get_fluxunit(),
631 'value': self._getspectrum(rowno)[chan]}
632 return qx, qy
633
634 def stddev(self, mask=None):
635 """\
636 Determine the standard deviation of the current beam/if/pol
637 Takes a 'mask' as an optional parameter to specify which
638 channels should be excluded.
639
640 Parameters:
641
642 mask: an optional mask specifying where the standard
643 deviation should be determined.
644
645 Example::
646
647 scan.set_unit('channel')
648 msk = scan.create_mask([100, 200], [500, 600])
649 scan.stddev(mask=m)
650
651 """
652 return self.stats(stat='stddev', mask=mask);
653
654
655 def get_column_names(self):
656 """\
657 Return a list of column names, which can be used for selection.
658 """
659 return list(Scantable.get_column_names(self))
660
661 def get_tsys(self, row=-1):
662 """\
663 Return the System temperatures.
664
665 Parameters:
666
667 row: the rowno to get the information for. (default all rows)
668
669 Returns:
670
671 a list of Tsys values for the current selection
672
673 """
674 if row > -1:
675 return self._get_column(self._gettsys, row)
676 return self._row_callback(self._gettsys, "Tsys")
677
678
679 def get_weather(self, row=-1):
680 """\
681 Return the weather informations.
682
683 Parameters:
684
685 row: the rowno to get the information for. (default all rows)
686
687 Returns:
688
689 a dict or list of of dicts of values for the current selection
690
691 """
692
693 values = self._get_column(self._get_weather, row)
694 if row > -1:
695 return {'temperature': values[0],
696 'pressure': values[1], 'humidity' : values[2],
697 'windspeed' : values[3], 'windaz' : values[4]
698 }
699 else:
700 out = []
701 for r in values:
702
703 out.append({'temperature': r[0],
704 'pressure': r[1], 'humidity' : r[2],
705 'windspeed' : r[3], 'windaz' : r[4]
706 })
707 return out
708
    def _row_callback(self, callback, label):
        """Apply *callback* to every row and collect the results.

        Builds a per-row text report (scan/time/beam/IF/pol plus the
        callback value, headed by *label*) and pushes it to the asap
        log when verbose. Returns the list of callback results.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'
        if rcParams['verbose']:
            asaplog.push(sep)
            asaplog.push(" %s" % (label))
            asaplog.push(sep)
            asaplog.push(out)
            print_log()
        return outvec
732
733 def _get_column(self, callback, row=-1):
734 """
735 """
736 if row == -1:
737 return [callback(i) for i in range(self.nrow())]
738 else:
739 if 0 <= row < self.nrow():
740 return callback(row)
741
742
743 def get_time(self, row=-1, asdatetime=False):
744 """\
745 Get a list of time stamps for the observations.
746 Return a datetime object for each integration time stamp in the scantable.
747
748 Parameters:
749
750 row: row no of integration. Default -1 return all rows
751 asdatetime: return values as datetime objects rather than strings
752
753 """
754 from time import strptime
755 from datetime import datetime
756 times = self._get_column(self._gettime, row)
757 if not asdatetime:
758 return times
759 format = "%Y/%m/%d/%H:%M:%S"
760 if isinstance(times, list):
761 return [datetime(*strptime(i, format)[:6]) for i in times]
762 else:
763 return datetime(*strptime(times, format)[:6])
764
765
766 def get_inttime(self, row=-1):
767 """\
768 Get a list of integration times for the observations.
769 Return a time in seconds for each integration in the scantable.
770
771 Parameters:
772
773 row: row no of integration. Default -1 return all rows.
774
775 """
776 return self._get_column(self._getinttime, row)
777
778
779 def get_sourcename(self, row=-1):
780 """\
781 Get a list source names for the observations.
782 Return a string for each integration in the scantable.
783 Parameters:
784
785 row: row no of integration. Default -1 return all rows.
786
787 """
788 return self._get_column(self._getsourcename, row)
789
790 def get_elevation(self, row=-1):
791 """\
792 Get a list of elevations for the observations.
793 Return a float for each integration in the scantable.
794
795 Parameters:
796
797 row: row no of integration. Default -1 return all rows.
798
799 """
800 return self._get_column(self._getelevation, row)
801
802 def get_azimuth(self, row=-1):
803 """\
804 Get a list of azimuths for the observations.
805 Return a float for each integration in the scantable.
806
807 Parameters:
808 row: row no of integration. Default -1 return all rows.
809
810 """
811 return self._get_column(self._getazimuth, row)
812
813 def get_parangle(self, row=-1):
814 """\
815 Get a list of parallactic angles for the observations.
816 Return a float for each integration in the scantable.
817
818 Parameters:
819
820 row: row no of integration. Default -1 return all rows.
821
822 """
823 return self._get_column(self._getparangle, row)
824
825 def get_direction(self, row=-1):
826 """
827 Get a list of Positions on the sky (direction) for the observations.
828 Return a string for each integration in the scantable.
829 Parameters:
830 row: row no of integration. Default -1 return all rows
831 Example:
832 none
833 """
834 return self._get_column(self._getdirection, row)
835
836 def get_directionval(self, row=-1):
837 """\
838 Get a list of Positions on the sky (direction) for the observations.
839 Return a float for each integration in the scantable.
840
841 Parameters:
842
843 row: row no of integration. Default -1 return all rows
844
845 """
846 return self._get_column(self._getdirectionvec, row)
847
848 @print_log_dec
849 def set_unit(self, unit='channel'):
850 """\
851 Set the unit for all following operations on this scantable
852
853 Parameters:
854
855 unit: optional unit, default is 'channel'. Use one of '*Hz',
856 'km/s', 'channel' or equivalent ''
857
858 """
859 varlist = vars()
860 if unit in ['', 'pixel', 'channel']:
861 unit = ''
862 inf = list(self._getcoordinfo())
863 inf[0] = unit
864 self._setcoordinfo(inf)
865 self._add_history("set_unit", varlist)
866
867 @print_log_dec
868 def set_instrument(self, instr):
869 """\
870 Set the instrument for subsequent processing.
871
872 Parameters:
873
874 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
875 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
876
877 """
878 self._setInstrument(instr)
879 self._add_history("set_instument", vars())
880 print_log()
881
882 @print_log_dec
883 def set_feedtype(self, feedtype):
884 """\
885 Overwrite the feed type, which might not be set correctly.
886
887 Parameters:
888
889 feedtype: 'linear' or 'circular'
890
891 """
892 self._setfeedtype(feedtype)
893 self._add_history("set_feedtype", vars())
894 print_log()
895
896 @print_log_dec
897 def set_doppler(self, doppler='RADIO'):
898 """\
899 Set the doppler for all following operations on this scantable.
900
901 Parameters:
902
903 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
904
905 """
906 varlist = vars()
907 inf = list(self._getcoordinfo())
908 inf[2] = doppler
909 self._setcoordinfo(inf)
910 self._add_history("set_doppler", vars())
911 print_log()
912
913 @print_log_dec
914 def set_freqframe(self, frame=None):
915 """\
916 Set the frame type of the Spectral Axis.
917
918 Parameters:
919
920 frame: an optional frame type, default 'LSRK'. Valid frames are:
921 'TOPO', 'LSRD', 'LSRK', 'BARY',
922 'GEO', 'GALACTO', 'LGROUP', 'CMB'
923
924 Example::
925
926 scan.set_freqframe('BARY')
927
928 """
929 frame = frame or rcParams['scantable.freqframe']
930 varlist = vars()
931 # "REST" is not implemented in casacore
932 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
933 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
934 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
935 'GEO', 'GALACTO', 'LGROUP', 'CMB']
936
937 if frame in valid:
938 inf = list(self._getcoordinfo())
939 inf[1] = frame
940 self._setcoordinfo(inf)
941 self._add_history("set_freqframe", varlist)
942 else:
943 msg = "Please specify a valid freq type. Valid types are:\n", valid
944 if rcParams['verbose']:
945 #print msg
946 asaplog.push( msg )
947 print_log( 'ERROR' )
948 else:
949 raise TypeError(msg)
950 print_log()
951
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        try:
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            # invalid frame name: log it (verbose) or re-raise
            if rcParams['verbose']:
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
            else:
                raise
        self._add_history("set_dirframe", varlist)
978
979 def get_unit(self):
980 """\
981 Get the default unit set in this scantable
982
983 Returns:
984
985 A unit string
986
987 """
988 inf = self._getcoordinfo()
989 unit = inf[0]
990 if unit == '': unit = 'channel'
991 return unit
992
993 def get_abcissa(self, rowno=0):
994 """\
995 Get the abcissa in the current coordinate setup for the currently
996 selected Beam/IF/Pol
997
998 Parameters:
999
1000 rowno: an optional row number in the scantable. Default is the
1001 first row, i.e. rowno=0
1002
1003 Returns:
1004
1005 The abcissa values and the format string (as a dictionary)
1006
1007 """
1008 abc = self._getabcissa(rowno)
1009 lbl = self._getabcissalabel(rowno)
1010 print_log()
1011 return abc, lbl
1012
    def flag(self, mask=None, unflag=False):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            mask:      an optional channel mask, created with create_mask. Default
                       (no mask) is all channels.

            unflag:    if True, unflag the data

        """
        varlist = vars()
        # None -> empty mask, meaning 'all channels'
        mask = mask or []
        try:
            self._flag(mask, unflag)
        except RuntimeError, msg:
            if rcParams['verbose']:
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        self._add_history("flag", varlist)
1037
1038 def flag_row(self, rows=[], unflag=False):
1039 """\
1040 Flag the selected data in row-based manner.
1041
1042 Parameters:
1043
1044 rows: list of row numbers to be flagged. Default is no row
1045 (must be explicitly specified to execute row-based flagging).
1046 unflag: if True, unflag the data.
1047
1048 """
1049 varlist = vars()
1050 try:
1051 self._flag_row(rows, unflag)
1052 except RuntimeError, msg:
1053 if rcParams['verbose']:
1054 print_log()
1055 asaplog.push( str(msg) )
1056 print_log('ERROR')
1057 return
1058 else: raise
1059 self._add_history("flag_row", varlist)
1060
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres:      upper threshold.

            dthres:      lower threshold

            clipoutside: True for flagging data outside the range
                         [dthres:uthres]. False for flagging data inside
                         the range.

            unflag:      if True, unflag the data.

        """
        varlist = vars()
        try:
            self._clip(uthres, dthres, clipoutside, unflag)
        except RuntimeError, msg:
            if rcParams['verbose']:
                print_log()
                asaplog.push(str(msg))
                print_log('ERROR')
                return
            else: raise
        self._add_history("clip", varlist)
1086
    @print_log_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """\
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.

        Parameters:

            start:    the start frequency (really a period within the
                      bandwidth) or period to remove

            end:      the end frequency or period to remove

            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels

            insitu:   if False a new scantable is returned. Otherwise the
                      operation is applied in-situ. The default is taken
                      from .asaprc

        *Notes*:

            It is recommended to flag edges of the band or strong
            signals beforehand.

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # conversion factors from the supported frequency units to Hz
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        try:
            if unit == "":
                # start/end are explicit lag channels
                s = scantable(self._math._lag_flag(self, start, end, "lags"))
            else:
                # convert start/end to Hz and flag in frequency terms
                s = scantable(self._math._lag_flag(self, start*base[unit],
                                                   end*base[unit], "frequency"))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        s._add_history("lag_flag", varlist)
        print_log()
        if insitu:
            self._assign(s)
        else:
            return s
1134
    @print_log_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive)specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            # mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args' - UGLY!!!
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            # a single point [x] is the degenerate window [x, x]
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # swap so that window[0] <= window[1]
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        print_log()
        return msk
1205
1206 def get_masklist(self, mask=None, row=0):
1207 """\
1208 Compute and return a list of mask windows, [min, max].
1209
1210 Parameters:
1211
1212 mask: channel mask, created with create_mask.
1213 row: calcutate the masklist using the specified row
1214 for unit conversions, default is row=0
1215 only necessary if frequency varies over rows.
1216
1217 Returns:
1218
1219 [min, max], [min2, max2], ...
1220 Pairs of start/end points (inclusive)specifying
1221 the masked regions
1222
1223 """
1224 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1225 raise TypeError("The mask should be list or tuple.")
1226 if len(mask) < 2:
1227 raise TypeError("The mask elements should be > 1")
1228 if self.nchan() != len(mask):
1229 msg = "Number of channels in scantable != number of mask elements"
1230 raise TypeError(msg)
1231 data = self._getabcissa(row)
1232 u = self._getcoordinfo()[0]
1233 if rcParams['verbose']:
1234 if u == "": u = "channel"
1235 msg = "The current mask window unit is %s" % u
1236 i = self._check_ifs()
1237 if not i:
1238 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1239 asaplog.push(msg)
1240 masklist=[]
1241 ist, ien = None, None
1242 ist, ien=self.get_mask_indices(mask)
1243 if ist is not None and ien is not None:
1244 for i in xrange(len(ist)):
1245 range=[data[ist[i]],data[ien[i]]]
1246 range.sort()
1247 masklist.append([range[0],range[1]])
1248 return masklist
1249
1250 def get_mask_indices(self, mask=None):
1251 """\
1252 Compute and Return lists of mask start indices and mask end indices.
1253 Parameters:
1254 mask: channel mask, created with create_mask.
1255
1256 Returns:
1257
1258 List of mask start indices and that of mask end indices,
1259 i.e., [istart1,istart2,....], [iend1,iend2,....].
1260
1261 """
1262 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1263 raise TypeError("The mask should be list or tuple.")
1264 if len(mask) < 2:
1265 raise TypeError("The mask elements should be > 1")
1266 istart=[]
1267 iend=[]
1268 if mask[0]: istart.append(0)
1269 for i in range(len(mask)-1):
1270 if not mask[i] and mask[i+1]:
1271 istart.append(i+1)
1272 elif mask[i] and not mask[i+1]:
1273 iend.append(i)
1274 if mask[len(mask)-1]: iend.append(len(mask)-1)
1275 if len(istart) != len(iend):
1276 raise RuntimeError("Numbers of mask start != mask end.")
1277 for i in range(len(istart)):
1278 if istart[i] > iend[i]:
1279 raise RuntimeError("Mask start index > mask end index")
1280 break
1281 return istart,iend
1282
1283# def get_restfreqs(self):
1284# """
1285# Get the restfrequency(s) stored in this scantable.
1286# The return value(s) are always of unit 'Hz'
1287# Parameters:
1288# none
1289# Returns:
1290# a list of doubles
1291# """
1292# return list(self._getrestfreqs())
1293
1294 def get_restfreqs(self, ids=None):
1295 """\
1296 Get the restfrequency(s) stored in this scantable.
1297 The return value(s) are always of unit 'Hz'
1298
1299 Parameters:
1300
1301 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1302 be retrieved
1303
1304 Returns:
1305
1306 dictionary containing ids and a list of doubles for each id
1307
1308 """
1309 if ids is None:
1310 rfreqs={}
1311 idlist = self.getmolnos()
1312 for i in idlist:
1313 rfreqs[i]=list(self._getrestfreqs(i))
1314 return rfreqs
1315 else:
1316 if type(ids)==list or type(ids)==tuple:
1317 rfreqs={}
1318 for i in ids:
1319 rfreqs[i]=list(self._getrestfreqs(i))
1320 return rfreqs
1321 else:
1322 return list(self._getrestfreqs(ids))
1323 #return list(self._getrestfreqs(ids))
1324
1325 def set_restfreqs(self, freqs=None, unit='Hz'):
1326 """\
1327 Set or replace the restfrequency specified and
1328 If the 'freqs' argument holds a scalar,
1329 then that rest frequency will be applied to all the selected
1330 data. If the 'freqs' argument holds
1331 a vector, then it MUST be of equal or smaller length than
1332 the number of IFs (and the available restfrequencies will be
1333 replaced by this vector). In this case, *all* data have
1334 the restfrequency set per IF according
1335 to the corresponding value you give in the 'freqs' vector.
1336 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1337 IF 1 gets restfreq 2e9.
1338
1339 You can also specify the frequencies via a linecatalog.
1340
1341 Parameters:
1342
1343 freqs: list of rest frequency values or string idenitfiers
1344 unit: unit for rest frequency (default 'Hz')
1345
1346
1347 Example::
1348
1349 # set the given restfrequency for the all currently selected IFs
1350 scan.set_restfreqs(freqs=1.4e9)
1351 # set restfrequencies for the n IFs (n > 1) in the order of the
1352 # list, i.e
1353 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1354 # len(list_of_restfreqs) == nIF
1355 # for nIF == 1 the following will set multiple restfrequency for
1356 # that IF
1357 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1358 # set multiple restfrequencies per IF. as a list of lists where
1359 # the outer list has nIF elements, the inner s arbitrary
1360 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1361
1362 *Note*:
1363
1364 To do more sophisticate Restfrequency setting, e.g. on a
1365 source and IF basis, use scantable.set_selection() before using
1366 this function::
1367
1368 # provided your scantable is called scan
1369 selection = selector()
1370 selection.set_name("ORION*")
1371 selection.set_ifs([1])
1372 scan.set_selection(selection)
1373 scan.set_restfreqs(freqs=86.6e9)
1374
1375 """
1376 varlist = vars()
1377 from asap import linecatalog
1378 # simple value
1379 if isinstance(freqs, int) or isinstance(freqs, float):
1380 self._setrestfreqs([freqs], [""], unit)
1381 # list of values
1382 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1383 # list values are scalars
1384 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1385 if len(freqs) == 1:
1386 self._setrestfreqs(freqs, [""], unit)
1387 else:
1388 # allow the 'old' mode of setting mulitple IFs
1389 sel = selector()
1390 savesel = self._getselection()
1391 iflist = self.getifnos()
1392 if len(freqs)>len(iflist):
1393 raise ValueError("number of elements in list of list "
1394 "exeeds the current IF selections")
1395 iflist = self.getifnos()
1396 for i, fval in enumerate(freqs):
1397 sel.set_ifs(iflist[i])
1398 self._setselection(sel)
1399 self._setrestfreqs([fval], [""], unit)
1400 self._setselection(savesel)
1401
1402 # list values are dict, {'value'=, 'name'=)
1403 elif isinstance(freqs[-1], dict):
1404 values = []
1405 names = []
1406 for d in freqs:
1407 values.append(d["value"])
1408 names.append(d["name"])
1409 self._setrestfreqs(values, names, unit)
1410 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1411 sel = selector()
1412 savesel = self._getselection()
1413 iflist = self.getifnos()
1414 if len(freqs)>len(iflist):
1415 raise ValueError("number of elements in list of list exeeds"
1416 " the current IF selections")
1417 for i, fval in enumerate(freqs):
1418 sel.set_ifs(iflist[i])
1419 self._setselection(sel)
1420 self._setrestfreqs(fval, [""], unit)
1421 self._setselection(savesel)
1422 # freqs are to be taken from a linecatalog
1423 elif isinstance(freqs, linecatalog):
1424 sel = selector()
1425 savesel = self._getselection()
1426 for i in xrange(freqs.nrow()):
1427 sel.set_ifs(iflist[i])
1428 self._setselection(sel)
1429 self._setrestfreqs([freqs.get_frequency(i)],
1430 [freqs.get_name(i)], "MHz")
1431 # ensure that we are not iterating past nIF
1432 if i == self.nif()-1: break
1433 self._setselection(savesel)
1434 else:
1435 return
1436 self._add_history("set_restfreqs", varlist)
1437
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate straight to the (C++-backed) base class implementation
        Scantable.shift_refpix(self, delta)
1453
1454 def history(self, filename=None):
1455 """\
1456 Print the history. Optionally to a file.
1457
1458 Parameters:
1459
1460 filename: The name of the file to save the history to.
1461
1462 """
1463 hist = list(self._gethistory())
1464 out = "-"*80
1465 for h in hist:
1466 if h.startswith("---"):
1467 out += "\n"+h
1468 else:
1469 items = h.split("##")
1470 date = items[0]
1471 func = items[1]
1472 items = items[2:]
1473 out += "\n"+date+"\n"
1474 out += "Function: %s\n Parameters:" % (func)
1475 for i in items:
1476 s = i.split("=")
1477 out += "\n %s = %s" % (s[0], s[1])
1478 out += "\n"+"-"*80
1479 if filename is not None:
1480 if filename is "":
1481 filename = 'scantable_history.txt'
1482 import os
1483 filename = os.path.expandvars(os.path.expanduser(filename))
1484 if not os.path.isdir(filename):
1485 data = open(filename, 'w')
1486 data.write(out)
1487 data.close()
1488 else:
1489 msg = "Illegal file name '%s'." % (filename)
1490 if rcParams['verbose']:
1491 #print msg
1492 asaplog.push( msg )
1493 print_log( 'ERROR' )
1494 else:
1495 raise IOError(msg)
1496 if rcParams['verbose']:
1497 try:
1498 from IPython.genutils import page as pager
1499 except ImportError:
1500 from pydoc import pager
1501 pager(out)
1502 else:
1503 return out
1504 return
1505 #
1506 # Maths business
1507 #
    @print_log_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """\
        Return the (time) weighted average of a scan.

        *Note*:

            in channels only - align if necessary

        Parameters:

            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together,
            weight:   Weighting scheme.
                      'none'     (mean no weight)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   ( median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.

        Example::

            # time average the scantable without using a mask
            newscan = scan.average_time()

        """
        varlist = vars()
        # normalise arguments to the forms the C++ layer expects
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        scan = (self, )
        try:
            if align:
                # regrid in velocity relative to the first spectrum's time
                scan = (self.freq_align(insitu=False), )
            s = None
            if weight.upper() == 'MEDIAN':
                # median averaging uses a dedicated channel averager
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            # in verbose mode the error is logged and None is returned;
            # otherwise it propagates to the caller
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        s._add_history("average_time", varlist)
        print_log()
        return s
1566
1567 @print_log_dec
1568 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1569 """\
1570 Return a scan where all spectra are converted to either
1571 Jansky or Kelvin depending upon the flux units of the scan table.
1572 By default the function tries to look the values up internally.
1573 If it can't find them (or if you want to over-ride), you must
1574 specify EITHER jyperk OR eta (and D which it will try to look up
1575 also if you don't set it). jyperk takes precedence if you set both.
1576
1577 Parameters:
1578
1579 jyperk: the Jy / K conversion factor
1580 eta: the aperture efficiency
1581 d: the geomtric diameter (metres)
1582 insitu: if False a new scantable is returned.
1583 Otherwise, the scaling is done in-situ
1584 The default is taken from .asaprc (False)
1585
1586 """
1587 if insitu is None: insitu = rcParams['insitu']
1588 self._math._setinsitu(insitu)
1589 varlist = vars()
1590 jyperk = jyperk or -1.0
1591 d = d or -1.0
1592 eta = eta or -1.0
1593 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1594 s._add_history("convert_flux", varlist)
1595 print_log()
1596 if insitu: self._assign(s)
1597 else: return s
1598
1599 @print_log_dec
1600 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1601 """\
1602 Return a scan after applying a gain-elevation correction.
1603 The correction can be made via either a polynomial or a
1604 table-based interpolation (and extrapolation if necessary).
1605 You specify polynomial coefficients, an ascii table or neither.
1606 If you specify neither, then a polynomial correction will be made
1607 with built in coefficients known for certain telescopes (an error
1608 will occur if the instrument is not known).
1609 The data and Tsys are *divided* by the scaling factors.
1610
1611 Parameters:
1612
1613 poly: Polynomial coefficients (default None) to compute a
1614 gain-elevation correction as a function of
1615 elevation (in degrees).
1616 filename: The name of an ascii file holding correction factors.
1617 The first row of the ascii file must give the column
1618 names and these MUST include columns
1619 "ELEVATION" (degrees) and "FACTOR" (multiply data
1620 by this) somewhere.
1621 The second row must give the data type of the
1622 column. Use 'R' for Real and 'I' for Integer.
1623 An example file would be
1624 (actual factors are arbitrary) :
1625
1626 TIME ELEVATION FACTOR
1627 R R R
1628 0.1 0 0.8
1629 0.2 20 0.85
1630 0.3 40 0.9
1631 0.4 60 0.85
1632 0.5 80 0.8
1633 0.6 90 0.75
1634 method: Interpolation method when correcting from a table.
1635 Values are "nearest", "linear" (default), "cubic"
1636 and "spline"
1637 insitu: if False a new scantable is returned.
1638 Otherwise, the scaling is done in-situ
1639 The default is taken from .asaprc (False)
1640
1641 """
1642
1643 if insitu is None: insitu = rcParams['insitu']
1644 self._math._setinsitu(insitu)
1645 varlist = vars()
1646 poly = poly or ()
1647 from os.path import expandvars
1648 filename = expandvars(filename)
1649 s = scantable(self._math._gainel(self, poly, filename, method))
1650 s._add_history("gain_el", varlist)
1651 print_log()
1652 if insitu:
1653 self._assign(s)
1654 else:
1655 return s
1656
1657 @print_log_dec
1658 def freq_align(self, reftime=None, method='cubic', insitu=None):
1659 """\
1660 Return a scan where all rows have been aligned in frequency/velocity.
1661 The alignment frequency frame (e.g. LSRK) is that set by function
1662 set_freqframe.
1663
1664 Parameters:
1665 reftime: reference time to align at. By default, the time of
1666 the first row of data is used.
1667 method: Interpolation method for regridding the spectra.
1668 Choose from "nearest", "linear", "cubic" (default)
1669 and "spline"
1670 insitu: if False a new scantable is returned.
1671 Otherwise, the scaling is done in-situ
1672 The default is taken from .asaprc (False)
1673
1674 """
1675 if insitu is None: insitu = rcParams["insitu"]
1676 self._math._setinsitu(insitu)
1677 varlist = vars()
1678 reftime = reftime or ""
1679 s = scantable(self._math._freq_align(self, reftime, method))
1680 s._add_history("freq_align", varlist)
1681 print_log()
1682 if insitu: self._assign(s)
1683 else: return s
1684
1685 @print_log_dec
1686 def opacity(self, tau=None, insitu=None):
1687 """\
1688 Apply an opacity correction. The data
1689 and Tsys are multiplied by the correction factor.
1690
1691 Parameters:
1692 tau: (list of) opacity from which the correction factor is
1693 exp(tau*ZD)
1694 where ZD is the zenith-distance.
1695 If a list is provided, it has to be of length nIF,
1696 nIF*nPol or 1 and in order of IF/POL, e.g.
1697 [opif0pol0, opif0pol1, opif1pol0 ...]
1698 if tau is `None` the opacities are determined from a
1699 model.
1700 insitu: if False a new scantable is returned.
1701 Otherwise, the scaling is done in-situ
1702 The default is taken from .asaprc (False)
1703
1704 """
1705 if insitu is None: insitu = rcParams['insitu']
1706 self._math._setinsitu(insitu)
1707 varlist = vars()
1708 if not hasattr(tau, "__len__"):
1709 tau = [tau]
1710 s = scantable(self._math._opacity(self, tau))
1711 s._add_history("opacity", varlist)
1712 print_log()
1713 if insitu: self._assign(s)
1714 else: return s
1715
1716 @print_log_dec
1717 def bin(self, width=5, insitu=None):
1718 """\
1719 Return a scan where all spectra have been binned up.
1720
1721 Parameters:
1722
1723 width: The bin width (default=5) in pixels
1724 insitu: if False a new scantable is returned.
1725 Otherwise, the scaling is done in-situ
1726 The default is taken from .asaprc (False)
1727
1728 """
1729 if insitu is None: insitu = rcParams['insitu']
1730 self._math._setinsitu(insitu)
1731 varlist = vars()
1732 s = scantable(self._math._bin(self, width))
1733 s._add_history("bin", varlist)
1734 print_log()
1735 if insitu:
1736 self._assign(s)
1737 else:
1738 return s
1739
1740 @print_log_dec
1741 def resample(self, width=5, method='cubic', insitu=None):
1742 """\
1743 Return a scan where all spectra have been binned up.
1744
1745 Parameters:
1746
1747 width: The bin width (default=5) in pixels
1748 method: Interpolation method when correcting from a table.
1749 Values are "nearest", "linear", "cubic" (default)
1750 and "spline"
1751 insitu: if False a new scantable is returned.
1752 Otherwise, the scaling is done in-situ
1753 The default is taken from .asaprc (False)
1754
1755 """
1756 if insitu is None: insitu = rcParams['insitu']
1757 self._math._setinsitu(insitu)
1758 varlist = vars()
1759 s = scantable(self._math._resample(self, method, width))
1760 s._add_history("resample", varlist)
1761 print_log()
1762 if insitu: self._assign(s)
1763 else: return s
1764
1765 @print_log_dec
1766 def average_pol(self, mask=None, weight='none'):
1767 """\
1768 Average the Polarisations together.
1769
1770 Parameters:
1771
1772 mask: An optional mask defining the region, where the
1773 averaging will be applied. The output will have all
1774 specified points masked.
1775 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1776 weighted), or 'tsys' (1/Tsys**2 weighted)
1777
1778 """
1779 varlist = vars()
1780 mask = mask or ()
1781 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1782 s._add_history("average_pol", varlist)
1783 print_log()
1784 return s
1785
1786 @print_log_dec
1787 def average_beam(self, mask=None, weight='none'):
1788 """\
1789 Average the Beams together.
1790
1791 Parameters:
1792 mask: An optional mask defining the region, where the
1793 averaging will be applied. The output will have all
1794 specified points masked.
1795 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1796 weighted), or 'tsys' (1/Tsys**2 weighted)
1797
1798 """
1799 varlist = vars()
1800 mask = mask or ()
1801 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1802 s._add_history("average_beam", varlist)
1803 print_log()
1804 return s
1805
1806 def parallactify(self, pflag):
1807 """\
1808 Set a flag to indicate whether this data should be treated as having
1809 been 'parallactified' (total phase == 0.0)
1810
1811 Parameters:
1812 pflag: Bool indicating whether to turn this on (True) or
1813 off (False)
1814
1815 """
1816 varlist = vars()
1817 self._parallactify(pflag)
1818 self._add_history("parallactify", varlist)
1819
    @print_log_dec
    def convert_pol(self, poltype=None):
        """\
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.

        Parameters:

            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"

        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            # in verbose mode the error is logged and None is returned;
            # otherwise it propagates to the caller
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        print_log()
        return s
1846
    @print_log_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'
            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.
            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.
            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the input for interactive comparison/rollback
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            # show each row (original vs smoothed) and let the user
            # accept or reject the smoothing per row
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the unsmoothed spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        print_log()
        if insitu: self._assign(s)
        else: return s
1921
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
                      insitu=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            uselin:     use linear polynomial fit
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        Example::

            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # default mask: include all channels
            mask = [True for i in xrange(self.nchan(-1))]

        from asap.asapfitter import fitter
        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            rows = range(workscan.nrow())
            # per-row fit parameters are collected on this attribute;
            # NOTE: it does not track later changes to the scantable
            if len(rows) > 0:
                self.blpars = []

            for r in rows:
                # take into account flagtra info (CAS-1434)
                flagtra = workscan._getmask(r)
                actualmask = mask[:]
                if len(actualmask) == 0:
                    actualmask = list(flagtra[:])
                else:
                    if len(actualmask) != len(flagtra):
                        raise RuntimeError, "Mask and flagtra have different length"
                    else:
                        for i in range(0, len(actualmask)):
                            actualmask[i] = actualmask[i] and flagtra[i]
                f.set_scan(workscan, actualmask)
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected interactively: record no parameters
                        self.blpars.append(None)
                        continue
                # replace the spectrum by the fit residual
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            print_log()
            if insitu: self._assign(workscan)
            else: return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError(msg)
2011
2012
2013 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
2014 threshold=3, chan_avg_limit=1, plot=False,
2015 insitu=None):
2016 """\
2017 Return a scan which has been baselined (all rows) by a polynomial.
2018 Spectral lines are detected first using linefinder and masked out
2019 to avoid them affecting the baseline solution.
2020
2021 Parameters:
2022
2023 mask: an optional mask retreived from scantable
2024
2025 edge: an optional number of channel to drop at the edge of
2026 spectrum. If only one value is
2027 specified, the same number will be dropped from
2028 both sides of the spectrum. Default is to keep
2029 all channels. Nested tuples represent individual
2030 edge selection for different IFs (a number of spectral
2031 channels can be different)
2032
2033 order: the order of the polynomial (default is 0)
2034
2035 threshold: the threshold used by line finder. It is better to
2036 keep it large as only strong lines affect the
2037 baseline solution.
2038
2039 chan_avg_limit:
2040 a maximum number of consequtive spectral channels to
2041 average during the search of weak and broad lines.
2042 The default is no averaging (and no search for weak
2043 lines). If such lines can affect the fitted baseline
2044 (e.g. a high order polynomial is fitted), increase this
2045 parameter (usually values up to 8 are reasonable). Most
2046 users of this method should find the default value
2047 sufficient.
2048
2049 plot: plot the fit and the residual. In this each
2050 indivual fit has to be approved, by typing 'y'
2051 or 'n'
2052
2053 insitu: if False a new scantable is returned.
2054 Otherwise, the scaling is done in-situ
2055 The default is taken from .asaprc (False)
2056
2057
2058 Example::
2059
2060 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2061
2062 """
2063 if insitu is None: insitu = rcParams['insitu']
2064 varlist = vars()
2065 from asap.asapfitter import fitter
2066 from asap.asaplinefind import linefinder
2067 from asap import _is_sequence_or_number as _is_valid
2068
2069 # check whether edge is set up for each IF individually
2070 individualedge = False;
2071 if len(edge) > 1:
2072 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2073 individualedge = True;
2074
2075 if not _is_valid(edge, int) and not individualedge:
2076 raise ValueError, "Parameter 'edge' has to be an integer or a \
2077 pair of integers specified as a tuple. Nested tuples are allowed \
2078 to make individual selection for different IFs."
2079
2080 curedge = (0, 0)
2081 if individualedge:
2082 for edgepar in edge:
2083 if not _is_valid(edgepar, int):
2084 raise ValueError, "Each element of the 'edge' tuple has \
2085 to be a pair of integers or an integer."
2086 else:
2087 curedge = edge;
2088
2089 # setup fitter
2090 f = fitter()
2091 f.set_function(poly=order)
2092
2093 # setup line finder
2094 fl = linefinder()
2095 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2096
2097 if not insitu:
2098 workscan = self.copy()
2099 else:
2100 workscan = self
2101
2102 fl.set_scan(workscan)
2103
2104 rows = range(workscan.nrow())
2105 # Save parameters of baseline fits & masklists as a class attribute.
2106 # NOTICE: It does not reflect changes in scantable!
2107 if len(rows) > 0:
2108 self.blpars=[]
2109 self.masklists=[]
2110 asaplog.push("Processing:")
2111 for r in rows:
2112 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2113 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2114 workscan.getpol(r), workscan.getcycle(r))
2115 asaplog.push(msg, False)
2116
2117 # figure out edge parameter
2118 if individualedge:
2119 if len(edge) >= workscan.getif(r):
2120 raise RuntimeError, "Number of edge elements appear to " \
2121 "be less than the number of IFs"
2122 curedge = edge[workscan.getif(r)]
2123
2124 # take into account flagtra info (CAS-1434)
2125 flagtra = workscan._getmask(r)
2126 actualmask = mask[:]
2127 if len(actualmask) == 0:
2128 actualmask = list(flagtra[:])
2129 else:
2130 if len(actualmask) != len(flagtra):
2131 raise RuntimeError, "Mask and flagtra have different length"
2132 else:
2133 for i in range(0, len(actualmask)):
2134 actualmask[i] = actualmask[i] and flagtra[i]
2135
2136 # setup line finder
2137 fl.find_lines(r, actualmask, curedge)
2138 outmask=fl.get_mask()
2139 f.set_scan(workscan, fl.get_mask())
2140 f.x = workscan._getabcissa(r)
2141 f.y = workscan._getspectrum(r)
2142 f.data = None
2143 f.fit()
2144
2145 # Show mask list
2146 masklist=workscan.get_masklist(fl.get_mask(),row=r)
2147 msg = "mask range: "+str(masklist)
2148 asaplog.push(msg, False)
2149
2150 if plot:
2151 f.plot(residual=True)
2152 x = raw_input("Accept fit ( [y]/n ): ")
2153 if x.upper() == 'N':
2154 self.blpars.append(None)
2155 self.masklists.append(None)
2156 continue
2157
2158 workscan._setspectrum(f.fitter.getresidual(), r)
2159 self.blpars.append(f.get_parameters())
2160 self.masklists.append(masklist)
2161 if plot:
2162 f._p.unmap()
2163 f._p = None
2164 workscan._add_history("auto_poly_baseline", varlist)
2165 if insitu:
2166 self._assign(workscan)
2167 else:
2168 return workscan
2169
2170 @print_log_dec
2171 def rotate_linpolphase(self, angle):
2172 """\
2173 Rotate the phase of the complex polarization O=Q+iU correlation.
2174 This is always done in situ in the raw data. So if you call this
2175 function more than once then each call rotates the phase further.
2176
2177 Parameters:
2178
2179 angle: The angle (degrees) to rotate (add) by.
2180
2181 Example::
2182
2183 scan.rotate_linpolphase(2.3)
2184
2185 """
2186 varlist = vars()
2187 self._math._rotate_linpolphase(self, angle)
2188 self._add_history("rotate_linpolphase", varlist)
2189 print_log()
2190 return
2191
2192 @print_log_dec
2193 def rotate_xyphase(self, angle):
2194 """\
2195 Rotate the phase of the XY correlation. This is always done in situ
2196 in the data. So if you call this function more than once
2197 then each call rotates the phase further.
2198
2199 Parameters:
2200
2201 angle: The angle (degrees) to rotate (add) by.
2202
2203 Example::
2204
2205 scan.rotate_xyphase(2.3)
2206
2207 """
2208 varlist = vars()
2209 self._math._rotate_xyphase(self, angle)
2210 self._add_history("rotate_xyphase", varlist)
2211 print_log()
2212 return
2213
2214 @print_log_dec
2215 def swap_linears(self):
2216 """\
2217 Swap the linear polarisations XX and YY, or better the first two
2218 polarisations as this also works for ciculars.
2219 """
2220 varlist = vars()
2221 self._math._swap_linears(self)
2222 self._add_history("swap_linears", varlist)
2223 print_log()
2224 return
2225
2226 @print_log_dec
2227 def invert_phase(self):
2228 """\
2229 Invert the phase of the complex polarisation
2230 """
2231 varlist = vars()
2232 self._math._invert_phase(self)
2233 self._add_history("invert_phase", varlist)
2234 return
2235
2236 @print_log_dec
2237 def add(self, offset, insitu=None):
2238 """\
2239 Return a scan where all spectra have the offset added
2240
2241 Parameters:
2242
2243 offset: the offset
2244 insitu: if False a new scantable is returned.
2245 Otherwise, the scaling is done in-situ
2246 The default is taken from .asaprc (False)
2247
2248 """
2249 if insitu is None: insitu = rcParams['insitu']
2250 self._math._setinsitu(insitu)
2251 varlist = vars()
2252 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2253 s._add_history("add", varlist)
2254 if insitu:
2255 self._assign(s)
2256 else:
2257 return s
2258
2259 @print_log_dec
2260 def scale(self, factor, tsys=True, insitu=None):
2261 """\
2262
2263 Return a scan where all spectra are scaled by the give 'factor'
2264
2265 Parameters:
2266
2267 factor: the scaling factor (float or 1D float list)
2268 insitu: if False a new scantable is returned.
2269 Otherwise, the scaling is done in-situ
2270 The default is taken from .asaprc (False)
2271 tsys: if True (default) then apply the operation to Tsys
2272 as well as the data
2273
2274 """
2275 if insitu is None: insitu = rcParams['insitu']
2276 self._math._setinsitu(insitu)
2277 varlist = vars()
2278 s = None
2279 import numpy
2280 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2281 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2282 from asapmath import _array2dOp
2283 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2284 else:
2285 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2286 else:
2287 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2288 s._add_history("scale", varlist)
2289 print_log()
2290 if insitu:
2291 self._assign(s)
2292 else:
2293 return s
2294
2295 def set_sourcetype(self, match, matchtype="pattern",
2296 sourcetype="reference"):
2297 """\
2298 Set the type of the source to be an source or reference scan
2299 using the provided pattern.
2300
2301 Parameters:
2302
2303 match: a Unix style pattern, regular expression or selector
2304 matchtype: 'pattern' (default) UNIX style pattern or
2305 'regex' regular expression
2306 sourcetype: the type of the source to use (source/reference)
2307
2308 """
2309 varlist = vars()
2310 basesel = self.get_selection()
2311 stype = -1
2312 if sourcetype.lower().startswith("r"):
2313 stype = 1
2314 elif sourcetype.lower().startswith("s"):
2315 stype = 0
2316 else:
2317 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2318 if matchtype.lower().startswith("p"):
2319 matchtype = "pattern"
2320 elif matchtype.lower().startswith("r"):
2321 matchtype = "regex"
2322 else:
2323 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2324 sel = selector()
2325 if isinstance(match, selector):
2326 sel = match
2327 else:
2328 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2329 self.set_selection(basesel+sel)
2330 self._setsourcetype(stype)
2331 self.set_selection(basesel)
2332 self._add_history("set_sourcetype", varlist)
2333
    @print_log_dec
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve:       you can preserve (default) the continuum or
                            remove it.  The equations used are
                            preserve: Output = Toff * (on/off) - Toff
                            remove:   Output = Toff * (on/off) - Ton
            mode:           the on/off detection mode
                            'paired' (default)
                            identifies 'off' scans by the
                            trailing '_R' (Mopra/Parkes) or
                            '_e'/'_w' (Tid) and matches
                            on/off pairs from the observing pattern
                            'time'
                            finds the closest off in time

        """
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        varlist = vars()
        s = None
        if mode.lower() == "paired":
            # split the data into "off" (SRCTYPE==1) and "on" (SRCTYPE==0)
            # rows via temporary selections, then form the quotient.
            # The base selection is restored afterwards.
            basesel = self.get_selection()
            sel = selector()+basesel
            sel.set_query("SRCTYPE==1")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==0")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
            self.set_selection(basesel)
        elif mode.lower() == "time":
            # let the C++ layer pair each "on" with the closest "off" in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        print_log()
        return s
2379
2380 @print_log_dec
2381 def mx_quotient(self, mask = None, weight='median', preserve=True):
2382 """\
2383 Form a quotient using "off" beams when observing in "MX" mode.
2384
2385 Parameters:
2386
2387 mask: an optional mask to be used when weight == 'stddev'
2388 weight: How to average the off beams. Default is 'median'.
2389 preserve: you can preserve (default) the continuum or
2390 remove it. The equations used are
2391 preserve: Output = Toff * (on/off) - Toff
2392 remove: Output = Toff * (on/off) - Ton
2393
2394 """
2395 mask = mask or ()
2396 varlist = vars()
2397 on = scantable(self._math._mx_extract(self, 'on'))
2398 preoff = scantable(self._math._mx_extract(self, 'off'))
2399 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2400 from asapmath import quotient
2401 q = quotient(on, off, preserve)
2402 q._add_history("mx_quotient", varlist)
2403 print_log()
2404 return q
2405
2406 @print_log_dec
2407 def freq_switch(self, insitu=None):
2408 """\
2409 Apply frequency switching to the data.
2410
2411 Parameters:
2412
2413 insitu: if False a new scantable is returned.
2414 Otherwise, the swictching is done in-situ
2415 The default is taken from .asaprc (False)
2416
2417 """
2418 if insitu is None: insitu = rcParams['insitu']
2419 self._math._setinsitu(insitu)
2420 varlist = vars()
2421 s = scantable(self._math._freqswitch(self))
2422 s._add_history("freq_switch", varlist)
2423 print_log()
2424 if insitu: self._assign(s)
2425 else: return s
2426
2427 @print_log_dec
2428 def recalc_azel(self):
2429 """Recalculate the azimuth and elevation for each position."""
2430 varlist = vars()
2431 self._recalcazel()
2432 self._add_history("recalc_azel", varlist)
2433 print_log()
2434 return
2435
2436 @print_log_dec
2437 def __add__(self, other):
2438 varlist = vars()
2439 s = None
2440 if isinstance(other, scantable):
2441 s = scantable(self._math._binaryop(self, other, "ADD"))
2442 elif isinstance(other, float):
2443 s = scantable(self._math._unaryop(self, other, "ADD", False))
2444 else:
2445 raise TypeError("Other input is not a scantable or float value")
2446 s._add_history("operator +", varlist)
2447 return s
2448
2449 @print_log_dec
2450 def __sub__(self, other):
2451 """
2452 implicit on all axes and on Tsys
2453 """
2454 varlist = vars()
2455 s = None
2456 if isinstance(other, scantable):
2457 s = scantable(self._math._binaryop(self, other, "SUB"))
2458 elif isinstance(other, float):
2459 s = scantable(self._math._unaryop(self, other, "SUB", False))
2460 else:
2461 raise TypeError("Other input is not a scantable or float value")
2462 s._add_history("operator -", varlist)
2463 return s
2464
2465 @print_log_dec
2466 def __mul__(self, other):
2467 """
2468 implicit on all axes and on Tsys
2469 """
2470 varlist = vars()
2471 s = None
2472 if isinstance(other, scantable):
2473 s = scantable(self._math._binaryop(self, other, "MUL"))
2474 elif isinstance(other, float):
2475 s = scantable(self._math._unaryop(self, other, "MUL", False))
2476 else:
2477 raise TypeError("Other input is not a scantable or float value")
2478 s._add_history("operator *", varlist)
2479 return s
2480
2481
2482 @print_log_dec
2483 def __div__(self, other):
2484 """
2485 implicit on all axes and on Tsys
2486 """
2487 varlist = vars()
2488 s = None
2489 if isinstance(other, scantable):
2490 s = scantable(self._math._binaryop(self, other, "DIV"))
2491 elif isinstance(other, float):
2492 if other == 0.0:
2493 raise ZeroDivisionError("Dividing by zero is not recommended")
2494 s = scantable(self._math._unaryop(self, other, "DIV", False))
2495 else:
2496 raise TypeError("Other input is not a scantable or float value")
2497 s._add_history("operator /", varlist)
2498 return s
2499
2500 def get_fit(self, row=0):
2501 """\
2502 Print or return the stored fits for a row in the scantable
2503
2504 Parameters:
2505
2506 row: the row which the fit has been applied to.
2507
2508 """
2509 if row > self.nrow():
2510 return
2511 from asap.asapfit import asapfit
2512 fit = asapfit(self._getfit(row))
2513 if rcParams['verbose']:
2514 #print fit
2515 asaplog.push( '%s' %(fit) )
2516 print_log()
2517 return
2518 else:
2519 return fit.as_dict()
2520
2521 def flag_nans(self):
2522 """\
2523 Utility function to flag NaN values in the scantable.
2524 """
2525 import numpy
2526 basesel = self.get_selection()
2527 for i in range(self.nrow()):
2528 sel = self.get_row_selector(i)
2529 self.set_selection(basesel+sel)
2530 nans = numpy.isnan(self._getspectrum(0))
2531 if numpy.any(nans):
2532 bnans = [ bool(v) for v in nans]
2533 self.flag(bnans)
2534 self.set_selection(basesel)
2535
2536 def get_row_selector(self, rowno):
2537 return selector(beams=self.getbeam(rowno),
2538 ifs=self.getif(rowno),
2539 pols=self.getpol(rowno),
2540 scans=self.getscan(rowno),
2541 cycles=self.getcycle(rowno))
2542
2543 def _add_history(self, funcname, parameters):
2544 if not rcParams['scantable.history']:
2545 return
2546 # create date
2547 sep = "##"
2548 from datetime import datetime
2549 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2550 hist = dstr+sep
2551 hist += funcname+sep#cdate+sep
2552 if parameters.has_key('self'): del parameters['self']
2553 for k, v in parameters.iteritems():
2554 if type(v) is dict:
2555 for k2, v2 in v.iteritems():
2556 hist += k2
2557 hist += "="
2558 if isinstance(v2, scantable):
2559 hist += 'scantable'
2560 elif k2 == 'mask':
2561 if isinstance(v2, list) or isinstance(v2, tuple):
2562 hist += str(self._zip_mask(v2))
2563 else:
2564 hist += str(v2)
2565 else:
2566 hist += str(v2)
2567 else:
2568 hist += k
2569 hist += "="
2570 if isinstance(v, scantable):
2571 hist += 'scantable'
2572 elif k == 'mask':
2573 if isinstance(v, list) or isinstance(v, tuple):
2574 hist += str(self._zip_mask(v))
2575 else:
2576 hist += str(v)
2577 else:
2578 hist += str(v)
2579 hist += sep
2580 hist = hist[:-2] # remove trailing '##'
2581 self._addhistory(hist)
2582
2583
2584 def _zip_mask(self, mask):
2585 mask = list(mask)
2586 i = 0
2587 segments = []
2588 while mask[i:].count(1):
2589 i += mask[i:].index(1)
2590 if mask[i:].count(0):
2591 j = i + mask[i:].index(0)
2592 else:
2593 j = len(mask)
2594 segments.append([i, j])
2595 i = j
2596 return segments
2597
2598 def _get_ordinate_label(self):
2599 fu = "("+self.get_fluxunit()+")"
2600 import re
2601 lbl = "Intensity"
2602 if re.match(".K.", fu):
2603 lbl = "Brightness Temperature "+ fu
2604 elif re.match(".Jy.", fu):
2605 lbl = "Flux density "+ fu
2606 return lbl
2607
2608 def _check_ifs(self):
2609 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2610 nchans = filter(lambda t: t > 0, nchans)
2611 return (sum(nchans)/len(nchans) == nchans[0])
2612
    def _fill(self, names, unit, average, getpt, antenna):
        """Populate this scantable from one or more data files.

        Each file in *names* is imported via the filler; successive files
        are merged into this table. If *average* is set, integrations are
        averaged per scan on read. *unit* (if given) sets the flux unit
        afterwards. NOTE(review): *getpt* and *antenna* are currently
        unused here (the filler call that took them is commented out).
        """
        first = True
        fullnames = []
        # expand and validate all paths up front, before touching any data
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    # verbose mode reports via the log instead of raising
                    asaplog.push(msg)
                    print_log( 'ERROR' )
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 1 = disk-based, 0 = memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r.open(name)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # from the second file on, merge the new table into this one
            if not first:
                tbl = self._math._merge([self, tbl])
            # re-initialise self in place with the (merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
2652
2653 def __getitem__(self, key):
2654 if key < 0:
2655 key += self.nrow()
2656 if key >= self.nrow():
2657 raise IndexError("Row index out of range.")
2658 return self._getspectrum(key)
2659
2660 def __setitem__(self, key, value):
2661 if key < 0:
2662 key += self.nrow()
2663 if key >= self.nrow():
2664 raise IndexError("Row index out of range.")
2665 if not hasattr(value, "__len__") or \
2666 len(value) > self.nchan(self.getif(key)):
2667 raise ValueError("Spectrum length doesn't match.")
2668 return self._setspectrum(value, key)
2669
    def __len__(self):
        """Return the number of rows in the scantable."""
        return self.nrow()
2672
2673 def __iter__(self):
2674 for i in range(len(self)):
2675 yield self[i]
Note: See TracBrowser for help on using the repository browser.