source: trunk/python/scantable.py@ 1855

Last change on this file since 1855 was 1855, checked in by Malte Marquarding, 15 years ago

Ticket #194: docstring changes. Play nicely with sphinx.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 91.5 KB
1"""This module defines the scantable class."""
2
3import os
4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, print_log, print_log_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or
18
19
20def preserve_selection(func):
21 @wraps_dec(func)
22 def wrap(obj, *args, **kw):
23 basesel = obj.get_selection()
24 val = func(obj, *args, **kw)
25 obj.set_selection(basesel)
26 return val
27 return wrap
28
29def is_scantable(filename):
30 """Is the given file a scantable?
31
32 Parameters:
33
34 filename: the name of the file/directory to test
35
36 """
37 return (os.path.isdir(filename)
38 and not os.path.exists(filename+'/table.f1')
39 and os.path.exists(filename+'/table.info'))
40
41
42class scantable(Scantable):
43 """\
44 The ASAP container for scans (single-dish data).
45 """
46
47 @print_log_dec
48 def __init__(self, filename, average=None, unit=None, getpt=None,
49 antenna=None, parallactify=None):
50 """\
51 Create a scantable from a saved one or make a reference
52
53 Parameters:
54
55 filename: the name of an asap table on disk
56 or
57 the name of a rpfits/sdfits/ms file
58 (integrations within scans are auto averaged
59 and the whole file is read) or
60 [advanced] a reference to an existing scantable
61
62 average: average all integrations within a scan on read.
63 The default (True) is taken from .asaprc.
64
65 unit: brightness unit; must be consistent with K or Jy.
66 Overrides the default selected by the filler
67 (input rpfits/sdfits/ms) or replaces the value
68 in existing scantables
69
70 getpt: for MeasurementSet input data only:
71 If True, all pointing data are filled.
72 The default is False, which can make loading
73 the MS data faster in some cases.
74
75 antenna: Antenna selection. integer (id) or string (name or id).
76
77 parallactify: Indicate that the data had been parallactified. The
78 default is taken from the .asaprc file.
79
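Example (an illustrative sketch; the file names are placeholders)::

    # fill from a raw rpfits file, averaging integrations within scans
    scan = scantable('myobs.rpf', average=True)
    # open an existing asap table from disk
    scan = scantable('myobs.asap')
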
80 """
81 if average is None:
82 average = rcParams['scantable.autoaverage']
83 if getpt is None:
84 getpt = True
85 if antenna is not None:
86 asaplog.push("Antenna selection currently unsupported. "
87 "Using '0'")
88 print_log('WARN')
89 if antenna is None:
90 antenna = ''
91 elif type(antenna) == int:
92 antenna = '%s' % antenna
93 elif type(antenna) == list:
94 tmpstr = ''
95 for i in range( len(antenna) ):
96 if type(antenna[i]) == int:
97 tmpstr = tmpstr + ('%s,'%(antenna[i]))
98 elif type(antenna[i]) == str:
99 tmpstr=tmpstr+antenna[i]+','
100 else:
101 asaplog.push('Bad antenna selection.')
102 print_log('ERROR')
103 return
104 antenna = tmpstr.rstrip(',')
105 parallactify = parallactify or rcParams['scantable.parallactify']
106 varlist = vars()
107 from asap._asap import stmath
108 self._math = stmath( rcParams['insitu'] )
109 if isinstance(filename, Scantable):
110 Scantable.__init__(self, filename)
111 else:
112 if isinstance(filename, str):
113 filename = os.path.expandvars(filename)
114 filename = os.path.expanduser(filename)
115 if not os.path.exists(filename):
116 s = "File '%s' not found." % (filename)
117 if rcParams['verbose']:
118 asaplog.push(s)
119 print_log('ERROR')
120 return
121 raise IOError(s)
122 if is_scantable(filename):
123 ondisk = rcParams['scantable.storage'] == 'disk'
124 Scantable.__init__(self, filename, ondisk)
125 if unit is not None:
126 self.set_fluxunit(unit)
127 # do not reset to the default freqframe
128 #self.set_freqframe(rcParams['scantable.freqframe'])
129 elif os.path.isdir(filename) \
130 and not os.path.exists(filename+'/table.f1'):
131 msg = "The given file '%s' is not a valid " \
132 "asap table." % (filename)
133 if rcParams['verbose']:
134 #print msg
135 asaplog.push( msg )
136 print_log( 'ERROR' )
137 return
138 else:
139 raise IOError(msg)
140 else:
141 self._fill([filename], unit, average, getpt, antenna)
142 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
143 and isinstance(filename[-1], str):
144 self._fill(filename, unit, average, getpt, antenna)
145 self.parallactify(parallactify)
146 self._add_history("scantable", varlist)
147 print_log()
148
149 @print_log_dec
150 def save(self, name=None, format=None, overwrite=False):
151 """\
152 Store the scantable on disk. This can be an asap (aips++) Table,
153 SDFITS or MS2 format.
154
155 Parameters:
156
157 name: the name of the output file. For format "ASCII"
158 this is the root file name (data in 'name'.txt
159 and header in 'name'_header.txt)
160
161 format: an optional file format. Default is ASAP.
162 Allowed are:
163
164 * 'ASAP' (save as ASAP [aips++] Table),
165 * 'SDFITS' (save as SDFITS file)
166 * 'ASCII' (saves as ascii text file)
167 * 'MS2' (saves as a casacore MeasurementSet V2)
168 * 'FITS' (save as image FITS - not readable by class)
169 * 'CLASS' (save as FITS readable by CLASS)
170
171 overwrite: whether to overwrite the file if it already exists.
172 The default (False) is to return with a warning
173 without writing the output. USE WITH CARE.
174
175 Example::
176
177 scan.save('myscan.asap')
178 scan.save('myscan.sdfits', 'SDFITS')
179
180 """
181 from os import path
182 format = format or rcParams['scantable.save']
183 suffix = '.'+format.lower()
184 if name is None or name == "":
185 name = 'scantable'+suffix
186 msg = "No filename given. Using default name %s..." % name
187 asaplog.push(msg)
188 name = path.expandvars(name)
189 if path.isfile(name) or path.isdir(name):
190 if not overwrite:
191 msg = "File %s exists." % name
192 if rcParams['verbose']:
193 #print msg
194 asaplog.push( msg )
195 print_log( 'ERROR' )
196 return
197 else:
198 raise IOError(msg)
199 format2 = format.upper()
200 if format2 == 'ASAP':
201 self._save(name)
202 else:
203 from asap._asap import stwriter as stw
204 writer = stw(format2)
205 writer.write(self, name)
206 print_log()
207 return
208
209 def copy(self):
210 """Return a copy of this scantable.
211
212 *Note*:
213
214 This makes a full (deep) copy. scan2 = scan1 makes a reference.
215
216 Example::
217
218 copiedscan = scan.copy()
219
220 """
221 sd = scantable(Scantable._copy(self))
222 return sd
223
224 def drop_scan(self, scanid=None):
225 """\
226 Return a new scantable where the specified scan number(s) has(have)
227 been dropped.
228
229 Parameters:
230
231 scanid: a (list of) scan number(s)
232
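Example (an illustrative sketch; 'scan' is an existing scantable)::

    # return a copy of the scantable without scans 0 and 1
    newscan = scan.drop_scan([0, 1])
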
233 """
234 from asap import _is_sequence_or_number as _is_valid
235 from asap import _to_list
236 from asap import unique
237 if not _is_valid(scanid):
238 if rcParams['verbose']:
239 #print "Please specify a scanno to drop from the scantable"
240 asaplog.push( 'Please specify a scanno to drop from the scantable' )
241 print_log( 'ERROR' )
242 return
243 else:
244 raise RuntimeError("No scan given")
245 try:
246 scanid = _to_list(scanid)
247 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
248 for sid in scanid: allscans.remove(sid)
249 if len(allscans) == 0:
250 raise ValueError("Can't remove all scans")
251 except ValueError:
252 if rcParams['verbose']:
253 #print "Couldn't find any match."
254 print_log()
255 asaplog.push( "Couldn't find any match." )
256 print_log( 'ERROR' )
257 return
258 else: raise
259 try:
260 sel = selector(scans=allscans)
261 return self._select_copy(sel)
262 except RuntimeError:
263 if rcParams['verbose']:
264 #print "Couldn't find any match."
265 print_log()
266 asaplog.push( "Couldn't find any match." )
267 print_log( 'ERROR' )
268 else:
269 raise
270
271 def _select_copy(self, selection):
272 orig = self.get_selection()
273 self.set_selection(orig+selection)
274 cp = self.copy()
275 self.set_selection(orig)
276 return cp
277
278 def get_scan(self, scanid=None):
279 """\
280 Return a specific scan (by scanno) or collection of scans (by
281 source name) in a new scantable.
282
283 *Note*:
284
285 See scantable.drop_scan() for the inverse operation.
286
287 Parameters:
288
289 scanid: a (list of) scanno or a source name, unix-style
290 patterns are accepted for source name matching, e.g.
291 '*_R' gets all 'ref' scans
292
293 Example::
294
295 # get all scans containing the source '323p459'
296 newscan = scan.get_scan('323p459')
297 # get all 'off' scans
298 refscans = scan.get_scan('*_R')
299 # get a subset of scans by scanno (as listed in scan.summary())
300 newscan = scan.get_scan([0, 2, 7, 10])
301
302 """
303 if scanid is None:
304 if rcParams['verbose']:
305 #print "Please specify a scan no or name to " \
306 # "retrieve from the scantable"
307 asaplog.push( 'Please specify a scan no or name to retrieve'
308 ' from the scantable' )
309 print_log( 'ERROR' )
310 return
311 else:
312 raise RuntimeError("No scan given")
313
314 try:
315 bsel = self.get_selection()
316 sel = selector()
317 if type(scanid) is str:
318 sel.set_name(scanid)
319 return self._select_copy(sel)
320 elif type(scanid) is int:
321 sel.set_scans([scanid])
322 return self._select_copy(sel)
323 elif type(scanid) is list:
324 sel.set_scans(scanid)
325 return self._select_copy(sel)
326 else:
327 msg = "Illegal scanid type, use 'int' or 'list' of ints."
328 if rcParams['verbose']:
329 #print msg
330 asaplog.push( msg )
331 print_log( 'ERROR' )
332 else:
333 raise TypeError(msg)
334 except RuntimeError:
335 if rcParams['verbose']:
336 #print "Couldn't find any match."
337 print_log()
338 asaplog.push( "Couldn't find any match." )
339 print_log( 'ERROR' )
340 else: raise
341
342 def __str__(self):
343 return Scantable._summary(self, True)
344
345 def summary(self, filename=None):
346 """\
347 Print a summary of the contents of this scantable.
348
349 Parameters:
350
351 filename: the name of a file to write the output to.
352 Default - no file output
353
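Example (an illustrative sketch; the file name is a placeholder)::

    scan.summary()                    # display the summary
    scan.summary('obs_summary.txt')   # also write it to a text file
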
354 """
355 info = Scantable._summary(self, True)
356 if filename is not None:
357 if filename == "":
358 filename = 'scantable_summary.txt'
359 from os.path import expandvars, isdir
360 filename = expandvars(filename)
361 if not isdir(filename):
362 data = open(filename, 'w')
363 data.write(info)
364 data.close()
365 else:
366 msg = "Illegal file name '%s'." % (filename)
367 if rcParams['verbose']:
368 #print msg
369 asaplog.push( msg )
370 print_log( 'ERROR' )
371 else:
372 raise IOError(msg)
373 if rcParams['verbose']:
374 try:
375 from IPython.genutils import page as pager
376 except ImportError:
377 from pydoc import pager
378 pager(info)
379 else:
380 return info
381
382 def get_spectrum(self, rowno):
383 """Return the spectrum for the current row in the scantable as a list.
384
385 Parameters:
386
387 rowno: the row number to retrieve the spectrum from
388
389 """
390 return self._getspectrum(rowno)
391
392 def get_mask(self, rowno):
393 """Return the mask for the current row in the scantable as a list.
394
395 Parameters:
396
397 rowno: the row number to retrieve the mask from
398
399 """
400 return self._getmask(rowno)
401
402 def set_spectrum(self, spec, rowno):
403 """Return the spectrum for the current row in the scantable as a list.
404
405 Parameters:
406
407 spec: the new spectrum
408
409 rowno: the row number to set the spectrum for
410
411 """
412 assert(len(spec) == self.nchan())
413 return self._setspectrum(spec, rowno)
414
415 def get_coordinate(self, rowno):
416 """Return the (spectral) coordinate for a a given 'rowno'.
417
418 *Note*:
419
420 * This coordinate is only valid until a scantable method modifies
421 the frequency axis.
422 * This coordinate contains the original frequency set-up,
423 NOT the new frame. The conversions, however, are done using the
424 user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
425 use scantable.freq_align first. Without it there is no closure,
426 i.e.::
427
428 c = myscan.get_coordinate(0)
429 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
430
431 Parameters:
432
433 rowno: the row number for the spectral coordinate
434
435 """
436 return coordinate(Scantable.get_coordinate(self, rowno))
437
438 def get_selection(self):
439 """\
440 Get the selection object currently set on this scantable.
441
442 Example::
443
444 sel = scan.get_selection()
445 sel.set_ifs(0) # select IF 0
446 scan.set_selection(sel) # apply modified selection
447
448 """
449 return selector(self._getselection())
450
451 def set_selection(self, selection=None, **kw):
452 """\
453 Select a subset of the data. All following operations on this scantable
454 are only applied to this selection.
455
456 Parameters:
457
458 selection: a selector object (default unset the selection), or
459 any combination of "pols", "ifs", "beams", "scans",
460 "cycles", "name", "query"
461
462 Examples::
463
464 sel = selector() # create a selection object
465 sel.set_scans([0, 3]) # select SCANNO 0 and 3
466 scan.set_selection(sel) # set the selection
467 scan.summary() # will only print summary of scanno 0 and 3
468 scan.set_selection() # unset the selection
469 # or the equivalent
470 scan.set_selection(scans=[0,3])
471 scan.summary() # will only print summary of scanno 0 and 3
472 scan.set_selection() # unset the selection
473
474 """
475 if selection is None:
476 # reset
477 if len(kw) == 0:
478 selection = selector()
479 else:
480 # try keywords
481 for k in kw:
482 if k not in selector.fields:
483 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
484 selection = selector(**kw)
485 self._setselection(selection)
486
487 def get_row(self, row=0, insitu=None):
488 """\
489 Select a row in the scantable.
490 Return a scantable with single row.
491
492 Parameters:
493
494 row: row no of integration, default is 0.
495 insitu: if False a new scantable is returned. Otherwise, the
496 selection is done in-situ. The default is taken from .asaprc
497 (False)
498
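Example (an illustrative sketch; 'scan' is an existing scantable)::

    # return row 5 as a new single-row scantable
    onerow = scan.get_row(row=5, insitu=False)
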
499 """
500 if insitu is None: insitu = rcParams['insitu']
501 if not insitu:
502 workscan = self.copy()
503 else:
504 workscan = self
505 # Select a row
506 sel=selector()
507 sel.set_scans([workscan.getscan(row)])
508 sel.set_cycles([workscan.getcycle(row)])
509 sel.set_beams([workscan.getbeam(row)])
510 sel.set_ifs([workscan.getif(row)])
511 sel.set_polarisations([workscan.getpol(row)])
512 sel.set_name(workscan._getsourcename(row))
513 workscan.set_selection(sel)
514 if not workscan.nrow() == 1:
515 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
516 raise RuntimeError(msg)
517 del sel
518 if insitu:
519 self._assign(workscan)
520 else:
521 return workscan
522
523 #def stats(self, stat='stddev', mask=None):
524 def stats(self, stat='stddev', mask=None, form='3.3f'):
525 """\
526 Determine the specified statistic of the current beam/if/pol
527 Takes a 'mask' as an optional parameter to specify which
528 channels should be excluded.
529
530 Parameters:
531
532 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
533 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
534
535 mask: an optional mask specifying where the statistic
536 should be determined.
537
538 form: format string to print statistic values
539
540 Example::
541
542 scan.set_unit('channel')
543 msk = scan.create_mask([100, 200], [500, 600])
544 scan.stats(stat='mean', mask=msk)
545
546 """
547 mask = mask or []
548 if not self._check_ifs():
549 raise ValueError("Cannot apply mask as the IFs have different "
550 "number of channels. Please use setselection() "
551 "to select individual IFs")
552 rtnabc = False
553 if stat.lower().endswith('_abc'): rtnabc = True
554 getchan = False
555 if stat.lower().startswith('min') or stat.lower().startswith('max'):
556 chan = self._math._minmaxchan(self, mask, stat)
557 getchan = True
558 statvals = []
559 if not rtnabc: statvals = self._math._stats(self, mask, stat)
560
561 #def cb(i):
562 # return statvals[i]
563
564 #return self._row_callback(cb, stat)
565
566 label=stat
567 #callback=cb
568 out = ""
569 #outvec = []
570 sep = '-'*50
571 for i in range(self.nrow()):
572 refstr = ''
573 statunit= ''
574 if getchan:
575 qx, qy = self.chan2data(rowno=i, chan=chan[i])
576 if rtnabc:
577 statvals.append(qx['value'])
578 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
579 statunit= '['+qx['unit']+']'
580 else:
581 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
582
583 tm = self._gettime(i)
584 src = self._getsourcename(i)
585 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
586 out += 'Time[%s]:\n' % (tm)
587 if self.nbeam(-1) > 1:
588 out += ' Beam[%d] ' % (self.getbeam(i))
589 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
590 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
591 #outvec.append(callback(i))
592 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
593 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
594 out += sep+"\n"
595
596 if rcParams['verbose']:
597 import os
598 if os.environ.has_key( 'USER' ):
599 usr=os.environ['USER']
600 else:
601 import commands
602 usr=commands.getoutput( 'whoami' )
603 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
604 f=open(tmpfile,'w')
605 print >> f, sep
606 print >> f, ' %s %s' % (label, statunit)
607 print >> f, sep
608 print >> f, out
609 f.close()
610 f=open(tmpfile,'r')
611 x=f.readlines()
612 f.close()
613 blanc=''
614 asaplog.push(blanc.join(x), False)
615 #for xx in x:
616 # asaplog.push( xx, False )
617 print_log()
618 return statvals
619
620 def chan2data(self, rowno=0, chan=0):
621 """\
622 Returns channel/frequency/velocity and spectral value
623 at an arbitrary row and channel in the scantable.
624
625 Parameters:
626
627 rowno: a row number in the scantable. Default is the
628 first row, i.e. rowno=0
629
630 chan: a channel in the scantable. Default is the first
631 channel, i.e. chan=0
632
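Example (an illustrative sketch; 'scan' is an existing scantable)::

    # abcissa and spectral value at channel 100 of row 0
    qx, qy = scan.chan2data(rowno=0, chan=100)
    print qx['value'], qx['unit'], qy['value'], qy['unit']
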
633 """
634 if isinstance(rowno, int) and isinstance(chan, int):
635 qx = {'unit': self.get_unit(),
636 'value': self._getabcissa(rowno)[chan]}
637 qy = {'unit': self.get_fluxunit(),
638 'value': self._getspectrum(rowno)[chan]}
639 return qx, qy
640
641 def stddev(self, mask=None):
642 """\
643 Determine the standard deviation of the current beam/if/pol
644 Takes a 'mask' as an optional parameter to specify which
645 channels should be excluded.
646
647 Parameters:
648
649 mask: an optional mask specifying where the standard
650 deviation should be determined.
651
652 Example::
653
654 scan.set_unit('channel')
655 msk = scan.create_mask([100, 200], [500, 600])
656 scan.stddev(mask=msk)
657
658 """
659 return self.stats(stat='stddev', mask=mask)
660
661
662 def get_column_names(self):
663 """\
664 Return a list of column names, which can be used for selection.
665 """
666 return list(Scantable.get_column_names(self))
667
668 def get_tsys(self, row=-1):
669 """\
670 Return the System temperatures.
671
672 Parameters:
673
674 row: the rowno to get the information for. (default all rows)
675
676 Returns:
677
678 a list of Tsys values for the current selection
679
680 """
681 if row > -1:
682 return self._get_column(self._gettsys, row)
683 return self._row_callback(self._gettsys, "Tsys")
684
685
686 def get_weather(self, row=-1):
687 """\
688 Return the weather information.
689
690 Parameters:
691
692 row: the rowno to get the information for. (default all rows)
693
694 Returns:
695
696 a dict or a list of dicts of values for the current selection
697
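Example (an illustrative sketch; 'scan' is an existing scantable)::

    w = scan.get_weather(row=0)
    print w['temperature'], w['pressure'], w['windspeed']
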
698 """
699
700 values = self._get_column(self._get_weather, row)
701 if row > -1:
702 return {'temperature': values[0],
703 'pressure': values[1], 'humidity' : values[2],
704 'windspeed' : values[3], 'windaz' : values[4]
705 }
706 else:
707 out = []
708 for r in values:
709
710 out.append({'temperature': r[0],
711 'pressure': r[1], 'humidity' : r[2],
712 'windspeed' : r[3], 'windaz' : r[4]
713 })
714 return out
715
716 def _row_callback(self, callback, label):
717 out = ""
718 outvec = []
719 sep = '-'*50
720 for i in range(self.nrow()):
721 tm = self._gettime(i)
722 src = self._getsourcename(i)
723 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
724 out += 'Time[%s]:\n' % (tm)
725 if self.nbeam(-1) > 1:
726 out += ' Beam[%d] ' % (self.getbeam(i))
727 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
728 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
729 outvec.append(callback(i))
730 out += '= %3.3f\n' % (outvec[i])
731 out += sep+'\n'
732 if rcParams['verbose']:
733 asaplog.push(sep)
734 asaplog.push(" %s" % (label))
735 asaplog.push(sep)
736 asaplog.push(out)
737 print_log()
738 return outvec
739
740 def _get_column(self, callback, row=-1):
741 """
742 """
743 if row == -1:
744 return [callback(i) for i in range(self.nrow())]
745 else:
746 if 0 <= row < self.nrow():
747 return callback(row)
748
749
750 def get_time(self, row=-1, asdatetime=False):
751 """\
752 Get a list of time stamps for the observations.
753 Return a datetime object for each integration time stamp in the scantable.
754
755 Parameters:
756
757 row: row no of integration. Default -1 return all rows
758
759 asdatetime: return values as datetime objects rather than strings
760
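Example (an illustrative sketch; 'scan' is an existing scantable)::

    times = scan.get_time(asdatetime=True)   # list of datetime objects
    t0 = scan.get_time(row=0)                # time stamp string of row 0
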
761 """
762 from time import strptime
763 from datetime import datetime
764 times = self._get_column(self._gettime, row)
765 if not asdatetime:
766 return times
767 format = "%Y/%m/%d/%H:%M:%S"
768 if isinstance(times, list):
769 return [datetime(*strptime(i, format)[:6]) for i in times]
770 else:
771 return datetime(*strptime(times, format)[:6])
772
773
774 def get_inttime(self, row=-1):
775 """\
776 Get a list of integration times for the observations.
777 Return a time in seconds for each integration in the scantable.
778
779 Parameters:
780
781 row: row no of integration. Default -1 return all rows.
782
783 """
784 return self._get_column(self._getinttime, row)
785
786
787 def get_sourcename(self, row=-1):
788 """\
789 Get a list of source names for the observations.
790 Return a string for each integration in the scantable.
791 Parameters:
792
793 row: row no of integration. Default -1 return all rows.
794
795 """
796 return self._get_column(self._getsourcename, row)
797
798 def get_elevation(self, row=-1):
799 """\
800 Get a list of elevations for the observations.
801 Return a float for each integration in the scantable.
802
803 Parameters:
804
805 row: row no of integration. Default -1 return all rows.
806
807 """
808 return self._get_column(self._getelevation, row)
809
810 def get_azimuth(self, row=-1):
811 """\
812 Get a list of azimuths for the observations.
813 Return a float for each integration in the scantable.
814
815 Parameters:
816 row: row no of integration. Default -1 return all rows.
817
818 """
819 return self._get_column(self._getazimuth, row)
820
821 def get_parangle(self, row=-1):
822 """\
823 Get a list of parallactic angles for the observations.
824 Return a float for each integration in the scantable.
825
826 Parameters:
827
828 row: row no of integration. Default -1 return all rows.
829
830 """
831 return self._get_column(self._getparangle, row)
832
833 def get_direction(self, row=-1):
834 """
835 Get a list of Positions on the sky (direction) for the observations.
836 Return a string for each integration in the scantable.
837
838 Parameters:
839
840 row: row no of integration. Default -1 return all rows
841
842 """
843 return self._get_column(self._getdirection, row)
844
845 def get_directionval(self, row=-1):
846 """\
847 Get a list of Positions on the sky (direction) for the observations.
848 Return a float for each integration in the scantable.
849
850 Parameters:
851
852 row: row no of integration. Default -1 return all rows
853
854 """
855 return self._get_column(self._getdirectionvec, row)
856
857 @print_log_dec
858 def set_unit(self, unit='channel'):
859 """\
860 Set the unit for all following operations on this scantable
861
862 Parameters:
863
864 unit: optional unit, default is 'channel'. Use one of '*Hz',
865 'km/s', 'channel' or equivalent ''
866
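Example (an illustrative sketch; 'scan' is an existing scantable)::

    scan.set_unit('km/s')   # work in velocity from now on
    scan.set_unit('GHz')    # or in frequency
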
867 """
868 varlist = vars()
869 if unit in ['', 'pixel', 'channel']:
870 unit = ''
871 inf = list(self._getcoordinfo())
872 inf[0] = unit
873 self._setcoordinfo(inf)
874 self._add_history("set_unit", varlist)
875
876 @print_log_dec
877 def set_instrument(self, instr):
878 """\
879 Set the instrument for subsequent processing.
880
881 Parameters:
882
883 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
884 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
885
886 """
887 self._setInstrument(instr)
888 self._add_history("set_instrument", vars())
889 print_log()
890
891 @print_log_dec
892 def set_feedtype(self, feedtype):
893 """\
894 Overwrite the feed type, which might not be set correctly.
895
896 Parameters:
897
898 feedtype: 'linear' or 'circular'
899
900 """
901 self._setfeedtype(feedtype)
902 self._add_history("set_feedtype", vars())
903 print_log()
904
905 @print_log_dec
906 def set_doppler(self, doppler='RADIO'):
907 """\
908 Set the doppler for all following operations on this scantable.
909
910 Parameters:
911
912 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
913
914 """
915 varlist = vars()
916 inf = list(self._getcoordinfo())
917 inf[2] = doppler
918 self._setcoordinfo(inf)
919 self._add_history("set_doppler", vars())
920 print_log()
921
922 @print_log_dec
923 def set_freqframe(self, frame=None):
924 """\
925 Set the frame type of the Spectral Axis.
926
927 Parameters:
928
929 frame: an optional frame type, default 'LSRK'. Valid frames are:
930 'TOPO', 'LSRD', 'LSRK', 'BARY',
931 'GEO', 'GALACTO', 'LGROUP', 'CMB'
932
933 Example::
934
935 scan.set_freqframe('BARY')
936
937 """
938 frame = frame or rcParams['scantable.freqframe']
939 varlist = vars()
940 # "REST" is not implemented in casacore
941 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
942 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
943 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
944 'GEO', 'GALACTO', 'LGROUP', 'CMB']
945
946 if frame in valid:
947 inf = list(self._getcoordinfo())
948 inf[1] = frame
949 self._setcoordinfo(inf)
950 self._add_history("set_freqframe", varlist)
951 else:
952 msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
953 if rcParams['verbose']:
954 #print msg
955 asaplog.push( msg )
956 print_log( 'ERROR' )
957 else:
958 raise TypeError(msg)
959 print_log()
960
961 def set_dirframe(self, frame=""):
962 """\
963 Set the frame type of the Direction on the sky.
964
965 Parameters:
966
967 frame: an optional frame type, default ''. Valid frames are:
968 'J2000', 'B1950', 'GALACTIC'
969
970 Example::
971
972 scan.set_dirframe('GALACTIC')
973
974 """
975 varlist = vars()
976 try:
977 Scantable.set_dirframe(self, frame)
978 except RuntimeError, msg:
979 if rcParams['verbose']:
980 #print msg
981 print_log()
982 asaplog.push( str(msg) )
983 print_log( 'ERROR' )
984 else:
985 raise
986 self._add_history("set_dirframe", varlist)
987
988 def get_unit(self):
989 """\
990 Get the default unit set in this scantable
991
992 Returns:
993
994 A unit string
995
996 """
997 inf = self._getcoordinfo()
998 unit = inf[0]
999 if unit == '': unit = 'channel'
1000 return unit
1001
1002 def get_abcissa(self, rowno=0):
1003 """\
1004 Get the abcissa in the current coordinate setup for the currently
1005 selected Beam/IF/Pol
1006
1007 Parameters:
1008
1009 rowno: an optional row number in the scantable. Default is the
1010 first row, i.e. rowno=0
1011
1012 Returns:
1013
1014 The abcissa values and the format string (returned as a tuple)
1015
1016 """
1017 abc = self._getabcissa(rowno)
1018 lbl = self._getabcissalabel(rowno)
1019 print_log()
1020 return abc, lbl
1021
1022 def flag(self, mask=None, unflag=False):
1023 """\
1024 Flag the selected data using an optional channel mask.
1025
1026 Parameters:
1027
1028 mask: an optional channel mask, created with create_mask. Default
1029 (no mask) is all channels.
1030
1031 unflag: if True, unflag the data
1032
1033 """
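Example (an illustrative sketch; the channel numbers are placeholders)::

    # create a mask covering channels 1000-2000 and use it for flagging
    msk = scan.create_mask([1000, 2000])
    scan.flag(mask=msk)
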
1034 varlist = vars()
1035 mask = mask or []
1036 try:
1037 self._flag(mask, unflag)
1038 except RuntimeError, msg:
1039 if rcParams['verbose']:
1040 #print msg
1041 print_log()
1042 asaplog.push( str(msg) )
1043 print_log( 'ERROR' )
1044 return
1045 else: raise
1046 self._add_history("flag", varlist)
1047
1048 def flag_row(self, rows=[], unflag=False):
1049 """\
1050 Flag the selected data in row-based manner.
1051
1052 Parameters:
1053
1054 rows: list of row numbers to be flagged. Default is no row
1055 (must be explicitly specified to execute row-based flagging).
1056
1057 unflag: if True, unflag the data.
1058
1059 """
1060 varlist = vars()
1061 try:
1062 self._flag_row(rows, unflag)
1063 except RuntimeError, msg:
1064 if rcParams['verbose']:
1065 print_log()
1066 asaplog.push( str(msg) )
1067 print_log('ERROR')
1068 return
1069 else: raise
1070 self._add_history("flag_row", varlist)
1071
1072 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1073 """\
1074 Flag the selected data outside a specified range (channel-based)
1075
1076 Parameters:
1077
1078 uthres: upper threshold.
1079
1080 dthres: lower threshold
1081
1082 clipoutside: True for flagging data outside the range [dthres:uthres].
1083 False for flagging data inside the range.
1084
1085 unflag: if True, unflag the data.
1086
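Example (an illustrative sketch; the threshold values are placeholders)::

    # flag everything outside the range [-3.0, 3.0]
    scan.clip(uthres=3.0, dthres=-3.0)
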
1087 """
1088 varlist = vars()
1089 try:
1090 self._clip(uthres, dthres, clipoutside, unflag)
1091 except RuntimeError, msg:
1092 if rcParams['verbose']:
1093 print_log()
1094 asaplog.push(str(msg))
1095 print_log('ERROR')
1096 return
1097 else: raise
1098 self._add_history("clip", varlist)
1099
1100 @print_log_dec
1101 def lag_flag(self, start, end, unit="MHz", insitu=None):
1102 """\
1103 Flag the data in 'lag' space by providing a frequency to remove.
1104 Flagged data in the scantable gets interpolated over the region.
1105 No taper is applied.
1106
1107 Parameters:
1108
1109 start: the start frequency (really a period within the
1110 bandwidth) or period to remove
1111
1112 end: the end frequency or period to remove
1113
1114 unit: the frequency unit (default "MHz") or "" for
1115 explicit lag channels
1116
1117 *Notes*:
1118
1119 It is recommended to flag edges of the band or strong
1120 signals beforehand.
1121
1122 """
1123 if insitu is None: insitu = rcParams['insitu']
1124 self._math._setinsitu(insitu)
1125 varlist = vars()
1126 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1127 if not (unit == "" or base.has_key(unit)):
1128 raise ValueError("%s is not a valid unit." % unit)
1129 try:
1130 if unit == "":
1131 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1132 else:
1133 s = scantable(self._math._lag_flag(self, start*base[unit],
1134 end*base[unit], "frequency"))
1135 except RuntimeError, msg:
1136 if rcParams['verbose']:
1137 #print msg
1138 print_log()
1139 asaplog.push( str(msg) )
1140 print_log( 'ERROR' )
1141 return
1142 else: raise
1143 s._add_history("lag_flag", varlist)
1144 print_log()
1145 if insitu:
1146 self._assign(s)
1147 else:
1148 return s
1149
1150 @print_log_dec
1151 def create_mask(self, *args, **kwargs):
1152 """\
1153 Compute and return a mask based on [min, max] windows.
1154 The specified windows are to be INCLUDED, when the mask is
1155 applied.
1156
1157 Parameters:
1158
1159 [min, max], [min2, max2], ...
1160 Pairs of start/end points (inclusive) specifying the regions
1161 to be masked
1162
1163 invert: optional argument. If specified as True,
1164 return an inverted mask, i.e. the regions
1165 specified are EXCLUDED
1166
1167 row: create the mask using the specified row for
1168 unit conversions, default is row=0
1169 only necessary if frequency varies over rows.
1170
1171 Examples::
1172
1173 scan.set_unit('channel')
1174 # a)
1175 msk = scan.create_mask([400, 500], [800, 900])
1176 # masks everything outside 400 and 500
1177 # and 800 and 900 in the unit 'channel'
1178
1179 # b)
1180 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1181 # masks the regions between 400 and 500
1182 # and 800 and 900 in the unit 'channel'
1183
1184 # c)
1185 #mask only channel 400
1186 msk = scan.create_mask([400])
1187
1188 """
1189 row = kwargs.get("row", 0)
1190 data = self._getabcissa(row)
1191 u = self._getcoordinfo()[0]
1192 if rcParams['verbose']:
1193 if u == "": u = "channel"
1194 msg = "The current mask window unit is %s" % u
1195 i = self._check_ifs()
1196 if not i:
1197 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1198 asaplog.push(msg)
1199 n = self.nchan()
1200 msk = _n_bools(n, False)
1201 # test if args is a 'list' or a 'normal *args - UGLY!!!
1202
1203 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1204 and args or args[0]
1205 for window in ws:
1206 if len(window) == 1:
1207 window = [window[0], window[0]]
1208 if len(window) == 0 or len(window) > 2:
1209 raise ValueError("A window needs to be defined as [start(, end)]")
1210 if window[0] > window[1]:
1211 tmp = window[0]
1212 window[0] = window[1]
1213 window[1] = tmp
1214 for i in range(n):
1215 if data[i] >= window[0] and data[i] <= window[1]:
1216 msk[i] = True
1217 if kwargs.has_key('invert'):
1218 if kwargs.get('invert'):
1219 msk = mask_not(msk)
1220 print_log()
1221 return msk
1222
1223 def get_masklist(self, mask=None, row=0):
1224 """\
1225 Compute and return a list of mask windows, [min, max].
1226
1227 Parameters:
1228
1229 mask: channel mask, created with create_mask.
1230
1231 row: calculate the masklist using the specified row
1232 for unit conversions, default is row=0
1233 only necessary if frequency varies over rows.
1234
1235 Returns:
1236
1237 [min, max], [min2, max2], ...
1238 Pairs of start/end points (inclusive) specifying
1239 the masked regions
1240
1241 """
1242 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1243 raise TypeError("The mask should be list or tuple.")
1244 if len(mask) < 2:
1245 raise TypeError("The mask elements should be > 1")
1246 if self.nchan() != len(mask):
1247 msg = "Number of channels in scantable != number of mask elements"
1248 raise TypeError(msg)
1249 data = self._getabcissa(row)
1250 u = self._getcoordinfo()[0]
1251 if rcParams['verbose']:
1252 if u == "": u = "channel"
1253 msg = "The current mask window unit is %s" % u
1254 i = self._check_ifs()
1255 if not i:
1256 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1257 asaplog.push(msg)
1258 masklist=[]
1259 ist, ien = None, None
1260 ist, ien=self.get_mask_indices(mask)
1261 if ist is not None and ien is not None:
1262 for i in xrange(len(ist)):
1263 range=[data[ist[i]],data[ien[i]]]
1264 range.sort()
1265 masklist.append([range[0],range[1]])
1266 return masklist
1267
1268 def get_mask_indices(self, mask=None):
1269 """\
1270 Compute and return lists of mask start indices and mask end indices.
1271
1272 Parameters:
1273
1274 mask: channel mask, created with create_mask.
1275
1276 Returns:
1277
1278 List of mask start indices and that of mask end indices,
1279 i.e., [istart1,istart2,....], [iend1,iend2,....].
1280
1281 """
1282 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1283 raise TypeError("The mask should be list or tuple.")
1284 if len(mask) < 2:
1285 raise TypeError("The mask elements should be > 1")
1286 istart=[]
1287 iend=[]
1288 if mask[0]: istart.append(0)
1289 for i in range(len(mask)-1):
1290 if not mask[i] and mask[i+1]:
1291 istart.append(i+1)
1292 elif mask[i] and not mask[i+1]:
1293 iend.append(i)
1294 if mask[len(mask)-1]: iend.append(len(mask)-1)
1295 if len(istart) != len(iend):
1296 raise RuntimeError("Numbers of mask start != mask end.")
1297 for i in range(len(istart)):
1298 if istart[i] > iend[i]:
1299 raise RuntimeError("Mask start index > mask end index")
1300 break
1301 return istart,iend
1302
1303# def get_restfreqs(self):
1304# """
1305# Get the restfrequency(s) stored in this scantable.
1306# The return value(s) are always of unit 'Hz'
1307# Parameters:
1308# none
1309# Returns:
1310# a list of doubles
1311# """
1312# return list(self._getrestfreqs())
1313
1314 def get_restfreqs(self, ids=None):
1315 """\
1316 Get the restfrequency(s) stored in this scantable.
1317 The return value(s) are always of unit 'Hz'
1318
1319 Parameters:
1320
1321 ids: (optional) a list of MOLECULE_IDs for which the restfrequency(s)
1322 are to be retrieved
1323
1324 Returns:
1325
1326 dictionary containing ids and a list of doubles for each id
1327
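Example (an illustrative sketch; assumes MOLECULE_ID 0 exists)::

    rfs = scan.get_restfreqs()    # {MOLECULE_ID: [restfreqs in Hz], ...}
    rf0 = scan.get_restfreqs(0)   # restfrequencies for MOLECULE_ID 0 only
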
1328 """
1329 if ids is None:
1330 rfreqs={}
1331 idlist = self.getmolnos()
1332 for i in idlist:
1333 rfreqs[i]=list(self._getrestfreqs(i))
1334 return rfreqs
1335 else:
1336 if type(ids)==list or type(ids)==tuple:
1337 rfreqs={}
1338 for i in ids:
1339 rfreqs[i]=list(self._getrestfreqs(i))
1340 return rfreqs
1341 else:
1342 return list(self._getrestfreqs(ids))
1343 #return list(self._getrestfreqs(ids))
1344
1345 def set_restfreqs(self, freqs=None, unit='Hz'):
1346 """\
1347 Set or replace the specified restfrequency.
1348 If the 'freqs' argument holds a scalar,
1349 then that rest frequency will be applied to all the selected
1350 data. If the 'freqs' argument holds
1351 a vector, then it MUST be of equal or smaller length than
1352 the number of IFs (and the available restfrequencies will be
1353 replaced by this vector). In this case, *all* data have
1354 the restfrequency set per IF according
1355 to the corresponding value you give in the 'freqs' vector.
1356 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1357 IF 1 gets restfreq 2e9.
1358
1359 You can also specify the frequencies via a linecatalog.
1360
1361 Parameters:
1362
1363 freqs: list of rest frequency values or string identifiers
1364
1365 unit: unit for rest frequency (default 'Hz')
1366
1367
1368 Example::
1369
1370 # set the given restfrequency for the all currently selected IFs
1371 scan.set_restfreqs(freqs=1.4e9)
1372 # set restfrequencies for the n IFs (n > 1) in the order of the
1373 # list, i.e.
1374 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1375 # len(list_of_restfreqs) == nIF
1376 # for nIF == 1 the following will set multiple restfrequency for
1377 # that IF
1378 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1379 # set multiple restfrequencies per IF, as a list of lists where
1380 # the outer list has nIF elements and the inner lists are of arbitrary length
1381 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1382
1383 *Note*:
1384
1385 To do more sophisticated restfrequency setting, e.g. on a
1386 source and IF basis, use scantable.set_selection() before using
1387 this function::
1388
1389 # provided your scantable is called scan
1390 selection = selector()
1391 selection.set_name("ORION*")
1392 selection.set_ifs([1])
1393 scan.set_selection(selection)
1394 scan.set_restfreqs(freqs=86.6e9)
1395
1396 """
1397 varlist = vars()
1398 from asap import linecatalog
1399 # simple value
1400 if isinstance(freqs, int) or isinstance(freqs, float):
1401 self._setrestfreqs([freqs], [""], unit)
1402 # list of values
1403 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1404 # list values are scalars
1405 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1406 if len(freqs) == 1:
1407 self._setrestfreqs(freqs, [""], unit)
1408 else:
1409 # allow the 'old' mode of setting multiple IFs
1410 sel = selector()
1411 savesel = self._getselection()
1412 iflist = self.getifnos()
1413 if len(freqs)>len(iflist):
1414 raise ValueError("number of elements in list of list "
1415 "exeeds the current IF selections")
1416 iflist = self.getifnos()
1417 for i, fval in enumerate(freqs):
1418 sel.set_ifs(iflist[i])
1419 self._setselection(sel)
1420 self._setrestfreqs([fval], [""], unit)
1421 self._setselection(savesel)
1422
1423 # list values are dicts, {'value': ..., 'name': ...}
1424 elif isinstance(freqs[-1], dict):
1425 values = []
1426 names = []
1427 for d in freqs:
1428 values.append(d["value"])
1429 names.append(d["name"])
1430 self._setrestfreqs(values, names, unit)
1431 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1432 sel = selector()
1433 savesel = self._getselection()
1434 iflist = self.getifnos()
1435 if len(freqs)>len(iflist):
1436 raise ValueError("number of elements in list of list exceeds"
1437 " the current IF selections")
1438 for i, fval in enumerate(freqs):
1439 sel.set_ifs(iflist[i])
1440 self._setselection(sel)
1441 self._setrestfreqs(fval, [""], unit)
1442 self._setselection(savesel)
1443 # freqs are to be taken from a linecatalog
1444 elif isinstance(freqs, linecatalog):
1445 sel = selector()
1446 savesel = self._getselection()
iflist = self.getifnos()
1447 for i in xrange(freqs.nrow()):
1448 sel.set_ifs(iflist[i])
1449 self._setselection(sel)
1450 self._setrestfreqs([freqs.get_frequency(i)],
1451 [freqs.get_name(i)], "MHz")
1452 # ensure that we are not iterating past nIF
1453 if i == self.nif()-1: break
1454 self._setselection(savesel)
1455 else:
1456 return
1457 self._add_history("set_restfreqs", varlist)
1458
1459 def shift_refpix(self, delta):
1460 """\
1461 Shift the reference pixel of the Spectral Coordinate by an
1462 integer amount.
1463
1464 Parameters:
1465
1466 delta: the amount to shift by
1467
1468 *Note*:
1469
1470 Be careful using this with broadband data.
1471
1472 """
1473 Scantable.shift_refpix(self, delta)
1474
1475 def history(self, filename=None):
1476 """\
1477 Print the history. Optionally to a file.
1478
1479 Parameters:
1480
1481 filename: The name of the file to save the history to.
1482
1483 """
1484 hist = list(self._gethistory())
1485 out = "-"*80
1486 for h in hist:
1487 if h.startswith("---"):
1488 out += "\n"+h
1489 else:
1490 items = h.split("##")
1491 date = items[0]
1492 func = items[1]
1493 items = items[2:]
1494 out += "\n"+date+"\n"
1495 out += "Function: %s\n Parameters:" % (func)
1496 for i in items:
1497 s = i.split("=")
1498 out += "\n %s = %s" % (s[0], s[1])
1499 out += "\n"+"-"*80
1500 if filename is not None:
1501 if filename == "":
1502 filename = 'scantable_history.txt'
1503 import os
1504 filename = os.path.expandvars(os.path.expanduser(filename))
1505 if not os.path.isdir(filename):
1506 data = open(filename, 'w')
1507 data.write(out)
1508 data.close()
1509 else:
1510 msg = "Illegal file name '%s'." % (filename)
1511 if rcParams['verbose']:
1512 #print msg
1513 asaplog.push( msg )
1514 print_log( 'ERROR' )
1515 else:
1516 raise IOError(msg)
1517 if rcParams['verbose']:
1518 try:
1519 from IPython.genutils import page as pager
1520 except ImportError:
1521 from pydoc import pager
1522 pager(out)
1523 else:
1524 return out
1525 return
1526 #
1527 # Maths business
1528 #
1529 @print_log_dec
1530 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1531 """\
1532 Return the (time) weighted average of a scan.
1533
1534 *Note*:
1535
1536 in channels only - align if necessary
1537
1538 Parameters:
1539
1540 mask: an optional mask (only used for 'var' and 'tsys'
1541 weighting)
1542
1543 scanav: True averages each scan separately
1544 False (default) averages all scans together,
1545
1546 weight: Weighting scheme.
1547 'none' (mean, no weighting)
1548 'var' (1/var(spec) weighted)
1549 'tsys' (1/Tsys**2 weighted)
1550 'tint' (integration time weighted)
1551 'tintsys' (Tint/Tsys**2)
1552 'median' (median averaging)
1553 The default is 'tint'
1554
1555 align: align the spectra in velocity before averaging. It takes
1556 the time of the first spectrum as reference time.
1557
1558 Example::
1559
1560 # time average the scantable without using a mask
1561 newscan = scan.average_time()
1562
1563 """
1564 varlist = vars()
1565 weight = weight or 'TINT'
1566 mask = mask or ()
1567 scanav = (scanav and 'SCAN') or 'NONE'
1568 scan = (self, )
1569 try:
1570 if align:
1571 scan = (self.freq_align(insitu=False), )
1572 s = None
1573 if weight.upper() == 'MEDIAN':
1574 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1575 scanav))
1576 else:
1577 s = scantable(self._math._average(scan, mask, weight.upper(),
1578 scanav))
1579 except RuntimeError, msg:
1580 if rcParams['verbose']:
1581 #print msg
1582 print_log()
1583 asaplog.push( str(msg) )
1584 print_log( 'ERROR' )
1585 return
1586 else: raise
1587 s._add_history("average_time", varlist)
1588 print_log()
1589 return s
1590
1591 @print_log_dec
1592 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1593 """\
1594 Return a scan where all spectra are converted to either
1595 Jansky or Kelvin depending upon the flux units of the scan table.
1596 By default the function tries to look the values up internally.
1597 If it can't find them (or if you want to over-ride), you must
1598 specify EITHER jyperk OR eta (and D which it will try to look up
1599 also if you don't set it). jyperk takes precedence if you set both.
1600
1601 Parameters:
1602
1603 jyperk: the Jy / K conversion factor
1604
1605 eta: the aperture efficiency
1606
1607 d: the geometric diameter (metres)
1608
1609 insitu: if False a new scantable is returned.
1610 Otherwise, the scaling is done in-situ
1611 The default is taken from .asaprc (False)
1612
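Example (an illustrative sketch; the numerical values are placeholders)::

    # convert using an explicit Jy/K factor
    jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
    # or derive the factor from the efficiency and dish diameter
    jyscan = scan.convert_flux(eta=0.58, d=22.0, insitu=False)
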
1613 """
1614 if insitu is None: insitu = rcParams['insitu']
1615 self._math._setinsitu(insitu)
1616 varlist = vars()
1617 jyperk = jyperk or -1.0
1618 d = d or -1.0
1619 eta = eta or -1.0
1620 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1621 s._add_history("convert_flux", varlist)
1622 print_log()
1623 if insitu: self._assign(s)
1624 else: return s
1625
1626 @print_log_dec
1627 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1628 """\
1629 Return a scan after applying a gain-elevation correction.
1630 The correction can be made via either a polynomial or a
1631 table-based interpolation (and extrapolation if necessary).
1632 You specify polynomial coefficients, an ascii table or neither.
1633 If you specify neither, then a polynomial correction will be made
1634 with built in coefficients known for certain telescopes (an error
1635 will occur if the instrument is not known).
1636 The data and Tsys are *divided* by the scaling factors.
1637
1638 Parameters:
1639
1640 poly: Polynomial coefficients (default None) to compute a
1641 gain-elevation correction as a function of
1642 elevation (in degrees).
1643
1644 filename: The name of an ascii file holding correction factors.
1645 The first row of the ascii file must give the column
1646 names and these MUST include columns
1647 "ELEVATION" (degrees) and "FACTOR" (multiply data
1648 by this) somewhere.
1649 The second row must give the data type of the
1650 column. Use 'R' for Real and 'I' for Integer.
1651 An example file would be
1652 (actual factors are arbitrary) :
1653
1654 TIME ELEVATION FACTOR
1655 R R R
1656 0.1 0 0.8
1657 0.2 20 0.85
1658 0.3 40 0.9
1659 0.4 60 0.85
1660 0.5 80 0.8
1661 0.6 90 0.75
1662
1663 method: Interpolation method when correcting from a table.
1664 Values are "nearest", "linear" (default), "cubic"
1665 and "spline"
1666
1667 insitu: if False a new scantable is returned.
1668 Otherwise, the scaling is done in-situ
1669 The default is taken from .asaprc (False)
1670
1671 """
1672
1673 if insitu is None: insitu = rcParams['insitu']
1674 self._math._setinsitu(insitu)
1675 varlist = vars()
1676 poly = poly or ()
1677 from os.path import expandvars
1678 filename = expandvars(filename)
1679 s = scantable(self._math._gainel(self, poly, filename, method))
1680 s._add_history("gain_el", varlist)
1681 print_log()
1682 if insitu:
1683 self._assign(s)
1684 else:
1685 return s
1686
1687 @print_log_dec
1688 def freq_align(self, reftime=None, method='cubic', insitu=None):
1689 """\
1690 Return a scan where all rows have been aligned in frequency/velocity.
1691 The alignment frequency frame (e.g. LSRK) is that set by function
1692 set_freqframe.
1693
1694 Parameters:
1695
1696 reftime: reference time to align at. By default, the time of
1697 the first row of data is used.
1698
1699 method: Interpolation method for regridding the spectra.
1700 Choose from "nearest", "linear", "cubic" (default)
1701 and "spline"
1702
1703 insitu: if False a new scantable is returned.
1704 Otherwise, the scaling is done in-situ
1705 The default is taken from .asaprc (False)
1706
1707 """
1708 if insitu is None: insitu = rcParams["insitu"]
1709 self._math._setinsitu(insitu)
1710 varlist = vars()
1711 reftime = reftime or ""
1712 s = scantable(self._math._freq_align(self, reftime, method))
1713 s._add_history("freq_align", varlist)
1714 print_log()
1715 if insitu: self._assign(s)
1716 else: return s
1717
1718 @print_log_dec
1719 def opacity(self, tau=None, insitu=None):
1720 """\
1721 Apply an opacity correction. The data
1722 and Tsys are multiplied by the correction factor.
1723
1724 Parameters:
1725
1726 tau: (list of) opacity from which the correction factor is
1727 exp(tau*ZD)
1728 where ZD is the zenith-distance.
1729 If a list is provided, it has to be of length nIF,
1730 nIF*nPol or 1 and in order of IF/POL, e.g.
1731 [opif0pol0, opif0pol1, opif1pol0 ...]
1732 if tau is `None` the opacities are determined from a
1733 model.
1734
1735 insitu: if False a new scantable is returned.
1736 Otherwise, the scaling is done in-situ
1737 The default is taken from .asaprc (False)
1738
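Example (an illustrative sketch; the opacity value is a placeholder)::

    # correct all data with a single zenith opacity
    corrscan = scan.opacity(tau=0.09, insitu=False)
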
1739 """
1740 if insitu is None: insitu = rcParams['insitu']
1741 self._math._setinsitu(insitu)
1742 varlist = vars()
1743 if not hasattr(tau, "__len__"):
1744 tau = [tau]
1745 s = scantable(self._math._opacity(self, tau))
1746 s._add_history("opacity", varlist)
1747 print_log()
1748 if insitu: self._assign(s)
1749 else: return s
1750
1751 @print_log_dec
1752 def bin(self, width=5, insitu=None):
1753 """\
1754 Return a scan where all spectra have been binned up.
1755
1756 Parameters:
1757
1758 width: The bin width (default=5) in pixels
1759
1760 insitu: if False a new scantable is returned.
1761 Otherwise, the scaling is done in-situ
1762 The default is taken from .asaprc (False)
1763
1764 """
1765 if insitu is None: insitu = rcParams['insitu']
1766 self._math._setinsitu(insitu)
1767 varlist = vars()
1768 s = scantable(self._math._bin(self, width))
1769 s._add_history("bin", varlist)
1770 print_log()
1771 if insitu:
1772 self._assign(s)
1773 else:
1774 return s
1775
1776 @print_log_dec
1777 def resample(self, width=5, method='cubic', insitu=None):
1778 """\
1779 Return a scan where all spectra have been resampled onto a new channel grid.
1780
1781 Parameters:
1782
1783 width: The bin width (default=5) in pixels
1784
1785 method: Interpolation method for the resampling.
1786 Values are "nearest", "linear", "cubic" (default)
1787 and "spline"
1788
1789 insitu: if False a new scantable is returned.
1790 Otherwise, the scaling is done in-situ
1791 The default is taken from .asaprc (False)
1792
1793 """
1794 if insitu is None: insitu = rcParams['insitu']
1795 self._math._setinsitu(insitu)
1796 varlist = vars()
1797 s = scantable(self._math._resample(self, method, width))
1798 s._add_history("resample", varlist)
1799 print_log()
1800 if insitu: self._assign(s)
1801 else: return s
1802
1803 @print_log_dec
1804 def average_pol(self, mask=None, weight='none'):
1805 """\
1806 Average the Polarisations together.
1807
1808 Parameters:
1809
1810 mask: An optional mask defining the region, where the
1811 averaging will be applied. The output will have all
1812 specified points masked.
1813
1814 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1815 weighted), or 'tsys' (1/Tsys**2 weighted)
1816
1817 """
1818 varlist = vars()
1819 mask = mask or ()
1820 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1821 s._add_history("average_pol", varlist)
1822 print_log()
1823 return s
1824
1825 @print_log_dec
1826 def average_beam(self, mask=None, weight='none'):
1827 """\
1828 Average the Beams together.
1829
1830 Parameters:
1831 mask: An optional mask defining the region, where the
1832 averaging will be applied. The output will have all
1833 specified points masked.
1834
1835 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1836 weighted), or 'tsys' (1/Tsys**2 weighted)
1837
1838 """
1839 varlist = vars()
1840 mask = mask or ()
1841 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1842 s._add_history("average_beam", varlist)
1843 print_log()
1844 return s
1845
1846 def parallactify(self, pflag):
1847 """\
1848 Set a flag to indicate whether this data should be treated as having
1849 been 'parallactified' (total phase == 0.0)
1850
1851 Parameters:
1852
1853 pflag: Bool indicating whether to turn this on (True) or
1854 off (False)
1855
1856 """
1857 varlist = vars()
1858 self._parallactify(pflag)
1859 self._add_history("parallactify", varlist)
1860
1861 @print_log_dec
1862 def convert_pol(self, poltype=None):
1863 """\
1864 Convert the data to a different polarisation type.
1865 Note that you will need cross-polarisation terms for most conversions.
1866
1867 Parameters:
1868
1869 poltype: The new polarisation type. Valid types are:
1870 "linear", "circular", "stokes" and "linpol"
1871
1872 """
1873 varlist = vars()
1874 try:
1875 s = scantable(self._math._convertpol(self, poltype))
1876 except RuntimeError, msg:
1877 if rcParams['verbose']:
1878 #print msg
1879 print_log()
1880 asaplog.push( str(msg) )
1881 print_log( 'ERROR' )
1882 return
1883 else:
1884 raise
1885 s._add_history("convert_pol", varlist)
1886 print_log()
1887 return s
1888
1889 @print_log_dec
1890 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1891 """\
1892 Smooth the spectrum by the specified kernel (conserving flux).
1893
1894 Parameters:
1895
1896 kernel: The type of smoothing kernel. Select from
1897 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1898 or 'poly'
1899
1900 width: The width of the kernel in pixels. For hanning this is
1901 ignored, otherwise it defaults to 5 pixels.
1902 For 'gaussian' it is the Full Width Half
1903 Maximum. For 'boxcar' it is the full width.
1904 For 'rmedian' and 'poly' it is the half width.
1905
1906 order: Optional parameter for 'poly' kernel (default is 2), to
1907 specify the order of the polynomial. Ignored by all other
1908 kernels.
1909
1910 plot: plot the original and the smoothed spectra.
1911 In this case each individual fit has to be approved by
1912 typing 'y' or 'n'.
1913
1914 insitu: if False a new scantable is returned.
1915 Otherwise, the scaling is done in-situ
1916 The default is taken from .asaprc (False)
1917
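Example (an illustrative sketch; the width value is a placeholder)::

    # boxcar smooth by 10 channels, returning a new scantable
    smoothed = scan.smooth(kernel='boxcar', width=10, insitu=False)
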
1918 """
1919 if insitu is None: insitu = rcParams['insitu']
1920 self._math._setinsitu(insitu)
1921 varlist = vars()
1922
1923 if plot: orgscan = self.copy()
1924
1925 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1926 s._add_history("smooth", varlist)
1927
1928 if plot:
1929 if rcParams['plotter.gui']:
1930 from asap.asaplotgui import asaplotgui as asaplot
1931 else:
1932 from asap.asaplot import asaplot
1933 self._p=asaplot()
1934 self._p.set_panels()
1935 ylab=s._get_ordinate_label()
1936 #self._p.palette(0,["#777777","red"])
1937 for r in xrange(s.nrow()):
1938 xsm=s._getabcissa(r)
1939 ysm=s._getspectrum(r)
1940 xorg=orgscan._getabcissa(r)
1941 yorg=orgscan._getspectrum(r)
1942 self._p.clear()
1943 self._p.hold()
1944 self._p.set_axes('ylabel',ylab)
1945 self._p.set_axes('xlabel',s._getabcissalabel(r))
1946 self._p.set_axes('title',s._getsourcename(r))
1947 self._p.set_line(label='Original',color="#777777")
1948 self._p.plot(xorg,yorg)
1949 self._p.set_line(label='Smoothed',color="red")
1950 self._p.plot(xsm,ysm)
1951 ### Ugly part for legend
1952 for i in [0,1]:
1953 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1954 self._p.release()
1955 ### Ugly part for legend
1956 self._p.subplots[0]['lines']=[]
1957 res = raw_input("Accept smoothing ([y]/n): ")
1958 if res.upper() == 'N':
1959 s._setspectrum(yorg, r)
1960 self._p.unmap()
1961 self._p = None
1962 del orgscan
1963
1964 print_log()
1965 if insitu: self._assign(s)
1966 else: return s
1967
1968 @print_log_dec
1969 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1970 insitu=None):
1971 """\
1972 Return a scan which has been baselined (all rows) by a polynomial.
1973
1974 Parameters:
1975
1976 mask: an optional mask
1977
1978 order: the order of the polynomial (default is 0)
1979
1980 plot: plot the fit and the residual. In this case each
1981 individual fit has to be approved by typing 'y'
1982 or 'n'.
1983
1984 uselin: use linear polynomial fit
1985
1986 insitu: if False a new scantable is returned.
1987 Otherwise, the scaling is done in-situ
1988 The default is taken from .asaprc (False)
1989
1990 Example::
1991
1992 # return a scan baselined by a third order polynomial,
1993 # not using a mask
1994 bscan = scan.poly_baseline(order=3)
1995
1996 """
1997 if insitu is None: insitu = rcParams['insitu']
1998 if not insitu:
1999 workscan = self.copy()
2000 else:
2001 workscan = self
2002 varlist = vars()
2003 if mask is None:
2004 mask = [True for i in xrange(self.nchan(-1))]
2005
2006 from asap.asapfitter import fitter
2007 try:
2008 f = fitter()
2009 if uselin:
2010 f.set_function(lpoly=order)
2011 else:
2012 f.set_function(poly=order)
2013
2014 rows = range(workscan.nrow())
2015 if len(rows) > 0:
2016 self.blpars = []
2017
2018 for r in rows:
2019 # take into account flagtra info (CAS-1434)
2020 flagtra = workscan._getmask(r)
2021 actualmask = mask[:]
2022 if len(actualmask) == 0:
2023 actualmask = list(flagtra[:])
2024 else:
2025 if len(actualmask) != len(flagtra):
2026 raise RuntimeError, "Mask and flagtra have different length"
2027 else:
2028 for i in range(0, len(actualmask)):
2029 actualmask[i] = actualmask[i] and flagtra[i]
2030 f.set_scan(workscan, actualmask)
2031 f.x = workscan._getabcissa(r)
2032 f.y = workscan._getspectrum(r)
2033 f.data = None
2034 f.fit()
2035 if plot:
2036 f.plot(residual=True)
2037 x = raw_input("Accept fit ( [y]/n ): ")
2038 if x.upper() == 'N':
2039 self.blpars.append(None)
2040 continue
2041 workscan._setspectrum(f.fitter.getresidual(), r)
2042 self.blpars.append(f.get_parameters())
2043
2044 if plot:
2045 f._p.unmap()
2046 f._p = None
2047 workscan._add_history("poly_baseline", varlist)
2048 print_log()
2049 if insitu: self._assign(workscan)
2050 else: return workscan
2051 except RuntimeError:
2052 msg = "The fit failed, possibly because it didn't converge."
2053 if rcParams['verbose']:
2054 #print msg
2055 print_log()
2056 asaplog.push( str(msg) )
2057 print_log( 'ERROR' )
2058 return
2059 else:
2060 raise RuntimeError(msg)
2061
2062
2063 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
2064 threshold=3, chan_avg_limit=1, plot=False,
2065 insitu=None):
2066 """\
2067 Return a scan which has been baselined (all rows) by a polynomial.
2068 Spectral lines are detected first using linefinder and masked out
2069 to avoid them affecting the baseline solution.
2070
2071 Parameters:
2072
2073 mask: an optional mask retrieved from the scantable
2074
2075 edge: an optional number of channels to drop at the edges of
2076 the spectrum. If only one value is
2077 specified, the same number will be dropped from
2078 both sides of the spectrum. Default is to keep
2079 all channels. Nested tuples represent individual
2080 edge selections for different IFs (whose numbers of
2081 spectral channels can differ)
2082
2083 order: the order of the polynomial (default is 0)
2084
2085 threshold: the threshold used by the line finder. It is better to
2086 keep it large as only strong lines affect the
2087 baseline solution.
2088
2089 chan_avg_limit:
2090 the maximum number of consecutive spectral channels to
2091 average during the search for weak and broad lines.
2092 The default is no averaging (and no search for weak
2093 lines). If such lines can affect the fitted baseline
2094 (e.g. a high order polynomial is fitted), increase this
2095 parameter (usually values up to 8 are reasonable). Most
2096 users of this method should find the default value
2097 sufficient.
2098
2099 plot: plot the fit and the residual. In this mode each
2100 individual fit has to be approved by typing 'y'
2101 or 'n'
2102
2103 insitu: if False a new scantable is returned.
2104 Otherwise, the baseline fit is done in-situ
2105 The default is taken from .asaprc (False)
2106
2107
2108 Example::
2109
2110 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2111
2112 """
2113 if insitu is None: insitu = rcParams['insitu']
2114 varlist = vars()
2115 from asap.asapfitter import fitter
2116 from asap.asaplinefind import linefinder
2117 from asap import _is_sequence_or_number as _is_valid
2118
2119 # check whether edge is set up for each IF individually
2120 individualedge = False
2121 if len(edge) > 1:
2122 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2123 individualedge = True
2124
2125 if not _is_valid(edge, int) and not individualedge:
2126 raise ValueError, "Parameter 'edge' has to be an integer or a \
2127 pair of integers specified as a tuple. Nested tuples are allowed \
2128 to make individual selection for different IFs."
2129
2130 curedge = (0, 0)
2131 if individualedge:
2132 for edgepar in edge:
2133 if not _is_valid(edgepar, int):
2134 raise ValueError, "Each element of the 'edge' tuple has \
2135 to be a pair of integers or an integer."
2136 else:
2137 curedge = edge
2138
2139 # setup fitter
2140 f = fitter()
2141 f.set_function(poly=order)
2142
2143 # setup line finder
2144 fl = linefinder()
2145 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2146
2147 if not insitu:
2148 workscan = self.copy()
2149 else:
2150 workscan = self
2151
2152 fl.set_scan(workscan)
2153
2154 rows = range(workscan.nrow())
2155 # Save parameters of baseline fits & masklists as a class attribute.
2156 # NOTICE: It does not reflect changes in scantable!
2157 if len(rows) > 0:
2158 self.blpars=[]
2159 self.masklists=[]
2160 asaplog.push("Processing:")
2161 for r in rows:
2162 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2163 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2164 workscan.getpol(r), workscan.getcycle(r))
2165 asaplog.push(msg, False)
2166
2167 # figure out edge parameter
2168 if individualedge:
2169 if workscan.getif(r) >= len(edge):
2170 raise RuntimeError, "Number of edge elements appears to " \
2171 "be less than the number of IFs"
2172 curedge = edge[workscan.getif(r)]
2173
2174 # take into account flagtra info (CAS-1434)
2175 flagtra = workscan._getmask(r)
2176 actualmask = mask[:]
2177 if len(actualmask) == 0:
2178 actualmask = list(flagtra[:])
2179 else:
2180 if len(actualmask) != len(flagtra):
2181 raise RuntimeError, "Mask and flagtra have different length"
2182 else:
2183 for i in range(0, len(actualmask)):
2184 actualmask[i] = actualmask[i] and flagtra[i]
2185
2186 # setup line finder
2187 fl.find_lines(r, actualmask, curedge)
2188 outmask=fl.get_mask()
2189 f.set_scan(workscan, fl.get_mask())
2190 f.x = workscan._getabcissa(r)
2191 f.y = workscan._getspectrum(r)
2192 f.data = None
2193 f.fit()
2194
2195 # Show mask list
2196 masklist=workscan.get_masklist(fl.get_mask(),row=r)
2197 msg = "mask range: "+str(masklist)
2198 asaplog.push(msg, False)
2199
2200 if plot:
2201 f.plot(residual=True)
2202 x = raw_input("Accept fit ( [y]/n ): ")
2203 if x.upper() == 'N':
2204 self.blpars.append(None)
2205 self.masklists.append(None)
2206 continue
2207
2208 workscan._setspectrum(f.fitter.getresidual(), r)
2209 self.blpars.append(f.get_parameters())
2210 self.masklists.append(masklist)
2211 if plot:
2212 f._p.unmap()
2213 f._p = None
2214 workscan._add_history("auto_poly_baseline", varlist)
2215 if insitu:
2216 self._assign(workscan)
2217 else:
2218 return workscan
2219
2220 @print_log_dec
2221 def rotate_linpolphase(self, angle):
2222 """\
2223 Rotate the phase of the complex polarization O=Q+iU correlation.
2224 This is always done in situ in the raw data. So if you call this
2225 function more than once then each call rotates the phase further.
2226
2227 Parameters:
2228
2229 angle: The angle (degrees) to rotate (add) by.
2230
2231 Example::
2232
2233 scan.rotate_linpolphase(2.3)
2234
2235 """
2236 varlist = vars()
2237 self._math._rotate_linpolphase(self, angle)
2238 self._add_history("rotate_linpolphase", varlist)
2239 print_log()
2240 return
2241
2242 @print_log_dec
2243 def rotate_xyphase(self, angle):
2244 """\
2245 Rotate the phase of the XY correlation. This is always done in situ
2246 in the data. So if you call this function more than once
2247 then each call rotates the phase further.
2248
2249 Parameters:
2250
2251 angle: The angle (degrees) to rotate (add) by.
2252
2253 Example::
2254
2255 scan.rotate_xyphase(2.3)
2256
2257 """
2258 varlist = vars()
2259 self._math._rotate_xyphase(self, angle)
2260 self._add_history("rotate_xyphase", varlist)
2261 print_log()
2262 return
2263
2264 @print_log_dec
2265 def swap_linears(self):
2266 """\
2267 Swap the linear polarisations XX and YY, or more generally the first
2268 two polarisations, as this also works for circulars.
2269 """
2270 varlist = vars()
2271 self._math._swap_linears(self)
2272 self._add_history("swap_linears", varlist)
2273 print_log()
2274 return
2275
2276 @print_log_dec
2277 def invert_phase(self):
2278 """\
2279 Invert the phase of the complex polarisation
2280 """
2281 varlist = vars()
2282 self._math._invert_phase(self)
2283 self._add_history("invert_phase", varlist)
2284 return
2285
2286 @print_log_dec
2287 def add(self, offset, insitu=None):
2288 """\
2289 Return a scan where all spectra have the offset added
2290
2291 Parameters:
2292
2293 offset: the offset
2294
2295 insitu: if False a new scantable is returned.
2296 Otherwise, the addition is done in-situ
2297 The default is taken from .asaprc (False)
2298
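Example (a minimal sketch; ``scan`` stands for any existing scantable)::

    # add a constant offset of 1.5 to every spectrum,
    # returning a new scantable
    shifted = scan.add(1.5, insitu=False)
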
2299 """
2300 if insitu is None: insitu = rcParams['insitu']
2301 self._math._setinsitu(insitu)
2302 varlist = vars()
2303 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2304 s._add_history("add", varlist)
2305 if insitu:
2306 self._assign(s)
2307 else:
2308 return s
2309
2310 @print_log_dec
2311 def scale(self, factor, tsys=True, insitu=None):
2312 """\
2313
2314 Return a scan where all spectra are scaled by the given 'factor'
2315
2316 Parameters:
2317
2318 factor: the scaling factor (float or 1D float list)
2319
2320 insitu: if False a new scantable is returned.
2321 Otherwise, the scaling is done in-situ
2322 The default is taken from .asaprc (False)
2323
2324 tsys: if True (default) then apply the operation to Tsys
2325 as well as the data
2326
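Example (a minimal sketch; ``scan`` stands for any existing scantable)::

    # multiply all spectra (and Tsys) by 2.5, returning a new scantable
    scaled = scan.scale(2.5, tsys=True, insitu=False)
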
2327 """
2328 if insitu is None: insitu = rcParams['insitu']
2329 self._math._setinsitu(insitu)
2330 varlist = vars()
2331 s = None
2332 import numpy
2333 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2334 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2335 from asapmath import _array2dOp
2336 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2337 else:
2338 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2339 else:
2340 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2341 s._add_history("scale", varlist)
2342 print_log()
2343 if insitu:
2344 self._assign(s)
2345 else:
2346 return s
2347
2348 def set_sourcetype(self, match, matchtype="pattern",
2349 sourcetype="reference"):
2350 """\
2351 Set the type of the source to be a source or reference scan
2352 using the provided pattern.
2353
2354 Parameters:
2355
2356 match: a Unix style pattern, regular expression or selector
2357
2358 matchtype: 'pattern' (default) UNIX style pattern or
2359 'regex' regular expression
2360
2361 sourcetype: the type of the source to use (source/reference)
2362
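Example (a minimal sketch; the source-name pattern is hypothetical)::

    # mark every scan whose source name ends in '_R' as a reference
    scan.set_sourcetype('*_R', matchtype='pattern',
                        sourcetype='reference')
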
2363 """
2364 varlist = vars()
2365 basesel = self.get_selection()
2366 stype = -1
2367 if sourcetype.lower().startswith("r"):
2368 stype = 1
2369 elif sourcetype.lower().startswith("s"):
2370 stype = 0
2371 else:
2372 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2373 if matchtype.lower().startswith("p"):
2374 matchtype = "pattern"
2375 elif matchtype.lower().startswith("r"):
2376 matchtype = "regex"
2377 else:
2378 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2379 sel = selector()
2380 if isinstance(match, selector):
2381 sel = match
2382 else:
2383 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2384 self.set_selection(basesel+sel)
2385 self._setsourcetype(stype)
2386 self.set_selection(basesel)
2387 self._add_history("set_sourcetype", varlist)
2388
2389 @print_log_dec
2390 def auto_quotient(self, preserve=True, mode='paired', verify=False):
2391 """\
2392 This function allows quotients to be built automatically.
2393 It assumes the observation has the same number of
2394 "ons" and "offs".
2395
2396 Parameters:
2397
2398 preserve: you can preserve (default) the continuum or
2399 remove it. The equations used are
2400 preserve: Output = Toff * (on/off) - Toff
2401 remove: Output = Toff * (on/off) - Ton
2402
2403 mode: the on/off detection mode
2404 'paired' (default)
2405 identifies 'off' scans by the
2406 trailing '_R' (Mopra/Parkes) or
2407 '_e'/'_w' (Tid) and matches
2408 on/off pairs from the observing pattern
2409 'time'
2410 finds the closest off in time
2411
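Example (a minimal sketch; ``scan`` stands for any existing scantable)::

    # form quotients from matched on/off pairs, keeping the continuum
    q = scan.auto_quotient(preserve=True, mode='paired')
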
2412 """
2413 modes = ["time", "paired"]
2414 if not mode in modes:
2415 msg = "please provide valid mode. Valid modes are %s" % (modes)
2416 raise ValueError(msg)
2417 varlist = vars()
2418 s = None
2419 if mode.lower() == "paired":
2420 basesel = self.get_selection()
2421 sel = selector()+basesel
2422 sel.set_query("SRCTYPE==1")
2423 self.set_selection(sel)
2424 offs = self.copy()
2425 sel.set_query("SRCTYPE==0")
2426 self.set_selection(sel)
2427 ons = self.copy()
2428 s = scantable(self._math._quotient(ons, offs, preserve))
2429 self.set_selection(basesel)
2430 elif mode.lower() == "time":
2431 s = scantable(self._math._auto_quotient(self, mode, preserve))
2432 s._add_history("auto_quotient", varlist)
2433 print_log()
2434 return s
2435
2436 @print_log_dec
2437 def mx_quotient(self, mask = None, weight='median', preserve=True):
2438 """\
2439 Form a quotient using "off" beams when observing in "MX" mode.
2440
2441 Parameters:
2442
2443 mask: an optional mask to be used when weight == 'stddev'
2444
2445 weight: How to average the off beams. Default is 'median'.
2446
2447 preserve: you can preserve (default) the continuum or
2448 remove it. The equations used are:
2449
2450 preserve: Output = Toff * (on/off) - Toff
2451
2452 remove: Output = Toff * (on/off) - Ton
2453
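Example (a minimal sketch; ``scan`` stands for an MX-mode scantable)::

    # median-average the off beams and form the quotient
    q = scan.mx_quotient(weight='median', preserve=True)
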
2454 """
2455 mask = mask or ()
2456 varlist = vars()
2457 on = scantable(self._math._mx_extract(self, 'on'))
2458 preoff = scantable(self._math._mx_extract(self, 'off'))
2459 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2460 from asapmath import quotient
2461 q = quotient(on, off, preserve)
2462 q._add_history("mx_quotient", varlist)
2463 print_log()
2464 return q
2465
2466 @print_log_dec
2467 def freq_switch(self, insitu=None):
2468 """\
2469 Apply frequency switching to the data.
2470
2471 Parameters:
2472
2473 insitu: if False a new scantable is returned.
2474 Otherwise, the switching is done in-situ
2475 The default is taken from .asaprc (False)
2476
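Example (a minimal sketch; ``scan`` stands for any existing scantable)::

    # apply frequency switching, returning a new scantable
    fs = scan.freq_switch(insitu=False)
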
2477 """
2478 if insitu is None: insitu = rcParams['insitu']
2479 self._math._setinsitu(insitu)
2480 varlist = vars()
2481 s = scantable(self._math._freqswitch(self))
2482 s._add_history("freq_switch", varlist)
2483 print_log()
2484 if insitu: self._assign(s)
2485 else: return s
2486
2487 @print_log_dec
2488 def recalc_azel(self):
2489 """Recalculate the azimuth and elevation for each position."""
2490 varlist = vars()
2491 self._recalcazel()
2492 self._add_history("recalc_azel", varlist)
2493 print_log()
2494 return
2495
2496 @print_log_dec
2497 def __add__(self, other):
2498 varlist = vars()
2499 s = None
2500 if isinstance(other, scantable):
2501 s = scantable(self._math._binaryop(self, other, "ADD"))
2502 elif isinstance(other, float):
2503 s = scantable(self._math._unaryop(self, other, "ADD", False))
2504 else:
2505 raise TypeError("Other input is not a scantable or float value")
2506 s._add_history("operator +", varlist)
2507 return s
2508
2509 @print_log_dec
2510 def __sub__(self, other):
2511 """
2512 implicit on all axes and on Tsys
2513 """
2514 varlist = vars()
2515 s = None
2516 if isinstance(other, scantable):
2517 s = scantable(self._math._binaryop(self, other, "SUB"))
2518 elif isinstance(other, float):
2519 s = scantable(self._math._unaryop(self, other, "SUB", False))
2520 else:
2521 raise TypeError("Other input is not a scantable or float value")
2522 s._add_history("operator -", varlist)
2523 return s
2524
2525 @print_log_dec
2526 def __mul__(self, other):
2527 """
2528 implicit on all axes and on Tsys
2529 """
2530 varlist = vars()
2531 s = None
2532 if isinstance(other, scantable):
2533 s = scantable(self._math._binaryop(self, other, "MUL"))
2534 elif isinstance(other, float):
2535 s = scantable(self._math._unaryop(self, other, "MUL", False))
2536 else:
2537 raise TypeError("Other input is not a scantable or float value")
2538 s._add_history("operator *", varlist)
2539 return s
2540
2541
2542 @print_log_dec
2543 def __div__(self, other):
2544 """
2545 implicit on all axes and on Tsys
2546 """
2547 varlist = vars()
2548 s = None
2549 if isinstance(other, scantable):
2550 s = scantable(self._math._binaryop(self, other, "DIV"))
2551 elif isinstance(other, float):
2552 if other == 0.0:
2553 raise ZeroDivisionError("Dividing by zero is not recommended")
2554 s = scantable(self._math._unaryop(self, other, "DIV", False))
2555 else:
2556 raise TypeError("Other input is not a scantable or float value")
2557 s._add_history("operator /", varlist)
2558 return s
2559
2560 def get_fit(self, row=0):
2561 """\
2562 Print or return the stored fits for a row in the scantable
2563
2564 Parameters:
2565
2566 row: the row which the fit has been applied to.
2567
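Example (a minimal sketch; assumes ``scan`` already holds stored fits)::

    # print or return the fit for row 0, depending on the
    # 'verbose' rc setting
    pars = scan.get_fit(0)
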
2568 """
2569 if row >= self.nrow():
2570 return
2571 from asap.asapfit import asapfit
2572 fit = asapfit(self._getfit(row))
2573 if rcParams['verbose']:
2574 #print fit
2575 asaplog.push( '%s' %(fit) )
2576 print_log()
2577 return
2578 else:
2579 return fit.as_dict()
2580
2581 def flag_nans(self):
2582 """\
2583 Utility function to flag NaN values in the scantable.
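
Example (a minimal sketch; ``scan`` stands for any existing scantable)::

    scan.flag_nans()      # flag any NaN channels, row by row
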
2584 """
2585 import numpy
2586 basesel = self.get_selection()
2587 for i in range(self.nrow()):
2588 sel = self.get_row_selector(i)
2589 self.set_selection(basesel+sel)
2590 nans = numpy.isnan(self._getspectrum(0))
2591 if numpy.any(nans):
2592 bnans = [ bool(v) for v in nans]
2593 self.flag(bnans)
2594 self.set_selection(basesel)
2595
2596 def get_row_selector(self, rowno):
2597 return selector(beams=self.getbeam(rowno),
2598 ifs=self.getif(rowno),
2599 pols=self.getpol(rowno),
2600 scans=self.getscan(rowno),
2601 cycles=self.getcycle(rowno))
2602
2603 def _add_history(self, funcname, parameters):
2604 if not rcParams['scantable.history']:
2605 return
2606 # create date
2607 sep = "##"
2608 from datetime import datetime
2609 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2610 hist = dstr+sep
2611 hist += funcname+sep#cdate+sep
2612 if parameters.has_key('self'): del parameters['self']
2613 for k, v in parameters.iteritems():
2614 if type(v) is dict:
2615 for k2, v2 in v.iteritems():
2616 hist += k2
2617 hist += "="
2618 if isinstance(v2, scantable):
2619 hist += 'scantable'
2620 elif k2 == 'mask':
2621 if isinstance(v2, list) or isinstance(v2, tuple):
2622 hist += str(self._zip_mask(v2))
2623 else:
2624 hist += str(v2)
2625 else:
2626 hist += str(v2)
2627 else:
2628 hist += k
2629 hist += "="
2630 if isinstance(v, scantable):
2631 hist += 'scantable'
2632 elif k == 'mask':
2633 if isinstance(v, list) or isinstance(v, tuple):
2634 hist += str(self._zip_mask(v))
2635 else:
2636 hist += str(v)
2637 else:
2638 hist += str(v)
2639 hist += sep
2640 hist = hist[:-2] # remove trailing '##'
2641 self._addhistory(hist)
2642
2643
2644 def _zip_mask(self, mask):
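# Compress a channel mask into a list of [start, end) index pairs,
# one pair per run of selected (True) channels.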
2645 mask = list(mask)
2646 i = 0
2647 segments = []
2648 while mask[i:].count(1):
2649 i += mask[i:].index(1)
2650 if mask[i:].count(0):
2651 j = i + mask[i:].index(0)
2652 else:
2653 j = len(mask)
2654 segments.append([i, j])
2655 i = j
2656 return segments
2657
2658 def _get_ordinate_label(self):
2659 fu = "("+self.get_fluxunit()+")"
2660 import re
2661 lbl = "Intensity"
2662 if re.match(".K.", fu):
2663 lbl = "Brightness Temperature "+ fu
2664 elif re.match(".Jy.", fu):
2665 lbl = "Flux density "+ fu
2666 return lbl
2667
2668 def _check_ifs(self):
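# Check that all IFs with a non-zero number of channels share the
# same channel count.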
2669 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2670 nchans = filter(lambda t: t > 0, nchans)
2671 return (sum(nchans)/len(nchans) == nchans[0])
2672
2673 def _fill(self, names, unit, average, getpt, antenna):
2674 first = True
2675 fullnames = []
2676 for name in names:
2677 name = os.path.expandvars(name)
2678 name = os.path.expanduser(name)
2679 if not os.path.exists(name):
2680 msg = "File '%s' does not exist" % (name)
2681 if rcParams['verbose']:
2682 asaplog.push(msg)
2683 print_log( 'ERROR' )
2684 return
2685 raise IOError(msg)
2686 fullnames.append(name)
2687 if average:
2688 asaplog.push('Auto averaging integrations')
2689 stype = int(rcParams['scantable.storage'].lower() == 'disk')
2690 for name in fullnames:
2691 tbl = Scantable(stype)
2692 r = filler(tbl)
2693 rx = rcParams['scantable.reference']
2694 r.setreferenceexpr(rx)
2695 msg = "Importing %s..." % (name)
2696 asaplog.push(msg, False)
2697 print_log()
2698 r.open(name)# antenna, -1, -1, getpt)
2699 r.fill()
2700 if average:
2701 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2702 if not first:
2703 tbl = self._math._merge([self, tbl])
2704 Scantable.__init__(self, tbl)
2705 r.close()
2706 del r, tbl
2707 first = False
2708 if unit is not None:
2709 self.set_fluxunit(unit)
2710 if not is_casapy():
2711 self.set_freqframe(rcParams['scantable.freqframe'])
2712
2713 def __getitem__(self, key):
2714 if key < 0:
2715 key += self.nrow()
2716 if key >= self.nrow():
2717 raise IndexError("Row index out of range.")
2718 return self._getspectrum(key)
2719
2720 def __setitem__(self, key, value):
2721 if key < 0:
2722 key += self.nrow()
2723 if key >= self.nrow():
2724 raise IndexError("Row index out of range.")
2725 if not hasattr(value, "__len__") or \
2726 len(value) > self.nchan(self.getif(key)):
2727 raise ValueError("Spectrum length doesn't match.")
2728 return self._setspectrum(value, key)
2729
2730 def __len__(self):
2731 return self.nrow()
2732
2733 def __iter__(self):
2734 for i in range(len(self)):
2735 yield self[i]