source: trunk/python/scantable.py@ 1856

Last change on this file since 1856 was 1856, checked in by Malte Marquarding, 15 years ago

removed redundant print_log calls as the @print_log_dec handles them already

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.8 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5 from functools import wraps as wraps_dec
6except ImportError:
7 from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, print_log, print_log_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or
18
19
def preserve_selection(func):
    """Decorator that saves the scantable's selection before calling
    the wrapped function and restores it afterwards, so the call cannot
    permanently alter the active selection.
    """
    @wraps_dec(func)
    def wrapper(instance, *arguments, **keywords):
        saved = instance.get_selection()
        result = func(instance, *arguments, **keywords)
        instance.set_selection(saved)
        return result
    return wrapper
28
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A scantable is a directory containing 'table.info' but
    # no 'table.f1' file.
    looks_like_dir = os.path.isdir(filename)
    has_f1 = os.path.exists(filename + '/table.f1')
    has_info = os.path.exists(filename + '/table.info')
    return looks_like_dir and has_info and not has_f1
40
41
42class scantable(Scantable):
43 """\
44 The ASAP container for scans (single-dish data).
45 """
46
    @print_log_dec
    def __init__(self, filename, average=None, unit=None, getpt=None,
                 antenna=None, parallactify=None):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True (the default), all pointing data are
                          filled. Disabling makes loading the MS data
                          faster in some cases.

            antenna:      Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        if getpt is None:
            getpt = True
        if antenna is not None:
            # antenna selection is not implemented yet; warn the user
            asaplog.push("Antenna selection currently unsupported."
                         "Using '0'")
            print_log('WARN')
        if antenna is None:
            antenna = ''
        elif type(antenna) == int:
            antenna = '%s' % antenna
        elif type(antenna) == list:
            # build a comma separated selection string out of the list
            tmpstr = ''
            for i in range( len(antenna) ):
                if type(antenna[i]) == int:
                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
                elif type(antenna[i]) == str:
                    tmpstr=tmpstr+antenna[i]+','
                else:
                    asaplog.push('Bad antenna selection.')
                    print_log('ERROR')
                    return
            antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the arguments, recorded in the processing history
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # [advanced] reference to an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    if rcParams['verbose']:
                        asaplog.push(s)
                        print_log('ERROR')
                        return
                    raise IOError(s)
                if is_scantable(filename):
                    # an asap table on disk; honour the storage rc setting
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif os.path.isdir(filename) \
                    and not os.path.exists(filename+'/table.f1'):
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    if rcParams['verbose']:
                        #print msg
                        asaplog.push( msg )
                        print_log( 'ERROR' )
                        return
                    else:
                        raise IOError(msg)
                else:
                    # a foreign format (rpfits/sdfits/ms) - run the filler
                    self._fill([filename], unit, average, getpt, antenna)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names - fill them all
                self._fill(filename, unit, average, getpt, antenna)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
147
148 @print_log_dec
149 def save(self, name=None, format=None, overwrite=False):
150 """\
151 Store the scantable on disk. This can be an asap (aips++) Table,
152 SDFITS or MS2 format.
153
154 Parameters:
155
156 name: the name of the outputfile. For format "ASCII"
157 this is the root file name (data in 'name'.txt
158 and header in 'name'_header.txt)
159
160 format: an optional file format. Default is ASAP.
161 Allowed are:
162
163 * 'ASAP' (save as ASAP [aips++] Table),
164 * 'SDFITS' (save as SDFITS file)
165 * 'ASCII' (saves as ascii text file)
166 * 'MS2' (saves as an casacore MeasurementSet V2)
167 * 'FITS' (save as image FITS - not readable by class)
168 * 'CLASS' (save as FITS readable by CLASS)
169
170 overwrite: If the file should be overwritten if it exists.
171 The default False is to return with warning
172 without writing the output. USE WITH CARE.
173
174 Example::
175
176 scan.save('myscan.asap')
177 scan.save('myscan.sdfits', 'SDFITS')
178
179 """
180 from os import path
181 format = format or rcParams['scantable.save']
182 suffix = '.'+format.lower()
183 if name is None or name == "":
184 name = 'scantable'+suffix
185 msg = "No filename given. Using default name %s..." % name
186 asaplog.push(msg)
187 name = path.expandvars(name)
188 if path.isfile(name) or path.isdir(name):
189 if not overwrite:
190 msg = "File %s exists." % name
191 if rcParams['verbose']:
192 #print msg
193 asaplog.push( msg )
194 print_log( 'ERROR' )
195 return
196 else:
197 raise IOError(msg)
198 format2 = format.upper()
199 if format2 == 'ASAP':
200 self._save(name)
201 else:
202 from asap._asap import stwriter as stw
203 writer = stw(format2)
204 writer.write(self, name)
205 return
206
207 def copy(self):
208 """Return a copy of this scantable.
209
210 *Note*:
211
212 This makes a full (deep) copy. scan2 = scan1 makes a reference.
213
214 Example::
215
216 copiedscan = scan.copy()
217
218 """
219 sd = scantable(Scantable._copy(self))
220 return sd
221
    def drop_scan(self, scanid=None):
        """\
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.

        Parameters:

            scanid:    a (list of) scan number(s)

        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            if rcParams['verbose']:
                asaplog.push( 'Please specify a scanno to drop from the scantable' )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError("No scan given")
        try:
            scanid = _to_list(scanid)
            # scans that remain after removing the requested ids;
            # list.remove raises ValueError when an id is absent,
            # which is handled below as "no match"
            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
            for sid in scanid: allscans.remove(sid)
            if len(allscans) == 0:
                raise ValueError("Can't remove all scans")
        except ValueError:
            if rcParams['verbose']:
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
                return
            else: raise
        try:
            sel = selector(scans=allscans)
            return self._select_copy(sel)
        except RuntimeError:
            if rcParams['verbose']:
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
            else:
                raise
265
266 def _select_copy(self, selection):
267 orig = self.get_selection()
268 self.set_selection(orig+selection)
269 cp = self.copy()
270 self.set_selection(orig)
271 return cp
272
    def get_scan(self, scanid=None):
        """\
        Return a specific scan (by scanno) or collection of scans (by
        source name) in a new scantable.

        *Note*:

            See scantable.drop_scan() for the inverse operation.

        Parameters:

            scanid:    a (list of) scanno or a source name, unix-style
                       patterns are accepted for source name matching, e.g.
                       '*_R' gets all 'ref' scans

        Example::

            # get all scans containing the source '323p459'
            newscan = scan.get_scan('323p459')
            # get all 'off' scans
            refscans = scan.get_scan('*_R')
            # get a subset of scans by scanno (as listed in scan.summary())
            newscan = scan.get_scan([0, 2, 7, 10])

        """
        if scanid is None:
            if rcParams['verbose']:
                #print "Please specify a scan no or name to " \
                #    "retrieve from the scantable"
                asaplog.push( 'Please specify a scan no or name to retrieve'
                              ' from the scantable' )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError("No scan given")

        try:
            # NOTE(review): 'bsel' is never used below; the base
            # selection is preserved by _select_copy() instead
            bsel = self.get_selection()
            sel = selector()
            if type(scanid) is str:
                sel.set_name(scanid)
                return self._select_copy(sel)
            elif type(scanid) is int:
                sel.set_scans([scanid])
                return self._select_copy(sel)
            elif type(scanid) is list:
                sel.set_scans(scanid)
                return self._select_copy(sel)
            else:
                msg = "Illegal scanid type, use 'int' or 'list' if ints."
                if rcParams['verbose']:
                    #print msg
                    asaplog.push( msg )
                    print_log( 'ERROR' )
                else:
                    raise TypeError(msg)
        except RuntimeError:
            if rcParams['verbose']:
                #print "Couldn't find any match."
                print_log()
                asaplog.push( "Couldn't find any match." )
                print_log( 'ERROR' )
            else: raise
336
337 def __str__(self):
338 return Scantable._summary(self, True)
339
340 def summary(self, filename=None):
341 """\
342 Print a summary of the contents of this scantable.
343
344 Parameters:
345
346 filename: the name of a file to write the putput to
347 Default - no file output
348
349 """
350 info = Scantable._summary(self, True)
351 if filename is not None:
352 if filename is "":
353 filename = 'scantable_summary.txt'
354 from os.path import expandvars, isdir
355 filename = expandvars(filename)
356 if not isdir(filename):
357 data = open(filename, 'w')
358 data.write(info)
359 data.close()
360 else:
361 msg = "Illegal file name '%s'." % (filename)
362 if rcParams['verbose']:
363 #print msg
364 asaplog.push( msg )
365 print_log( 'ERROR' )
366 else:
367 raise IOError(msg)
368 if rcParams['verbose']:
369 try:
370 from IPython.genutils import page as pager
371 except ImportError:
372 from pydoc import pager
373 pager(info)
374 else:
375 return info
376
377 def get_spectrum(self, rowno):
378 """Return the spectrum for the current row in the scantable as a list.
379
380 Parameters:
381
382 rowno: the row number to retrieve the spectrum from
383
384 """
385 return self._getspectrum(rowno)
386
387 def get_mask(self, rowno):
388 """Return the mask for the current row in the scantable as a list.
389
390 Parameters:
391
392 rowno: the row number to retrieve the mask from
393
394 """
395 return self._getmask(rowno)
396
397 def set_spectrum(self, spec, rowno):
398 """Return the spectrum for the current row in the scantable as a list.
399
400 Parameters:
401
402 spec: the new spectrum
403
404 rowno: the row number to set the spectrum for
405
406 """
407 assert(len(spec) == self.nchan())
408 return self._setspectrum(spec, rowno)
409
410 def get_coordinate(self, rowno):
411 """Return the (spectral) coordinate for a a given 'rowno'.
412
413 *Note*:
414
415 * This coordinate is only valid until a scantable method modifies
416 the frequency axis.
417 * This coordinate does contain the original frequency set-up
418 NOT the new frame. The conversions however are done using the user
419 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
420 use scantable.freq_align first. Without it there is no closure,
421 i.e.::
422
423 c = myscan.get_coordinate(0)
424 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
425
426 Parameters:
427
428 rowno: the row number for the spectral coordinate
429
430 """
431 return coordinate(Scantable.get_coordinate(self, rowno))
432
433 def get_selection(self):
434 """\
435 Get the selection object currently set on this scantable.
436
437 Example::
438
439 sel = scan.get_selection()
440 sel.set_ifs(0) # select IF 0
441 scan.set_selection(sel) # apply modified selection
442
443 """
444 return selector(self._getselection())
445
446 def set_selection(self, selection=None, **kw):
447 """\
448 Select a subset of the data. All following operations on this scantable
449 are only applied to thi selection.
450
451 Parameters:
452
453 selection: a selector object (default unset the selection), or
454 any combination of "pols", "ifs", "beams", "scans",
455 "cycles", "name", "query"
456
457 Examples::
458
459 sel = selector() # create a selection object
460 self.set_scans([0, 3]) # select SCANNO 0 and 3
461 scan.set_selection(sel) # set the selection
462 scan.summary() # will only print summary of scanno 0 an 3
463 scan.set_selection() # unset the selection
464 # or the equivalent
465 scan.set_selection(scans=[0,3])
466 scan.summary() # will only print summary of scanno 0 an 3
467 scan.set_selection() # unset the selection
468
469 """
470 if selection is None:
471 # reset
472 if len(kw) == 0:
473 selection = selector()
474 else:
475 # try keywords
476 for k in kw:
477 if k not in selector.fields:
478 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
479 selection = selector(**kw)
480 self._setselection(selection)
481
482 def get_row(self, row=0, insitu=None):
483 """\
484 Select a row in the scantable.
485 Return a scantable with single row.
486
487 Parameters:
488
489 row: row no of integration, default is 0.
490 insitu: if False a new scantable is returned. Otherwise, the
491 scaling is done in-situ. The default is taken from .asaprc
492 (False)
493
494 """
495 if insitu is None: insitu = rcParams['insitu']
496 if not insitu:
497 workscan = self.copy()
498 else:
499 workscan = self
500 # Select a row
501 sel=selector()
502 sel.set_scans([workscan.getscan(row)])
503 sel.set_cycles([workscan.getcycle(row)])
504 sel.set_beams([workscan.getbeam(row)])
505 sel.set_ifs([workscan.getif(row)])
506 sel.set_polarisations([workscan.getpol(row)])
507 sel.set_name(workscan._getsourcename(row))
508 workscan.set_selection(sel)
509 if not workscan.nrow() == 1:
510 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
511 raise RuntimeError(msg)
512 del sel
513 if insitu:
514 self._assign(workscan)
515 else:
516 return workscan
517
    #def stats(self, stat='stddev', mask=None):
    def stats(self, stat='stddev', mask=None, form='3.3f'):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

        Example::

            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abcissa value at the extremum
        # instead of the statistic itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # remember the channel of the extremum for the report below
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc: statvals = self._math._stats(self, mask, stat)

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50
        # build a human readable per-row report
        for i in range(self.nrow()):
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
            out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            out +=  sep+"\n"

        if rcParams['verbose']:
            import os
            # route the report through a temp file so it ends up in the
            # asap log in one piece
            if os.environ.has_key( 'USER' ):
                usr=os.environ['USER']
            else:
                import commands
                usr=commands.getoutput( 'whoami' )
            tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
            f=open(tmpfile,'w')
            print >> f, sep
            print >> f, ' %s %s' % (label, statunit)
            print >> f, sep
            print >> f, out
            f.close()
            f=open(tmpfile,'r')
            x=f.readlines()
            f.close()
            blanc=''
            asaplog.push(blanc.join(x), False)
            #for xx in x:
            #    asaplog.push( xx, False )
            print_log()
        return statvals
614
615 def chan2data(self, rowno=0, chan=0):
616 """\
617 Returns channel/frequency/velocity and spectral value
618 at an arbitrary row and channel in the scantable.
619
620 Parameters:
621
622 rowno: a row number in the scantable. Default is the
623 first row, i.e. rowno=0
624
625 chan: a channel in the scantable. Default is the first
626 channel, i.e. pos=0
627
628 """
629 if isinstance(rowno, int) and isinstance(chan, int):
630 qx = {'unit': self.get_unit(),
631 'value': self._getabcissa(rowno)[chan]}
632 qy = {'unit': self.get_fluxunit(),
633 'value': self._getspectrum(rowno)[chan]}
634 return qx, qy
635
636 def stddev(self, mask=None):
637 """\
638 Determine the standard deviation of the current beam/if/pol
639 Takes a 'mask' as an optional parameter to specify which
640 channels should be excluded.
641
642 Parameters:
643
644 mask: an optional mask specifying where the standard
645 deviation should be determined.
646
647 Example::
648
649 scan.set_unit('channel')
650 msk = scan.create_mask([100, 200], [500, 600])
651 scan.stddev(mask=m)
652
653 """
654 return self.stats(stat='stddev', mask=mask);
655
656
657 def get_column_names(self):
658 """\
659 Return a list of column names, which can be used for selection.
660 """
661 return list(Scantable.get_column_names(self))
662
663 def get_tsys(self, row=-1):
664 """\
665 Return the System temperatures.
666
667 Parameters:
668
669 row: the rowno to get the information for. (default all rows)
670
671 Returns:
672
673 a list of Tsys values for the current selection
674
675 """
676 if row > -1:
677 return self._get_column(self._gettsys, row)
678 return self._row_callback(self._gettsys, "Tsys")
679
680
681 def get_weather(self, row=-1):
682 """\
683 Return the weather informations.
684
685 Parameters:
686
687 row: the rowno to get the information for. (default all rows)
688
689 Returns:
690
691 a dict or list of of dicts of values for the current selection
692
693 """
694
695 values = self._get_column(self._get_weather, row)
696 if row > -1:
697 return {'temperature': values[0],
698 'pressure': values[1], 'humidity' : values[2],
699 'windspeed' : values[3], 'windaz' : values[4]
700 }
701 else:
702 out = []
703 for r in values:
704
705 out.append({'temperature': r[0],
706 'pressure': r[1], 'humidity' : r[2],
707 'windspeed' : r[3], 'windaz' : r[4]
708 })
709 return out
710
    def _row_callback(self, callback, label):
        # Apply *callback* to every row, collect the results and build a
        # per-row text report which is pushed to the log when verbose.
        # Returns the list of callback results.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/if/pol ids when there is more than one
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'
        if rcParams['verbose']:
            asaplog.push(sep)
            asaplog.push(" %s" % (label))
            asaplog.push(sep)
            asaplog.push(out)
            print_log()
        return outvec
734
735 def _get_column(self, callback, row=-1):
736 """
737 """
738 if row == -1:
739 return [callback(i) for i in range(self.nrow())]
740 else:
741 if 0 <= row < self.nrow():
742 return callback(row)
743
744
745 def get_time(self, row=-1, asdatetime=False):
746 """\
747 Get a list of time stamps for the observations.
748 Return a datetime object for each integration time stamp in the scantable.
749
750 Parameters:
751
752 row: row no of integration. Default -1 return all rows
753
754 asdatetime: return values as datetime objects rather than strings
755
756 """
757 from time import strptime
758 from datetime import datetime
759 times = self._get_column(self._gettime, row)
760 if not asdatetime:
761 return times
762 format = "%Y/%m/%d/%H:%M:%S"
763 if isinstance(times, list):
764 return [datetime(*strptime(i, format)[:6]) for i in times]
765 else:
766 return datetime(*strptime(times, format)[:6])
767
768
769 def get_inttime(self, row=-1):
770 """\
771 Get a list of integration times for the observations.
772 Return a time in seconds for each integration in the scantable.
773
774 Parameters:
775
776 row: row no of integration. Default -1 return all rows.
777
778 """
779 return self._get_column(self._getinttime, row)
780
781
782 def get_sourcename(self, row=-1):
783 """\
784 Get a list source names for the observations.
785 Return a string for each integration in the scantable.
786 Parameters:
787
788 row: row no of integration. Default -1 return all rows.
789
790 """
791 return self._get_column(self._getsourcename, row)
792
793 def get_elevation(self, row=-1):
794 """\
795 Get a list of elevations for the observations.
796 Return a float for each integration in the scantable.
797
798 Parameters:
799
800 row: row no of integration. Default -1 return all rows.
801
802 """
803 return self._get_column(self._getelevation, row)
804
805 def get_azimuth(self, row=-1):
806 """\
807 Get a list of azimuths for the observations.
808 Return a float for each integration in the scantable.
809
810 Parameters:
811 row: row no of integration. Default -1 return all rows.
812
813 """
814 return self._get_column(self._getazimuth, row)
815
816 def get_parangle(self, row=-1):
817 """\
818 Get a list of parallactic angles for the observations.
819 Return a float for each integration in the scantable.
820
821 Parameters:
822
823 row: row no of integration. Default -1 return all rows.
824
825 """
826 return self._get_column(self._getparangle, row)
827
828 def get_direction(self, row=-1):
829 """
830 Get a list of Positions on the sky (direction) for the observations.
831 Return a string for each integration in the scantable.
832
833 Parameters:
834
835 row: row no of integration. Default -1 return all rows
836
837 """
838 return self._get_column(self._getdirection, row)
839
840 def get_directionval(self, row=-1):
841 """\
842 Get a list of Positions on the sky (direction) for the observations.
843 Return a float for each integration in the scantable.
844
845 Parameters:
846
847 row: row no of integration. Default -1 return all rows
848
849 """
850 return self._get_column(self._getdirectionvec, row)
851
    @print_log_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit:    optional unit, default is 'channel'. Use one of '*Hz',
                     'km/s', 'channel' or equivalent ''

        """
        # snapshot of the arguments for the history entry
        varlist = vars()
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        # element 0 of the coordinate info holds the abcissa unit
        inf[0] = unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
870
    @print_log_dec
    def set_instrument(self, instr):
        """\
        Set the instrument for subsequent processing.

        Parameters:

            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'

        """
        self._setInstrument(instr)
        # NOTE(review): history label misspells 'instrument'; kept
        # byte-identical in case external tools match on it
        self._add_history("set_instument", vars())
884
    @print_log_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:    'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
897
    @print_log_dec
    def set_doppler(self, doppler='RADIO'):
        """\
        Set the doppler for all following operations on this scantable.

        Parameters:

            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'

        """
        # NOTE(review): 'varlist' is captured but vars() is passed to
        # _add_history below instead; preserved as-is
        varlist = vars()
        inf = list(self._getcoordinfo())
        # element 2 of the coordinate info holds the doppler convention
        inf[2] = doppler
        self._setcoordinfo(inf)
        self._add_history("set_doppler", vars())
913
914 @print_log_dec
915 def set_freqframe(self, frame=None):
916 """\
917 Set the frame type of the Spectral Axis.
918
919 Parameters:
920
921 frame: an optional frame type, default 'LSRK'. Valid frames are:
922 'TOPO', 'LSRD', 'LSRK', 'BARY',
923 'GEO', 'GALACTO', 'LGROUP', 'CMB'
924
925 Example::
926
927 scan.set_freqframe('BARY')
928
929 """
930 frame = frame or rcParams['scantable.freqframe']
931 varlist = vars()
932 # "REST" is not implemented in casacore
933 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
934 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
935 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
936 'GEO', 'GALACTO', 'LGROUP', 'CMB']
937
938 if frame in valid:
939 inf = list(self._getcoordinfo())
940 inf[1] = frame
941 self._setcoordinfo(inf)
942 self._add_history("set_freqframe", varlist)
943 else:
944 msg = "Please specify a valid freq type. Valid types are:\n", valid
945 if rcParams['verbose']:
946 #print msg
947 asaplog.push( msg )
948 print_log( 'ERROR' )
949 else:
950 raise TypeError(msg)
951
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        try:
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            # in verbose mode log the error instead of raising
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
            else:
                raise
        self._add_history("set_dirframe", varlist)
978
979 def get_unit(self):
980 """\
981 Get the default unit set in this scantable
982
983 Returns:
984
985 A unit string
986
987 """
988 inf = self._getcoordinfo()
989 unit = inf[0]
990 if unit == '': unit = 'channel'
991 return unit
992
993 @print_log_dec
994 def get_abcissa(self, rowno=0):
995 """\
996 Get the abcissa in the current coordinate setup for the currently
997 selected Beam/IF/Pol
998
999 Parameters:
1000
1001 rowno: an optional row number in the scantable. Default is the
1002 first row, i.e. rowno=0
1003
1004 Returns:
1005
1006 The abcissa values and the format string (as a dictionary)
1007
1008 """
1009 abc = self._getabcissa(rowno)
1010 lbl = self._getabcissalabel(rowno)
1011 return abc, lbl
1012
    def flag(self, mask=None, unflag=False):
        """\
        Flag the selected data using an optional channel mask.

        Parameters:

            mask:    an optional channel mask, created with create_mask.
                     Default (no mask) is all channels.

            unflag:    if True, unflag the data

        """
        varlist = vars()
        mask = mask or []
        try:
            self._flag(mask, unflag)
        except RuntimeError, msg:
            # in verbose mode log the error instead of raising
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        self._add_history("flag", varlist)
1038
    def flag_row(self, rows=[], unflag=False):
        """\
        Flag the selected data in row-based manner.

        Parameters:

            rows:   list of row numbers to be flagged. Default is no row
                    (must be explicitly specified to execute row-based
                    flagging).

            unflag: if True, unflag the data.

        """
        # NOTE(review): mutable default argument ([]); harmless here as
        # 'rows' is never mutated, but worth confirming before changing
        varlist = vars()
        try:
            self._flag_row(rows, unflag)
        except RuntimeError, msg:
            # in verbose mode log the error instead of raising
            if rcParams['verbose']:
                print_log()
                asaplog.push( str(msg) )
                print_log('ERROR')
                return
            else: raise
        self._add_history("flag_row", varlist)
1062
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres:      upper threshold.

            dthres:      lower threshold

            clipoutside: True for flagging data outside the range
                         [dthres:uthres].
                         False for flagging data inside the range.

            unflag:      if True, unflag the data.

        """
        varlist = vars()
        try:
            self._clip(uthres, dthres, clipoutside, unflag)
        except RuntimeError, msg:
            # in verbose mode log the error instead of raising
            if rcParams['verbose']:
                print_log()
                asaplog.push(str(msg))
                print_log('ERROR')
                return
            else: raise
        self._add_history("clip", varlist)
1090
    @print_log_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """\
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.

        Parameters:

            start:    the start frequency (really a period within the
                      bandwidth) or period to remove

            end:      the end frequency or period to remove

            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels

            insitu:   if False a new scantable is returned. Otherwise the
                      operation is done in-situ. The default is taken
                      from .asaprc.

        *Notes*:

            It is recommended to flag edges of the band or strong
            signals beforehand.

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # conversion factors to Hz for the supported frequency units
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        try:
            if unit == "":
                # no unit -> interpret start/end as lag channels
                s = scantable(self._math._lag_flag(self, start, end, "lags"))
            else:
                s = scantable(self._math._lag_flag(self, start*base[unit],
                                                   end*base[unit], "frequency"))
        except RuntimeError, msg:
            # in verbose mode log the error instead of raising
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        s._add_history("lag_flag", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
1139
    @print_log_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # if the last element of the last arg is a number, the caller
        # passed windows as separate args; otherwise a list of windows
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            # a single value means a one-channel window
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            # normalise so that window[0] <= window[1]
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1211
1212 def get_masklist(self, mask=None, row=0):
1213 """\
1214 Compute and return a list of mask windows, [min, max].
1215
1216 Parameters:
1217
1218 mask: channel mask, created with create_mask.
1219
1220 row: calcutate the masklist using the specified row
1221 for unit conversions, default is row=0
1222 only necessary if frequency varies over rows.
1223
1224 Returns:
1225
1226 [min, max], [min2, max2], ...
1227 Pairs of start/end points (inclusive)specifying
1228 the masked regions
1229
1230 """
1231 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1232 raise TypeError("The mask should be list or tuple.")
1233 if len(mask) < 2:
1234 raise TypeError("The mask elements should be > 1")
1235 if self.nchan() != len(mask):
1236 msg = "Number of channels in scantable != number of mask elements"
1237 raise TypeError(msg)
1238 data = self._getabcissa(row)
1239 u = self._getcoordinfo()[0]
1240 if rcParams['verbose']:
1241 if u == "": u = "channel"
1242 msg = "The current mask window unit is %s" % u
1243 i = self._check_ifs()
1244 if not i:
1245 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1246 asaplog.push(msg)
1247 masklist=[]
1248 ist, ien = None, None
1249 ist, ien=self.get_mask_indices(mask)
1250 if ist is not None and ien is not None:
1251 for i in xrange(len(ist)):
1252 range=[data[ist[i]],data[ien[i]]]
1253 range.sort()
1254 masklist.append([range[0],range[1]])
1255 return masklist
1256
1257 def get_mask_indices(self, mask=None):
1258 """\
1259 Compute and Return lists of mask start indices and mask end indices.
1260
1261 Parameters:
1262
1263 mask: channel mask, created with create_mask.
1264
1265 Returns:
1266
1267 List of mask start indices and that of mask end indices,
1268 i.e., [istart1,istart2,....], [iend1,iend2,....].
1269
1270 """
1271 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1272 raise TypeError("The mask should be list or tuple.")
1273 if len(mask) < 2:
1274 raise TypeError("The mask elements should be > 1")
1275 istart=[]
1276 iend=[]
1277 if mask[0]: istart.append(0)
1278 for i in range(len(mask)-1):
1279 if not mask[i] and mask[i+1]:
1280 istart.append(i+1)
1281 elif mask[i] and not mask[i+1]:
1282 iend.append(i)
1283 if mask[len(mask)-1]: iend.append(len(mask)-1)
1284 if len(istart) != len(iend):
1285 raise RuntimeError("Numbers of mask start != mask end.")
1286 for i in range(len(istart)):
1287 if istart[i] > iend[i]:
1288 raise RuntimeError("Mask start index > mask end index")
1289 break
1290 return istart,iend
1291
1292# def get_restfreqs(self):
1293# """
1294# Get the restfrequency(s) stored in this scantable.
1295# The return value(s) are always of unit 'Hz'
1296# Parameters:
1297# none
1298# Returns:
1299# a list of doubles
1300# """
1301# return list(self._getrestfreqs())
1302
1303 def get_restfreqs(self, ids=None):
1304 """\
1305 Get the restfrequency(s) stored in this scantable.
1306 The return value(s) are always of unit 'Hz'
1307
1308 Parameters:
1309
1310 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1311 be retrieved
1312
1313 Returns:
1314
1315 dictionary containing ids and a list of doubles for each id
1316
1317 """
1318 if ids is None:
1319 rfreqs={}
1320 idlist = self.getmolnos()
1321 for i in idlist:
1322 rfreqs[i]=list(self._getrestfreqs(i))
1323 return rfreqs
1324 else:
1325 if type(ids)==list or type(ids)==tuple:
1326 rfreqs={}
1327 for i in ids:
1328 rfreqs[i]=list(self._getrestfreqs(i))
1329 return rfreqs
1330 else:
1331 return list(self._getrestfreqs(ids))
1332 #return list(self._getrestfreqs(ids))
1333
1334 def set_restfreqs(self, freqs=None, unit='Hz'):
1335 """\
1336 Set or replace the restfrequency specified and
1337 If the 'freqs' argument holds a scalar,
1338 then that rest frequency will be applied to all the selected
1339 data. If the 'freqs' argument holds
1340 a vector, then it MUST be of equal or smaller length than
1341 the number of IFs (and the available restfrequencies will be
1342 replaced by this vector). In this case, *all* data have
1343 the restfrequency set per IF according
1344 to the corresponding value you give in the 'freqs' vector.
1345 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1346 IF 1 gets restfreq 2e9.
1347
1348 You can also specify the frequencies via a linecatalog.
1349
1350 Parameters:
1351
1352 freqs: list of rest frequency values or string idenitfiers
1353
1354 unit: unit for rest frequency (default 'Hz')
1355
1356
1357 Example::
1358
1359 # set the given restfrequency for the all currently selected IFs
1360 scan.set_restfreqs(freqs=1.4e9)
1361 # set restfrequencies for the n IFs (n > 1) in the order of the
1362 # list, i.e
1363 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1364 # len(list_of_restfreqs) == nIF
1365 # for nIF == 1 the following will set multiple restfrequency for
1366 # that IF
1367 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1368 # set multiple restfrequencies per IF. as a list of lists where
1369 # the outer list has nIF elements, the inner s arbitrary
1370 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1371
1372 *Note*:
1373
1374 To do more sophisticate Restfrequency setting, e.g. on a
1375 source and IF basis, use scantable.set_selection() before using
1376 this function::
1377
1378 # provided your scantable is called scan
1379 selection = selector()
1380 selection.set_name("ORION*")
1381 selection.set_ifs([1])
1382 scan.set_selection(selection)
1383 scan.set_restfreqs(freqs=86.6e9)
1384
1385 """
1386 varlist = vars()
1387 from asap import linecatalog
1388 # simple value
1389 if isinstance(freqs, int) or isinstance(freqs, float):
1390 self._setrestfreqs([freqs], [""], unit)
1391 # list of values
1392 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1393 # list values are scalars
1394 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1395 if len(freqs) == 1:
1396 self._setrestfreqs(freqs, [""], unit)
1397 else:
1398 # allow the 'old' mode of setting mulitple IFs
1399 sel = selector()
1400 savesel = self._getselection()
1401 iflist = self.getifnos()
1402 if len(freqs)>len(iflist):
1403 raise ValueError("number of elements in list of list "
1404 "exeeds the current IF selections")
1405 iflist = self.getifnos()
1406 for i, fval in enumerate(freqs):
1407 sel.set_ifs(iflist[i])
1408 self._setselection(sel)
1409 self._setrestfreqs([fval], [""], unit)
1410 self._setselection(savesel)
1411
1412 # list values are dict, {'value'=, 'name'=)
1413 elif isinstance(freqs[-1], dict):
1414 values = []
1415 names = []
1416 for d in freqs:
1417 values.append(d["value"])
1418 names.append(d["name"])
1419 self._setrestfreqs(values, names, unit)
1420 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1421 sel = selector()
1422 savesel = self._getselection()
1423 iflist = self.getifnos()
1424 if len(freqs)>len(iflist):
1425 raise ValueError("number of elements in list of list exeeds"
1426 " the current IF selections")
1427 for i, fval in enumerate(freqs):
1428 sel.set_ifs(iflist[i])
1429 self._setselection(sel)
1430 self._setrestfreqs(fval, [""], unit)
1431 self._setselection(savesel)
1432 # freqs are to be taken from a linecatalog
1433 elif isinstance(freqs, linecatalog):
1434 sel = selector()
1435 savesel = self._getselection()
1436 for i in xrange(freqs.nrow()):
1437 sel.set_ifs(iflist[i])
1438 self._setselection(sel)
1439 self._setrestfreqs([freqs.get_frequency(i)],
1440 [freqs.get_name(i)], "MHz")
1441 # ensure that we are not iterating past nIF
1442 if i == self.nif()-1: break
1443 self._setselection(savesel)
1444 else:
1445 return
1446 self._add_history("set_restfreqs", varlist)
1447
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # Thin delegate to the base-class implementation in asap._asap;
        # note that no history entry is recorded for this operation.
        Scantable.shift_refpix(self, delta)
1463
1464 def history(self, filename=None):
1465 """\
1466 Print the history. Optionally to a file.
1467
1468 Parameters:
1469
1470 filename: The name of the file to save the history to.
1471
1472 """
1473 hist = list(self._gethistory())
1474 out = "-"*80
1475 for h in hist:
1476 if h.startswith("---"):
1477 out += "\n"+h
1478 else:
1479 items = h.split("##")
1480 date = items[0]
1481 func = items[1]
1482 items = items[2:]
1483 out += "\n"+date+"\n"
1484 out += "Function: %s\n Parameters:" % (func)
1485 for i in items:
1486 s = i.split("=")
1487 out += "\n %s = %s" % (s[0], s[1])
1488 out += "\n"+"-"*80
1489 if filename is not None:
1490 if filename is "":
1491 filename = 'scantable_history.txt'
1492 import os
1493 filename = os.path.expandvars(os.path.expanduser(filename))
1494 if not os.path.isdir(filename):
1495 data = open(filename, 'w')
1496 data.write(out)
1497 data.close()
1498 else:
1499 msg = "Illegal file name '%s'." % (filename)
1500 if rcParams['verbose']:
1501 #print msg
1502 asaplog.push( msg )
1503 print_log( 'ERROR' )
1504 else:
1505 raise IOError(msg)
1506 if rcParams['verbose']:
1507 try:
1508 from IPython.genutils import page as pager
1509 except ImportError:
1510 from pydoc import pager
1511 pager(out)
1512 else:
1513 return out
1514 return
1515 #
1516 # Maths business
1517 #
    @print_log_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """\
        Return the (time) weighted average of a scan.

        *Note*:

            in channels only - align if necessary

        Parameters:

            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)

            scanav:   True averages each scan separately
                      False (default) averages all scans together,

            weight:   Weighting scheme.
                      'none'     (mean no weight)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   ( median averaging)
                      The default is 'tint'

            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.

        Example::

            # time average the scantable without using a mask
            newscan = scan.average_time()

        """
        varlist = vars()
        # normalise arguments for the C++ layer
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        scan = (self, )
        try:
            if align:
                # work on a velocity-aligned copy, leaving self untouched
                scan = (self.freq_align(insitu=False), )
            s = None
            # median averaging uses a dedicated backend call
            if weight.upper() == 'MEDIAN':
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            # in verbose mode errors go to the asap log and None is returned
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else: raise
        s._add_history("average_time", varlist)
        return s
1578
1579 @print_log_dec
1580 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1581 """\
1582 Return a scan where all spectra are converted to either
1583 Jansky or Kelvin depending upon the flux units of the scan table.
1584 By default the function tries to look the values up internally.
1585 If it can't find them (or if you want to over-ride), you must
1586 specify EITHER jyperk OR eta (and D which it will try to look up
1587 also if you don't set it). jyperk takes precedence if you set both.
1588
1589 Parameters:
1590
1591 jyperk: the Jy / K conversion factor
1592
1593 eta: the aperture efficiency
1594
1595 d: the geomtric diameter (metres)
1596
1597 insitu: if False a new scantable is returned.
1598 Otherwise, the scaling is done in-situ
1599 The default is taken from .asaprc (False)
1600
1601 """
1602 if insitu is None: insitu = rcParams['insitu']
1603 self._math._setinsitu(insitu)
1604 varlist = vars()
1605 jyperk = jyperk or -1.0
1606 d = d or -1.0
1607 eta = eta or -1.0
1608 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1609 s._add_history("convert_flux", varlist)
1610 if insitu: self._assign(s)
1611 else: return s
1612
1613 @print_log_dec
1614 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1615 """\
1616 Return a scan after applying a gain-elevation correction.
1617 The correction can be made via either a polynomial or a
1618 table-based interpolation (and extrapolation if necessary).
1619 You specify polynomial coefficients, an ascii table or neither.
1620 If you specify neither, then a polynomial correction will be made
1621 with built in coefficients known for certain telescopes (an error
1622 will occur if the instrument is not known).
1623 The data and Tsys are *divided* by the scaling factors.
1624
1625 Parameters:
1626
1627 poly: Polynomial coefficients (default None) to compute a
1628 gain-elevation correction as a function of
1629 elevation (in degrees).
1630
1631 filename: The name of an ascii file holding correction factors.
1632 The first row of the ascii file must give the column
1633 names and these MUST include columns
1634 "ELEVATION" (degrees) and "FACTOR" (multiply data
1635 by this) somewhere.
1636 The second row must give the data type of the
1637 column. Use 'R' for Real and 'I' for Integer.
1638 An example file would be
1639 (actual factors are arbitrary) :
1640
1641 TIME ELEVATION FACTOR
1642 R R R
1643 0.1 0 0.8
1644 0.2 20 0.85
1645 0.3 40 0.9
1646 0.4 60 0.85
1647 0.5 80 0.8
1648 0.6 90 0.75
1649
1650 method: Interpolation method when correcting from a table.
1651 Values are "nearest", "linear" (default), "cubic"
1652 and "spline"
1653
1654 insitu: if False a new scantable is returned.
1655 Otherwise, the scaling is done in-situ
1656 The default is taken from .asaprc (False)
1657
1658 """
1659
1660 if insitu is None: insitu = rcParams['insitu']
1661 self._math._setinsitu(insitu)
1662 varlist = vars()
1663 poly = poly or ()
1664 from os.path import expandvars
1665 filename = expandvars(filename)
1666 s = scantable(self._math._gainel(self, poly, filename, method))
1667 s._add_history("gain_el", varlist)
1668 if insitu:
1669 self._assign(s)
1670 else:
1671 return s
1672
1673 @print_log_dec
1674 def freq_align(self, reftime=None, method='cubic', insitu=None):
1675 """\
1676 Return a scan where all rows have been aligned in frequency/velocity.
1677 The alignment frequency frame (e.g. LSRK) is that set by function
1678 set_freqframe.
1679
1680 Parameters:
1681
1682 reftime: reference time to align at. By default, the time of
1683 the first row of data is used.
1684
1685 method: Interpolation method for regridding the spectra.
1686 Choose from "nearest", "linear", "cubic" (default)
1687 and "spline"
1688
1689 insitu: if False a new scantable is returned.
1690 Otherwise, the scaling is done in-situ
1691 The default is taken from .asaprc (False)
1692
1693 """
1694 if insitu is None: insitu = rcParams["insitu"]
1695 self._math._setinsitu(insitu)
1696 varlist = vars()
1697 reftime = reftime or ""
1698 s = scantable(self._math._freq_align(self, reftime, method))
1699 s._add_history("freq_align", varlist)
1700 if insitu: self._assign(s)
1701 else: return s
1702
1703 @print_log_dec
1704 def opacity(self, tau=None, insitu=None):
1705 """\
1706 Apply an opacity correction. The data
1707 and Tsys are multiplied by the correction factor.
1708
1709 Parameters:
1710
1711 tau: (list of) opacity from which the correction factor is
1712 exp(tau*ZD)
1713 where ZD is the zenith-distance.
1714 If a list is provided, it has to be of length nIF,
1715 nIF*nPol or 1 and in order of IF/POL, e.g.
1716 [opif0pol0, opif0pol1, opif1pol0 ...]
1717 if tau is `None` the opacities are determined from a
1718 model.
1719
1720 insitu: if False a new scantable is returned.
1721 Otherwise, the scaling is done in-situ
1722 The default is taken from .asaprc (False)
1723
1724 """
1725 if insitu is None: insitu = rcParams['insitu']
1726 self._math._setinsitu(insitu)
1727 varlist = vars()
1728 if not hasattr(tau, "__len__"):
1729 tau = [tau]
1730 s = scantable(self._math._opacity(self, tau))
1731 s._add_history("opacity", varlist)
1732 if insitu: self._assign(s)
1733 else: return s
1734
1735 @print_log_dec
1736 def bin(self, width=5, insitu=None):
1737 """\
1738 Return a scan where all spectra have been binned up.
1739
1740 Parameters:
1741
1742 width: The bin width (default=5) in pixels
1743
1744 insitu: if False a new scantable is returned.
1745 Otherwise, the scaling is done in-situ
1746 The default is taken from .asaprc (False)
1747
1748 """
1749 if insitu is None: insitu = rcParams['insitu']
1750 self._math._setinsitu(insitu)
1751 varlist = vars()
1752 s = scantable(self._math._bin(self, width))
1753 s._add_history("bin", varlist)
1754 if insitu:
1755 self._assign(s)
1756 else:
1757 return s
1758
1759 @print_log_dec
1760 def resample(self, width=5, method='cubic', insitu=None):
1761 """\
1762 Return a scan where all spectra have been binned up.
1763
1764 Parameters:
1765
1766 width: The bin width (default=5) in pixels
1767
1768 method: Interpolation method when correcting from a table.
1769 Values are "nearest", "linear", "cubic" (default)
1770 and "spline"
1771
1772 insitu: if False a new scantable is returned.
1773 Otherwise, the scaling is done in-situ
1774 The default is taken from .asaprc (False)
1775
1776 """
1777 if insitu is None: insitu = rcParams['insitu']
1778 self._math._setinsitu(insitu)
1779 varlist = vars()
1780 s = scantable(self._math._resample(self, method, width))
1781 s._add_history("resample", varlist)
1782 if insitu: self._assign(s)
1783 else: return s
1784
1785 @print_log_dec
1786 def average_pol(self, mask=None, weight='none'):
1787 """\
1788 Average the Polarisations together.
1789
1790 Parameters:
1791
1792 mask: An optional mask defining the region, where the
1793 averaging will be applied. The output will have all
1794 specified points masked.
1795
1796 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1797 weighted), or 'tsys' (1/Tsys**2 weighted)
1798
1799 """
1800 varlist = vars()
1801 mask = mask or ()
1802 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1803 s._add_history("average_pol", varlist)
1804 return s
1805
1806 @print_log_dec
1807 def average_beam(self, mask=None, weight='none'):
1808 """\
1809 Average the Beams together.
1810
1811 Parameters:
1812 mask: An optional mask defining the region, where the
1813 averaging will be applied. The output will have all
1814 specified points masked.
1815
1816 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1817 weighted), or 'tsys' (1/Tsys**2 weighted)
1818
1819 """
1820 varlist = vars()
1821 mask = mask or ()
1822 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1823 s._add_history("average_beam", varlist)
1824 return s
1825
1826 def parallactify(self, pflag):
1827 """\
1828 Set a flag to indicate whether this data should be treated as having
1829 been 'parallactified' (total phase == 0.0)
1830
1831 Parameters:
1832
1833 pflag: Bool indicating whether to turn this on (True) or
1834 off (False)
1835
1836 """
1837 varlist = vars()
1838 self._parallactify(pflag)
1839 self._add_history("parallactify", varlist)
1840
    @print_log_dec
    def convert_pol(self, poltype=None):
        """\
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.

        Parameters:

            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"

        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            # in verbose mode errors are routed to the asap log and None
            # is returned; otherwise the backend error propagates
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        return s
1867
    @print_log_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data for interactive comparison
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactively show original vs smoothed row by row; a
            # rejected row gets its original spectrum restored
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject: restore the unsmoothed spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
1945
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
                      insitu=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask:       an optional mask

            order:      the order of the polynomial (default is 0)

            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'

            uselin:     use linear polynomial fit

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        Example::

            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        # no mask given: fit against every channel
        if mask is None:
            mask = [True for i in xrange(self.nchan(-1))]

        from asap.asapfitter import fitter
        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            rows = range(workscan.nrow())
            # fit parameters per row are kept on self for later inspection;
            # they are NOT updated if the scantable changes afterwards
            if len(rows) > 0:
                self.blpars = []

            for r in rows:
                # take into account flagtra info (CAS-1434)
                flagtra = workscan._getmask(r)
                # combine the user mask with the row's channel flags
                actualmask = mask[:]
                if len(actualmask) == 0:
                    actualmask = list(flagtra[:])
                else:
                    if len(actualmask) != len(flagtra):
                        raise RuntimeError, "Mask and flagtra have different length"
                    else:
                        for i in range(0, len(actualmask)):
                            actualmask[i] = actualmask[i] and flagtra[i]
                f.set_scan(workscan, actualmask)
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                f.data = None
                f.fit()
                if plot:
                    # interactive approval; a rejected row keeps its
                    # spectrum and records None in blpars
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        self.blpars.append(None)
                        continue
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams['verbose']:
                #print msg
                print_log()
                asaplog.push( str(msg) )
                print_log( 'ERROR' )
                return
            else:
                raise RuntimeError(msg)
2040
2041
2042 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
2043 threshold=3, chan_avg_limit=1, plot=False,
2044 insitu=None):
2045 """\
2046 Return a scan which has been baselined (all rows) by a polynomial.
2047 Spectral lines are detected first using linefinder and masked out
2048 to avoid them affecting the baseline solution.
2049
2050 Parameters:
2051
2052 mask: an optional mask retreived from scantable
2053
2054 edge: an optional number of channel to drop at the edge of
2055 spectrum. If only one value is
2056 specified, the same number will be dropped from
2057 both sides of the spectrum. Default is to keep
2058 all channels. Nested tuples represent individual
2059 edge selection for different IFs (a number of spectral
2060 channels can be different)
2061
2062 order: the order of the polynomial (default is 0)
2063
2064 threshold: the threshold used by line finder. It is better to
2065 keep it large as only strong lines affect the
2066 baseline solution.
2067
2068 chan_avg_limit:
2069 a maximum number of consequtive spectral channels to
2070 average during the search of weak and broad lines.
2071 The default is no averaging (and no search for weak
2072 lines). If such lines can affect the fitted baseline
2073 (e.g. a high order polynomial is fitted), increase this
2074 parameter (usually values up to 8 are reasonable). Most
2075 users of this method should find the default value
2076 sufficient.
2077
2078 plot: plot the fit and the residual. In this each
2079 indivual fit has to be approved, by typing 'y'
2080 or 'n'
2081
2082 insitu: if False a new scantable is returned.
2083 Otherwise, the scaling is done in-situ
2084 The default is taken from .asaprc (False)
2085
2086
2087 Example::
2088
2089 scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2090
2091 """
2092 if insitu is None: insitu = rcParams['insitu']
2093 varlist = vars()
2094 from asap.asapfitter import fitter
2095 from asap.asaplinefind import linefinder
2096 from asap import _is_sequence_or_number as _is_valid
2097
2098 # check whether edge is set up for each IF individually
2099 individualedge = False;
2100 if len(edge) > 1:
2101 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2102 individualedge = True;
2103
2104 if not _is_valid(edge, int) and not individualedge:
2105 raise ValueError, "Parameter 'edge' has to be an integer or a \
2106 pair of integers specified as a tuple. Nested tuples are allowed \
2107 to make individual selection for different IFs."
2108
2109 curedge = (0, 0)
2110 if individualedge:
2111 for edgepar in edge:
2112 if not _is_valid(edgepar, int):
2113 raise ValueError, "Each element of the 'edge' tuple has \
2114 to be a pair of integers or an integer."
2115 else:
2116 curedge = edge;
2117
2118 # setup fitter
2119 f = fitter()
2120 f.set_function(poly=order)
2121
2122 # setup line finder
2123 fl = linefinder()
2124 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2125
2126 if not insitu:
2127 workscan = self.copy()
2128 else:
2129 workscan = self
2130
2131 fl.set_scan(workscan)
2132
2133 rows = range(workscan.nrow())
2134 # Save parameters of baseline fits & masklists as a class attribute.
2135 # NOTICE: It does not reflect changes in scantable!
2136 if len(rows) > 0:
2137 self.blpars=[]
2138 self.masklists=[]
2139 asaplog.push("Processing:")
2140 for r in rows:
2141 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2142 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2143 workscan.getpol(r), workscan.getcycle(r))
2144 asaplog.push(msg, False)
2145
2146 # figure out edge parameter
2147 if individualedge:
2148 if len(edge) >= workscan.getif(r):
2149 raise RuntimeError, "Number of edge elements appear to " \
2150 "be less than the number of IFs"
2151 curedge = edge[workscan.getif(r)]
2152
2153 # take into account flagtra info (CAS-1434)
2154 flagtra = workscan._getmask(r)
2155 actualmask = mask[:]
2156 if len(actualmask) == 0:
2157 actualmask = list(flagtra[:])
2158 else:
2159 if len(actualmask) != len(flagtra):
2160 raise RuntimeError, "Mask and flagtra have different length"
2161 else:
2162 for i in range(0, len(actualmask)):
2163 actualmask[i] = actualmask[i] and flagtra[i]
2164
2165 # setup line finder
2166 fl.find_lines(r, actualmask, curedge)
2167 outmask=fl.get_mask()
2168 f.set_scan(workscan, fl.get_mask())
2169 f.x = workscan._getabcissa(r)
2170 f.y = workscan._getspectrum(r)
2171 f.data = None
2172 f.fit()
2173
2174 # Show mask list
2175 masklist=workscan.get_masklist(fl.get_mask(),row=r)
2176 msg = "mask range: "+str(masklist)
2177 asaplog.push(msg, False)
2178
2179 if plot:
2180 f.plot(residual=True)
2181 x = raw_input("Accept fit ( [y]/n ): ")
2182 if x.upper() == 'N':
2183 self.blpars.append(None)
2184 self.masklists.append(None)
2185 continue
2186
2187 workscan._setspectrum(f.fitter.getresidual(), r)
2188 self.blpars.append(f.get_parameters())
2189 self.masklists.append(masklist)
2190 if plot:
2191 f._p.unmap()
2192 f._p = None
2193 workscan._add_history("auto_poly_baseline", varlist)
2194 if insitu:
2195 self._assign(workscan)
2196 else:
2197 return workscan
2198
2199 @print_log_dec
2200 def rotate_linpolphase(self, angle):
2201 """\
2202 Rotate the phase of the complex polarization O=Q+iU correlation.
2203 This is always done in situ in the raw data. So if you call this
2204 function more than once then each call rotates the phase further.
2205
2206 Parameters:
2207
2208 angle: The angle (degrees) to rotate (add) by.
2209
2210 Example::
2211
2212 scan.rotate_linpolphase(2.3)
2213
2214 """
2215 varlist = vars()
2216 self._math._rotate_linpolphase(self, angle)
2217 self._add_history("rotate_linpolphase", varlist)
2218 return
2219
2220 @print_log_dec
2221 def rotate_xyphase(self, angle):
2222 """\
2223 Rotate the phase of the XY correlation. This is always done in situ
2224 in the data. So if you call this function more than once
2225 then each call rotates the phase further.
2226
2227 Parameters:
2228
2229 angle: The angle (degrees) to rotate (add) by.
2230
2231 Example::
2232
2233 scan.rotate_xyphase(2.3)
2234
2235 """
2236 varlist = vars()
2237 self._math._rotate_xyphase(self, angle)
2238 self._add_history("rotate_xyphase", varlist)
2239 return
2240
2241 @print_log_dec
2242 def swap_linears(self):
2243 """\
2244 Swap the linear polarisations XX and YY, or better the first two
2245 polarisations as this also works for ciculars.
2246 """
2247 varlist = vars()
2248 self._math._swap_linears(self)
2249 self._add_history("swap_linears", varlist)
2250 return
2251
2252 @print_log_dec
2253 def invert_phase(self):
2254 """\
2255 Invert the phase of the complex polarisation
2256 """
2257 varlist = vars()
2258 self._math._invert_phase(self)
2259 self._add_history("invert_phase", varlist)
2260 return
2261
2262 @print_log_dec
2263 def add(self, offset, insitu=None):
2264 """\
2265 Return a scan where all spectra have the offset added
2266
2267 Parameters:
2268
2269 offset: the offset
2270
2271 insitu: if False a new scantable is returned.
2272 Otherwise, the scaling is done in-situ
2273 The default is taken from .asaprc (False)
2274
2275 """
2276 if insitu is None: insitu = rcParams['insitu']
2277 self._math._setinsitu(insitu)
2278 varlist = vars()
2279 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2280 s._add_history("add", varlist)
2281 if insitu:
2282 self._assign(s)
2283 else:
2284 return s
2285
2286 @print_log_dec
2287 def scale(self, factor, tsys=True, insitu=None):
2288 """\
2289
2290 Return a scan where all spectra are scaled by the give 'factor'
2291
2292 Parameters:
2293
2294 factor: the scaling factor (float or 1D float list)
2295
2296 insitu: if False a new scantable is returned.
2297 Otherwise, the scaling is done in-situ
2298 The default is taken from .asaprc (False)
2299
2300 tsys: if True (default) then apply the operation to Tsys
2301 as well as the data
2302
2303 """
2304 if insitu is None: insitu = rcParams['insitu']
2305 self._math._setinsitu(insitu)
2306 varlist = vars()
2307 s = None
2308 import numpy
2309 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2310 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2311 from asapmath import _array2dOp
2312 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2313 else:
2314 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2315 else:
2316 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2317 s._add_history("scale", varlist)
2318 if insitu:
2319 self._assign(s)
2320 else:
2321 return s
2322
2323 def set_sourcetype(self, match, matchtype="pattern",
2324 sourcetype="reference"):
2325 """\
2326 Set the type of the source to be an source or reference scan
2327 using the provided pattern.
2328
2329 Parameters:
2330
2331 match: a Unix style pattern, regular expression or selector
2332
2333 matchtype: 'pattern' (default) UNIX style pattern or
2334 'regex' regular expression
2335
2336 sourcetype: the type of the source to use (source/reference)
2337
2338 """
2339 varlist = vars()
2340 basesel = self.get_selection()
2341 stype = -1
2342 if sourcetype.lower().startswith("r"):
2343 stype = 1
2344 elif sourcetype.lower().startswith("s"):
2345 stype = 0
2346 else:
2347 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2348 if matchtype.lower().startswith("p"):
2349 matchtype = "pattern"
2350 elif matchtype.lower().startswith("r"):
2351 matchtype = "regex"
2352 else:
2353 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2354 sel = selector()
2355 if isinstance(match, selector):
2356 sel = match
2357 else:
2358 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2359 self.set_selection(basesel+sel)
2360 self._setsourcetype(stype)
2361 self.set_selection(basesel)
2362 self._add_history("set_sourcetype", varlist)
2363
2364 @print_log_dec
2365 def auto_quotient(self, preserve=True, mode='paired', verify=False):
2366 """\
2367 This function allows to build quotients automatically.
2368 It assumes the observation to have the same number of
2369 "ons" and "offs"
2370
2371 Parameters:
2372
2373 preserve: you can preserve (default) the continuum or
2374 remove it. The equations used are
2375 preserve: Output = Toff * (on/off) - Toff
2376 remove: Output = Toff * (on/off) - Ton
2377
2378 mode: the on/off detection mode
2379 'paired' (default)
2380 identifies 'off' scans by the
2381 trailing '_R' (Mopra/Parkes) or
2382 '_e'/'_w' (Tid) and matches
2383 on/off pairs from the observing pattern
2384 'time'
2385 finds the closest off in time
2386
2387 """
2388 modes = ["time", "paired"]
2389 if not mode in modes:
2390 msg = "please provide valid mode. Valid modes are %s" % (modes)
2391 raise ValueError(msg)
2392 varlist = vars()
2393 s = None
2394 if mode.lower() == "paired":
2395 basesel = self.get_selection()
2396 sel = selector()+basesel
2397 sel.set_query("SRCTYPE==1")
2398 self.set_selection(sel)
2399 offs = self.copy()
2400 sel.set_query("SRCTYPE==0")
2401 self.set_selection(sel)
2402 ons = self.copy()
2403 s = scantable(self._math._quotient(ons, offs, preserve))
2404 self.set_selection(basesel)
2405 elif mode.lower() == "time":
2406 s = scantable(self._math._auto_quotient(self, mode, preserve))
2407 s._add_history("auto_quotient", varlist)
2408 return s
2409
2410 @print_log_dec
2411 def mx_quotient(self, mask = None, weight='median', preserve=True):
2412 """\
2413 Form a quotient using "off" beams when observing in "MX" mode.
2414
2415 Parameters:
2416
2417 mask: an optional mask to be used when weight == 'stddev'
2418
2419 weight: How to average the off beams. Default is 'median'.
2420
2421 preserve: you can preserve (default) the continuum or
2422 remove it. The equations used are:
2423
2424 preserve: Output = Toff * (on/off) - Toff
2425
2426 remove: Output = Toff * (on/off) - Ton
2427
2428 """
2429 mask = mask or ()
2430 varlist = vars()
2431 on = scantable(self._math._mx_extract(self, 'on'))
2432 preoff = scantable(self._math._mx_extract(self, 'off'))
2433 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2434 from asapmath import quotient
2435 q = quotient(on, off, preserve)
2436 q._add_history("mx_quotient", varlist)
2437 return q
2438
2439 @print_log_dec
2440 def freq_switch(self, insitu=None):
2441 """\
2442 Apply frequency switching to the data.
2443
2444 Parameters:
2445
2446 insitu: if False a new scantable is returned.
2447 Otherwise, the swictching is done in-situ
2448 The default is taken from .asaprc (False)
2449
2450 """
2451 if insitu is None: insitu = rcParams['insitu']
2452 self._math._setinsitu(insitu)
2453 varlist = vars()
2454 s = scantable(self._math._freqswitch(self))
2455 s._add_history("freq_switch", varlist)
2456 if insitu:
2457 self._assign(s)
2458 else:
2459 return s
2460
2461 @print_log_dec
2462 def recalc_azel(self):
2463 """Recalculate the azimuth and elevation for each position."""
2464 varlist = vars()
2465 self._recalcazel()
2466 self._add_history("recalc_azel", varlist)
2467 return
2468
2469 @print_log_dec
2470 def __add__(self, other):
2471 varlist = vars()
2472 s = None
2473 if isinstance(other, scantable):
2474 s = scantable(self._math._binaryop(self, other, "ADD"))
2475 elif isinstance(other, float):
2476 s = scantable(self._math._unaryop(self, other, "ADD", False))
2477 else:
2478 raise TypeError("Other input is not a scantable or float value")
2479 s._add_history("operator +", varlist)
2480 return s
2481
2482 @print_log_dec
2483 def __sub__(self, other):
2484 """
2485 implicit on all axes and on Tsys
2486 """
2487 varlist = vars()
2488 s = None
2489 if isinstance(other, scantable):
2490 s = scantable(self._math._binaryop(self, other, "SUB"))
2491 elif isinstance(other, float):
2492 s = scantable(self._math._unaryop(self, other, "SUB", False))
2493 else:
2494 raise TypeError("Other input is not a scantable or float value")
2495 s._add_history("operator -", varlist)
2496 return s
2497
2498 @print_log_dec
2499 def __mul__(self, other):
2500 """
2501 implicit on all axes and on Tsys
2502 """
2503 varlist = vars()
2504 s = None
2505 if isinstance(other, scantable):
2506 s = scantable(self._math._binaryop(self, other, "MUL"))
2507 elif isinstance(other, float):
2508 s = scantable(self._math._unaryop(self, other, "MUL", False))
2509 else:
2510 raise TypeError("Other input is not a scantable or float value")
2511 s._add_history("operator *", varlist)
2512 return s
2513
2514
2515 @print_log_dec
2516 def __div__(self, other):
2517 """
2518 implicit on all axes and on Tsys
2519 """
2520 varlist = vars()
2521 s = None
2522 if isinstance(other, scantable):
2523 s = scantable(self._math._binaryop(self, other, "DIV"))
2524 elif isinstance(other, float):
2525 if other == 0.0:
2526 raise ZeroDivisionError("Dividing by zero is not recommended")
2527 s = scantable(self._math._unaryop(self, other, "DIV", False))
2528 else:
2529 raise TypeError("Other input is not a scantable or float value")
2530 s._add_history("operator /", varlist)
2531 return s
2532
2533 @print_log_dec
2534 def get_fit(self, row=0):
2535 """\
2536 Print or return the stored fits for a row in the scantable
2537
2538 Parameters:
2539
2540 row: the row which the fit has been applied to.
2541
2542 """
2543 if row > self.nrow():
2544 return
2545 from asap.asapfit import asapfit
2546 fit = asapfit(self._getfit(row))
2547 if rcParams['verbose']:
2548 #print fit
2549 asaplog.push( '%s' %(fit) )
2550 return
2551 else:
2552 return fit.as_dict()
2553
2554 def flag_nans(self):
2555 """\
2556 Utility function to flag NaN values in the scantable.
2557 """
2558 import numpy
2559 basesel = self.get_selection()
2560 for i in range(self.nrow()):
2561 sel = self.get_row_selector(i)
2562 self.set_selection(basesel+sel)
2563 nans = numpy.isnan(self._getspectrum(0))
2564 if numpy.any(nans):
2565 bnans = [ bool(v) for v in nans]
2566 self.flag(bnans)
2567 self.set_selection(basesel)
2568
2569 def get_row_selector(self, rowno):
2570 return selector(beams=self.getbeam(rowno),
2571 ifs=self.getif(rowno),
2572 pols=self.getpol(rowno),
2573 scans=self.getscan(rowno),
2574 cycles=self.getcycle(rowno))
2575
2576 def _add_history(self, funcname, parameters):
2577 if not rcParams['scantable.history']:
2578 return
2579 # create date
2580 sep = "##"
2581 from datetime import datetime
2582 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2583 hist = dstr+sep
2584 hist += funcname+sep#cdate+sep
2585 if parameters.has_key('self'): del parameters['self']
2586 for k, v in parameters.iteritems():
2587 if type(v) is dict:
2588 for k2, v2 in v.iteritems():
2589 hist += k2
2590 hist += "="
2591 if isinstance(v2, scantable):
2592 hist += 'scantable'
2593 elif k2 == 'mask':
2594 if isinstance(v2, list) or isinstance(v2, tuple):
2595 hist += str(self._zip_mask(v2))
2596 else:
2597 hist += str(v2)
2598 else:
2599 hist += str(v2)
2600 else:
2601 hist += k
2602 hist += "="
2603 if isinstance(v, scantable):
2604 hist += 'scantable'
2605 elif k == 'mask':
2606 if isinstance(v, list) or isinstance(v, tuple):
2607 hist += str(self._zip_mask(v))
2608 else:
2609 hist += str(v)
2610 else:
2611 hist += str(v)
2612 hist += sep
2613 hist = hist[:-2] # remove trailing '##'
2614 self._addhistory(hist)
2615
2616
2617 def _zip_mask(self, mask):
2618 mask = list(mask)
2619 i = 0
2620 segments = []
2621 while mask[i:].count(1):
2622 i += mask[i:].index(1)
2623 if mask[i:].count(0):
2624 j = i + mask[i:].index(0)
2625 else:
2626 j = len(mask)
2627 segments.append([i, j])
2628 i = j
2629 return segments
2630
2631 def _get_ordinate_label(self):
2632 fu = "("+self.get_fluxunit()+")"
2633 import re
2634 lbl = "Intensity"
2635 if re.match(".K.", fu):
2636 lbl = "Brightness Temperature "+ fu
2637 elif re.match(".Jy.", fu):
2638 lbl = "Flux density "+ fu
2639 return lbl
2640
2641 def _check_ifs(self):
2642 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2643 nchans = filter(lambda t: t > 0, nchans)
2644 return (sum(nchans)/len(nchans) == nchans[0])
2645
    def _fill(self, names, unit, average, getpt, antenna):
        """\
        Import one or more data files into this scantable.

        Parameters:

            names:     list of file/directory names to import;
                       environment variables and '~' are expanded

            unit:      flux unit passed to set_fluxunit, or None to
                       keep the unit from the file

            average:   if True, integrations are averaged per scan
                       after each file is filled

            getpt:     unused here (see the commented-out arguments of
                       r.open below)

            antenna:   unused here (see the commented-out arguments of
                       r.open below)

        Raises:

            IOError:   if a named file does not exist and
                       rcParams['verbose'] is False; in verbose mode the
                       error is logged and the method returns None.
        """
        first = True
        fullnames = []
        # Validate all names up front so nothing is filled when any
        # input path is missing.
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    asaplog.push(msg)
                    print_log( 'ERROR' )
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # Storage type flag: 1 for disk-backed tables, 0 for memory.
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r.open(name)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # Merge with what has been filled so far, then rebind this
            # object to the (possibly merged) table.
            if not first:
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
2685
2686 def __getitem__(self, key):
2687 if key < 0:
2688 key += self.nrow()
2689 if key >= self.nrow():
2690 raise IndexError("Row index out of range.")
2691 return self._getspectrum(key)
2692
2693 def __setitem__(self, key, value):
2694 if key < 0:
2695 key += self.nrow()
2696 if key >= self.nrow():
2697 raise IndexError("Row index out of range.")
2698 if not hasattr(value, "__len__") or \
2699 len(value) > self.nchan(self.getif(key)):
2700 raise ValueError("Spectrum length doesn't match.")
2701 return self._setspectrum(value, key)
2702
    def __len__(self):
        """Return the number of rows in the scantable."""
        return self.nrow()
2705
2706 def __iter__(self):
2707 for i in range(len(self)):
2708 yield self[i]
Note: See TracBrowser for help on using the repository browser.