source: branches/newfiller/python/scantable.py@ 1791

Last change on this file since 1791 was 1757, checked in by Kana Sugimoto, 14 years ago

New Development: Yes

JIRA Issue: Yes (CAS-2211)

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: ASAP 3.0.0 interface changes

Test Programs:

Put in Release Notes: Yes

Module(s): all the CASA sd tools and tasks are affected.

Description: Merged ATNF-ASAP 3.0.0 developments to CASA (alma) branch.

Note you also need to update casa/code/atnf.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.4 KB
1import os
2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
7from asap._asap import Scantable
8from asap import rcParams
9from asap import print_log, print_log_dec
10from asap import asaplog
11from asap import selector
12from asap import linecatalog
13from asap.coordinate import coordinate
14from asap import _n_bools, mask_not, mask_and, mask_or
15
16
17def preserve_selection(func):
18 @wraps_dec(func)
19 def wrap(obj, *args, **kw):
20 basesel = obj.get_selection()
21 val = func(obj, *args, **kw)
22 obj.set_selection(basesel)
23 return val
24 return wrap
25
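# Usage sketch: preserve_selection decorates a function whose first argument is
# a scantable, restoring whatever selection was active before the call once the
# call returns. The helper name 'rms_of_if0' below is hypothetical.
#
#     @preserve_selection
#     def rms_of_if0(scan):
#         scan.set_selection(selector(ifs=[0]))
#         return scan.stats('rms')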
26
27def is_scantable(filename):
28 return (os.path.isdir(filename)
29 and not os.path.exists(filename+'/table.f1')
30 and os.path.exists(filename+'/table.info'))
31
32
33class scantable(Scantable):
34 """
35 The ASAP container for scans
36 """
37
38 #@print_log_dec
39 def __init__(self, filename, average=None, unit=None, getpt=None, antenna=None, parallactify=None):
40 """
41 Create a scantable from a saved one or make a reference
42 Parameters:
43 filename: the name of an asap table on disk
44 or
45 the name of a rpfits/sdfits/ms file
46 (integrations within scans are auto averaged
47 and the whole file is read)
48 or
49 [advanced] a reference to an existing
50 scantable
51 average: average all integrations within a scan on read.
52 The default (True) is taken from .asaprc.
53 unit: brightness unit; must be consistent with K or Jy.
54 Over-rides the default selected by the reader
55 (input rpfits/sdfits/ms) or replaces the value
56 in existing scantables
57 getpt: for MeasurementSet input data only:
58 If True, all pointing data are filled.
59 The default is True. Setting it to False can make
60 loading the MS data faster in some cases.
61 antenna: Antenna selection: an integer (id), a string (name
62 or id), or a list of either.
63 parallactify: Indicate that the data had been parallactified.
64 Default is taken from the rc file.
65 """
66 if average is None:
67 average = rcParams['scantable.autoaverage']
68 if getpt is None:
69 getpt = True
70 if antenna is None:
71 antenna = ''
72 elif type(antenna) == int:
73 antenna = '%s'%antenna
74 elif type(antenna) == list:
75 tmpstr = ''
76 for i in range( len(antenna) ):
77 if type(antenna[i]) == int:
78 tmpstr = tmpstr + ('%s,'%(antenna[i]))
79 elif type(antenna[i]) == str:
80 tmpstr=tmpstr+antenna[i]+','
81 else:
82 asaplog.push('Bad antenna selection.')
83 print_log('ERROR')
84 return
85 antenna = tmpstr.rstrip(',')
86 parallactify = parallactify or rcParams['scantable.parallactify']
87 varlist = vars()
88 from asap._asap import stmath
89 self._math = stmath( rcParams['insitu'] )
90 if isinstance(filename, Scantable):
91 Scantable.__init__(self, filename)
92 else:
93 if isinstance(filename, str):
94 filename = os.path.expandvars(filename)
95 filename = os.path.expanduser(filename)
96 if not os.path.exists(filename):
97 s = "File '%s' not found." % (filename)
98 if rcParams['verbose']:
99 asaplog.push(s)
100 print_log('ERROR')
101 return
102 raise IOError(s)
103 if is_scantable(filename):
104 ondisk = rcParams['scantable.storage'] == 'disk'
105 Scantable.__init__(self, filename, ondisk)
106 if unit is not None:
107 self.set_fluxunit(unit)
108 # do not reset to the default freqframe
109 #self.set_freqframe(rcParams['scantable.freqframe'])
110 elif os.path.isdir(filename) \
111 and not os.path.exists(filename+'/table.f1'):
112 msg = "The given file '%s'is not a valid " \
113 "asap table." % (filename)
114 if rcParams['verbose']:
115 #print msg
116 asaplog.push( msg )
117 print_log( 'ERROR' )
118 return
119 else:
120 raise IOError(msg)
121 else:
122 self._fill([filename], unit, average, getpt, antenna)
123 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
124 and isinstance(filename[-1], str):
125 self._fill(filename, unit, average, getpt, antenna)
126 self.parallactify(parallactify)
127 self._add_history("scantable", varlist)
128 print_log()
129
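# Construction sketch; the file names below are placeholders:
#
#     s1 = scantable('data.asap')                          # existing ASAP table
#     s2 = scantable('2010-01-01_0101.rpf', average=True)  # fill from rpfits
#     s3 = scantable('obs.ms', unit='Jy', antenna='0')     # MS input, antenna 0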
130 #@print_log_dec
131 def save(self, name=None, format=None, overwrite=False):
132 """
133 Store the scantable on disk. This can be an asap (aips++) Table,
134 SDFITS or MS2 format.
135 Parameters:
136 name: the name of the outputfile. For format "ASCII"
137 this is the root file name (data in 'name'.txt
138 and header in 'name'_header.txt)
139 format: an optional file format. Default is ASAP.
140 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
141 'SDFITS' (save as SDFITS file)
142 'ASCII' (saves as ascii text file)
143 'MS2' (saves as an aips++
144 MeasurementSet V2)
145 'FITS' (save as image FITS - not
146 readable by class)
147 'CLASS' (save as FITS readable by CLASS)
148 overwrite: whether to overwrite the file if it already exists.
149 The default (False) returns with a warning
150 without writing the output. USE WITH CARE.
151 Example:
152 scan.save('myscan.asap')
153 scan.save('myscan.sdfits', 'SDFITS')
154 """
155 from os import path
156 format = format or rcParams['scantable.save']
157 suffix = '.'+format.lower()
158 if name is None or name == "":
159 name = 'scantable'+suffix
160 msg = "No filename given. Using default name %s..." % name
161 asaplog.push(msg)
162 name = path.expandvars(name)
163 if path.isfile(name) or path.isdir(name):
164 if not overwrite:
165 msg = "File %s exists." % name
166 if rcParams['verbose']:
167 #print msg
168 asaplog.push( msg )
169 print_log( 'ERROR' )
170 return
171 else:
172 raise IOError(msg)
173 format2 = format.upper()
174 if format2 == 'ASAP':
175 self._save(name)
176 else:
177 from asap._asap import stwriter as stw
178 writer = stw(format2)
179 writer.write(self, name)
180 print_log()
181 return
182
183 def copy(self):
184 """
185 Return a copy of this scantable.
186 Note:
187 This makes a full (deep) copy. scan2 = scan1 makes a reference.
188 Parameters:
189 none
190 Example:
191 copiedscan = scan.copy()
192 """
193 sd = scantable(Scantable._copy(self))
194 return sd
195
196 def drop_scan(self, scanid=None):
197 """
198 Return a new scantable where the specified scan number(s) has(have)
199 been dropped.
200 Parameters:
201 scanid: a (list of) scan number(s)
202 """
203 from asap import _is_sequence_or_number as _is_valid
204 from asap import _to_list
205 from asap import unique
206 if not _is_valid(scanid):
207 if rcParams['verbose']:
208 #print "Please specify a scanno to drop from the scantable"
209 asaplog.push( 'Please specify a scanno to drop from the scantable' )
210 print_log( 'ERROR' )
211 return
212 else:
213 raise RuntimeError("No scan given")
214 try:
215 scanid = _to_list(scanid)
216 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
217 for sid in scanid: allscans.remove(sid)
218 if len(allscans) == 0:
219 raise ValueError("Can't remove all scans")
220 except ValueError:
221 if rcParams['verbose']:
222 #print "Couldn't find any match."
223 print_log()
224 asaplog.push( "Couldn't find any match." )
225 print_log( 'ERROR' )
226 return
227 else: raise
228 try:
229 sel = selector(scans=allscans)
230 return self._select_copy(sel)
231 except RuntimeError:
232 if rcParams['verbose']:
233 #print "Couldn't find any match."
234 print_log()
235 asaplog.push( "Couldn't find any match." )
236 print_log( 'ERROR' )
237 else:
238 raise
239
240 def _select_copy(self, selection):
241 orig = self.get_selection()
242 self.set_selection(orig+selection)
243 cp = self.copy()
244 self.set_selection(orig)
245 return cp
246
247 def get_scan(self, scanid=None):
248 """
249 Return a specific scan (by scanno) or collection of scans (by
250 source name) in a new scantable.
251 Note:
252 See scantable.drop_scan() for the inverse operation.
253 Parameters:
254 scanid: a (list of) scanno or a source name, unix-style
255 patterns are accepted for source name matching, e.g.
256 '*_R' gets all 'ref' scans
257 Example:
258 # get all scans containing the source '323p459'
259 newscan = scan.get_scan('323p459')
260 # get all 'off' scans
261 refscans = scan.get_scan('*_R')
262 # get a subset of scans by scanno (as listed in scan.summary())
263 newscan = scan.get_scan([0, 2, 7, 10])
264 """
265 if scanid is None:
266 if rcParams['verbose']:
267 #print "Please specify a scan no or name to " \
268 # "retrieve from the scantable"
269 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
270 print_log( 'ERROR' )
271 return
272 else:
273 raise RuntimeError("No scan given")
274
275 try:
276 bsel = self.get_selection()
277 sel = selector()
278 if type(scanid) is str:
279 sel.set_name(scanid)
280 return self._select_copy(sel)
281 elif type(scanid) is int:
282 sel.set_scans([scanid])
283 return self._select_copy(sel)
284 elif type(scanid) is list:
285 sel.set_scans(scanid)
286 return self._select_copy(sel)
287 else:
288 msg = "Illegal scanid type, use 'int' or 'list' if ints."
289 if rcParams['verbose']:
290 #print msg
291 asaplog.push( msg )
292 print_log( 'ERROR' )
293 else:
294 raise TypeError(msg)
295 except RuntimeError:
296 if rcParams['verbose']:
297 #print "Couldn't find any match."
298 print_log()
299 asaplog.push( "Couldn't find any match." )
300 print_log( 'ERROR' )
301 else: raise
302
303 def __str__(self):
304 return Scantable._summary(self, True)
305
306 def summary(self, filename=None):
307 """
308 Print a summary of the contents of this scantable.
309 Parameters:
310 filename: the name of a file to write the output to
311 Default - no file output
312 """
313 info = Scantable._summary(self, True)
314 if filename is not None:
315 if filename == "":
316 filename = 'scantable_summary.txt'
317 from os.path import expandvars, isdir
318 filename = expandvars(filename)
319 if not isdir(filename):
320 data = open(filename, 'w')
321 data.write(info)
322 data.close()
323 else:
324 msg = "Illegal file name '%s'." % (filename)
325 if rcParams['verbose']:
326 #print msg
327 asaplog.push( msg )
328 print_log( 'ERROR' )
329 else:
330 raise IOError(msg)
331 if rcParams['verbose']:
332 try:
333 from IPython.genutils import page as pager
334 except ImportError:
335 from pydoc import pager
336 pager(info)
337 else:
338 return info
339
340 def get_spectrum(self, rowno):
341 """Return the spectrum for the current row in the scantable as a list.
342 Parameters:
343 rowno: the row number to retrieve the spectrum from
344 """
345 return self._getspectrum(rowno)
346
347 def get_mask(self, rowno):
348 """Return the mask for the current row in the scantable as a list.
349 Parameters:
350 rowno: the row number to retrieve the mask from
351 """
352 return self._getmask(rowno)
353
354 def set_spectrum(self, spec, rowno):
355 """Return the spectrum for the current row in the scantable as a list.
356 Parameters:
357 spec: the spectrum
358 rowno: the row number to set the spectrum for
359 """
360 assert(len(spec) == self.nchan())
361 return self._setspectrum(spec, rowno)
362
363 def get_coordinate(self, rowno):
364 """Return the (spectral) coordinate for a a given 'rowno'.
365 NOTE:
366 * This coordinate is only valid until a scantable method modifies
367 the frequency axis.
368 * This coordinate contains the original frequency set-up,
369 NOT the new frame. The conversions however are done using the user
370 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
371 use scantable.freq_align first. Without it there is no closure,
372 i.e.
373 c = myscan.get_coordinate(0)
374 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
375
376 Parameters:
377 rowno: the row number for the spectral coordinate
378
379 """
380 return coordinate(Scantable.get_coordinate(self, rowno))
381
382 def get_selection(self):
383 """
384 Get the selection object currently set on this scantable.
385 Parameters:
386 none
387 Example:
388 sel = scan.get_selection()
389 sel.set_ifs(0) # select IF 0
390 scan.set_selection(sel) # apply modified selection
391 """
392 return selector(self._getselection())
393
394 def set_selection(self, selection=None, **kw):
395 """
396 Select a subset of the data. All following operations on this scantable
397 are only applied to this selection.
398 Parameters:
399 selection: a selector object (default unset the selection),
400
401 or
402
403 any combination of
404 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
405
406 Examples:
407 sel = selector() # create a selection object
408 sel.set_scans([0, 3]) # select SCANNO 0 and 3
409 scan.set_selection(sel) # set the selection
410 scan.summary() # will only print summary of scans 0 and 3
411 scan.set_selection() # unset the selection
412 # or the equivalent
413 scan.set_selection(scans=[0,3])
414 scan.summary() # will only print summary of scans 0 and 3
415 scan.set_selection() # unset the selection
416 """
417 if selection is None:
418 # reset
419 if len(kw) == 0:
420 selection = selector()
421 else:
422 # try keywords
423 for k in kw:
424 if k not in selector.fields:
425 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
426 selection = selector(**kw)
427 self._setselection(selection)
428
429 def get_row(self, row=0, insitu=None):
430 """
431 Select a row in the scantable.
432 Return a scantable with a single row.
433 Parameters:
434 row: row no of integration, default is 0.
435 insitu: if False a new scantable is returned.
436 Otherwise, the selection is done in-situ
437 The default is taken from .asaprc (False)
438 """
439 if insitu is None: insitu = rcParams['insitu']
440 if not insitu:
441 workscan = self.copy()
442 else:
443 workscan = self
444 # Select a row
445 sel=selector()
446 sel.set_scans([workscan.getscan(row)])
447 sel.set_cycles([workscan.getcycle(row)])
448 sel.set_beams([workscan.getbeam(row)])
449 sel.set_ifs([workscan.getif(row)])
450 sel.set_polarisations([workscan.getpol(row)])
451 sel.set_name(workscan._getsourcename(row))
452 workscan.set_selection(sel)
453 if workscan.nrow() != 1:
454 msg = "Could not identify a single row. %d rows selected."%(workscan.nrow())
455 raise RuntimeError(msg)
456 del sel
457 if insitu:
458 self._assign(workscan)
459 else:
460 return workscan
461
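# Usage sketch for get_row; the row number is arbitrary:
#
#     one = scan.get_row(row=5, insitu=False)   # scantable holding only row 5
#     print one.nrow()                          # -> 1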
462 #def stats(self, stat='stddev', mask=None):
463 def stats(self, stat='stddev', mask=None, form='3.3f'):
464 """
465 Determine the specified statistic of the current beam/if/pol
466 Takes a 'mask' as an optional parameter to specify which
467 channels should be excluded.
468 Parameters:
469 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
470 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
471 mask: an optional mask specifying where the statistic
472 should be determined.
473 form: format string to print statistic values
474 Example:
475 scan.set_unit('channel')
476 msk = scan.create_mask([100, 200], [500, 600])
477 scan.stats(stat='mean', mask=msk)
478 """
479 mask = mask or []
480 if not self._check_ifs():
481 raise ValueError("Cannot apply mask as the IFs have different "
482 "number of channels. Please use setselection() "
483 "to select individual IFs")
484 rtnabc = False
485 if stat.lower().endswith('_abc'): rtnabc = True
486 getchan = False
487 if stat.lower().startswith('min') or stat.lower().startswith('max'):
488 chan = self._math._minmaxchan(self, mask, stat)
489 getchan = True
490 statvals = []
491 if not rtnabc: statvals = self._math._stats(self, mask, stat)
492
493 #def cb(i):
494 # return statvals[i]
495
496 #return self._row_callback(cb, stat)
497
498 label=stat
499 #callback=cb
500 out = ""
501 #outvec = []
502 sep = '-'*50
503 for i in range(self.nrow()):
504 refstr = ''
505 statunit= ''
506 if getchan:
507 qx, qy = self.chan2data(rowno=i, chan=chan[i])
508 if rtnabc:
509 statvals.append(qx['value'])
510 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
511 statunit= '['+qx['unit']+']'
512 else:
513 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
514
515 tm = self._gettime(i)
516 src = self._getsourcename(i)
517 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
518 out += 'Time[%s]:\n' % (tm)
519 if self.nbeam(-1) > 1:
520 out += ' Beam[%d] ' % (self.getbeam(i))
521 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
522 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
523 #outvec.append(callback(i))
524 #out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
525 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
526 out += sep+"\n"
527
528 if rcParams['verbose']:
529 import os
530 if os.environ.has_key( 'USER' ):
531 usr=os.environ['USER']
532 else:
533 import commands
534 usr=commands.getoutput( 'whoami' )
535 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
536 f=open(tmpfile,'w')
537 print >> f, sep
538 print >> f, ' %s %s' % (label, statunit)
539 print >> f, sep
540 print >> f, out
541 f.close()
542 f=open(tmpfile,'r')
543 x=f.readlines()
544 f.close()
545 blanc=''
546 asaplog.push(blanc.join(x), False)
547 #for xx in x:
548 # asaplog.push( xx, False )
549 print_log()
550 return statvals
551
552 def chan2data(self, rowno=0, chan=0):
553 """
554 Returns channel/frequency/velocity and spectral value
555 at an arbitrary row and channel in the scantable.
556 Parameters:
557 rowno: a row number in the scantable. Default is the
558 first row, i.e. rowno=0
559 chan: a channel in the scantable. Default is the first
560 channel, i.e. chan=0
561 """
562 if isinstance(rowno, int) and isinstance(chan, int):
563 qx = {'unit': self.get_unit(),
564 'value': self._getabcissa(rowno)[chan]}
565 qy = {'unit': self.get_fluxunit(),
566 'value': self._getspectrum(rowno)[chan]}
567 return qx, qy
568
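# Sketch of what chan2data returns for row 0, channel 100 (values illustrative):
#
#     qx, qy = scan.chan2data(rowno=0, chan=100)
#     # qx -> {'unit': 'channel', 'value': 100.0}
#     # qy -> {'unit': 'K', 'value': <spectral value at channel 100>}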
569 def stddev(self, mask=None):
570 """
571 Determine the standard deviation of the current beam/if/pol
572 Takes a 'mask' as an optional parameter to specify which
573 channels should be excluded.
574 Parameters:
575 mask: an optional mask specifying where the standard
576 deviation should be determined.
577
578 Example:
579 scan.set_unit('channel')
580 msk = scan.create_mask([100, 200], [500, 600])
581 scan.stddev(mask=msk)
582 """
583 return self.stats(stat='stddev', mask=mask)
584
585
586 def get_column_names(self):
587 """
588 Return a list of column names, which can be used for selection.
589 """
590 return list(Scantable.get_column_names(self))
591
592 def get_tsys(self, row=-1):
593 """
594 Return the System temperatures.
595 Returns:
596 a list of Tsys values for the current selection
597 """
598 if row > -1:
599 return self._get_column(self._gettsys, row)
600 return self._row_callback(self._gettsys, "Tsys")
601
602
603 def get_weather(self, row=-1):
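"""
Return the weather information for the observations.
Parameters:
row: row no of integration. Default -1 return all rows.
Returns:
a dict (or a list of dicts if row=-1) with the keys
'temperature', 'pressure', 'humidity', 'windspeed' and 'windaz'.
"""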
604 values = self._get_column(self._get_weather, row)
605 if row > -1:
606 return {'temperature': values[0],
607 'pressure': values[1], 'humidity' : values[2],
608 'windspeed' : values[3], 'windaz' : values[4]
609 }
610 else:
611 out = []
612 for r in values:
613
614 out.append({'temperature': r[0],
615 'pressure': r[1], 'humidity' : r[2],
616 'windspeed' : r[3], 'windaz' : r[4]
617 })
618 return out
619
620 def _row_callback(self, callback, label):
621 out = ""
622 outvec = []
623 sep = '-'*50
624 for i in range(self.nrow()):
625 tm = self._gettime(i)
626 src = self._getsourcename(i)
627 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
628 out += 'Time[%s]:\n' % (tm)
629 if self.nbeam(-1) > 1:
630 out += ' Beam[%d] ' % (self.getbeam(i))
631 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
632 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
633 outvec.append(callback(i))
634 out += '= %3.3f\n' % (outvec[i])
635 out += sep+'\n'
636 if rcParams['verbose']:
637 asaplog.push(sep)
638 asaplog.push(" %s" % (label))
639 asaplog.push(sep)
640 asaplog.push(out)
641 print_log()
642 return outvec
643
644 def _get_column(self, callback, row=-1):
645 """
646 """
647 if row == -1:
648 return [callback(i) for i in range(self.nrow())]
649 else:
650 if 0 <= row < self.nrow():
651 return callback(row)
652
653
654 def get_time(self, row=-1, asdatetime=False):
655 """
656 Get a list of time stamps for the observations.
657 Return a string (or a datetime object if asdatetime=True) for each integration time stamp in the scantable.
658 Parameters:
659 row: row no of integration. Default -1 return all rows
660 asdatetime: return values as datetime objects rather than strings
661 Example:
662 none
663 """
664 from time import strptime
665 from datetime import datetime
666 times = self._get_column(self._gettime, row)
667 if not asdatetime:
668 return times
669 format = "%Y/%m/%d/%H:%M:%S"
670 if isinstance(times, list):
671 return [datetime(*strptime(i, format)[:6]) for i in times]
672 else:
673 return datetime(*strptime(times, format)[:6])
674
675
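# Usage sketch for get_time:
#
#     stamps = scan.get_time()                    # list of 'YYYY/MM/DD/HH:MM:SS' strings
#     t0 = scan.get_time(row=0, asdatetime=True)  # a single datetime.datetime object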
676 def get_inttime(self, row=-1):
677 """
678 Get a list of integration times for the observations.
679 Return a time in seconds for each integration in the scantable.
680 Parameters:
681 row: row no of integration. Default -1 return all rows.
682 Example:
683 none
684 """
685 return self._get_column(self._getinttime, row)
686
687
688 def get_sourcename(self, row=-1):
689 """
690 Get a list of source names for the observations.
691 Return a string for each integration in the scantable.
692 Parameters:
693 row: row no of integration. Default -1 return all rows.
694 Example:
695 none
696 """
697 return self._get_column(self._getsourcename, row)
698
699 def get_elevation(self, row=-1):
700 """
701 Get a list of elevations for the observations.
702 Return a float for each integration in the scantable.
703 Parameters:
704 row: row no of integration. Default -1 return all rows.
705 Example:
706 none
707 """
708 return self._get_column(self._getelevation, row)
709
710 def get_azimuth(self, row=-1):
711 """
712 Get a list of azimuths for the observations.
713 Return a float for each integration in the scantable.
714 Parameters:
715 row: row no of integration. Default -1 return all rows.
716 Example:
717 none
718 """
719 return self._get_column(self._getazimuth, row)
720
721 def get_parangle(self, row=-1):
722 """
723 Get a list of parallactic angles for the observations.
724 Return a float for each integration in the scantable.
725 Parameters:
726 row: row no of integration. Default -1 return all rows.
727 Example:
728 none
729 """
730 return self._get_column(self._getparangle, row)
731
732 def get_direction(self, row=-1):
733 """
734 Get a list of Positions on the sky (direction) for the observations.
735 Return a string for each integration in the scantable.
736 Parameters:
737 row: row no of integration. Default -1 return all rows
738 Example:
739 none
740 """
741 return self._get_column(self._getdirection, row)
742
743 def get_directionval(self, row=-1):
744 """
745 Get a list of Positions on the sky (direction) for the observations.
746 Return a float for each integration in the scantable.
747 Parameters:
748 row: row no of integration. Default -1 return all rows
749 Example:
750 none
751 """
752 return self._get_column(self._getdirectionvec, row)
753
754 #@print_log_dec
755 def set_unit(self, unit='channel'):
756 """
757 Set the unit for all following operations on this scantable
758 Parameters:
759 unit: optional unit, default is 'channel'
760 one of '*Hz', 'km/s', 'channel', ''
761 """
762 varlist = vars()
763 if unit in ['', 'pixel', 'channel']:
764 unit = ''
765 inf = list(self._getcoordinfo())
766 inf[0] = unit
767 self._setcoordinfo(inf)
768 self._add_history("set_unit", varlist)
769
770 #@print_log_dec
771 def set_instrument(self, instr):
772 """
773 Set the instrument for subsequent processing.
774 Parameters:
775 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
776 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
777 """
778 self._setInstrument(instr)
779 self._add_history("set_instument", vars())
780 print_log()
781
782 #@print_log_dec
783 def set_feedtype(self, feedtype):
784 """
785 Overwrite the feed type, which might not be set correctly.
786 Parameters:
787 feedtype: 'linear' or 'circular'
788 """
789 self._setfeedtype(feedtype)
790 self._add_history("set_feedtype", vars())
791 print_log()
792
793 #@print_log_dec
794 def set_doppler(self, doppler='RADIO'):
795 """
796 Set the doppler for all following operations on this scantable.
797 Parameters:
798 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
799 """
800 varlist = vars()
801 inf = list(self._getcoordinfo())
802 inf[2] = doppler
803 self._setcoordinfo(inf)
804 self._add_history("set_doppler", vars())
805 print_log()
806
807 #@print_log_dec
808 def set_freqframe(self, frame=None):
809 """
810 Set the frame type of the Spectral Axis.
811 Parameters:
812 frame: an optional frame type, default 'LSRK'. Valid frames are:
813 'TOPO', 'LSRD', 'LSRK', 'BARY',
814 'GEO', 'GALACTO', 'LGROUP', 'CMB'
815 Examples:
816 scan.set_freqframe('BARY')
817 """
818 frame = frame or rcParams['scantable.freqframe']
819 varlist = vars()
820 # "REST" is not implemented in casacore
821 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
822 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
823 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
824 'GEO', 'GALACTO', 'LGROUP', 'CMB']
825
826 if frame in valid:
827 inf = list(self._getcoordinfo())
828 inf[1] = frame
829 self._setcoordinfo(inf)
830 self._add_history("set_freqframe", varlist)
831 else:
832 msg = "Please specify a valid freq type. Valid types are:\n", valid
833 if rcParams['verbose']:
834 #print msg
835 asaplog.push( msg )
836 print_log( 'ERROR' )
837 else:
838 raise TypeError(msg)
839 print_log()
840
841 def set_dirframe(self, frame=""):
842 """
843 Set the frame type of the Direction on the sky.
844 Parameters:
845 frame: an optional frame type, default ''. Valid frames are:
846 'J2000', 'B1950', 'GALACTIC'
847 Examples:
848 scan.set_dirframe('GALACTIC')
849 """
850 varlist = vars()
851 try:
852 Scantable.set_dirframe(self, frame)
853 except RuntimeError, msg:
854 if rcParams['verbose']:
855 #print msg
856 print_log()
857 asaplog.push( str(msg) )
858 print_log( 'ERROR' )
859 else:
860 raise
861 self._add_history("set_dirframe", varlist)
862
863 def get_unit(self):
864 """
865 Get the default unit set in this scantable
866 Returns:
867 A unit string
868 """
869 inf = self._getcoordinfo()
870 unit = inf[0]
871 if unit == '': unit = 'channel'
872 return unit
873
874 def get_abcissa(self, rowno=0):
875 """
876 Get the abcissa in the current coordinate setup for the currently
877 selected Beam/IF/Pol
878 Parameters:
879 rowno: an optional row number in the scantable. Default is the
880 first row, i.e. rowno=0
881 Returns:
882 The abcissa values and the format string (as a dictionary)
883 """
884 abc = self._getabcissa(rowno)
885 lbl = self._getabcissalabel(rowno)
886 print_log()
887 return abc, lbl
888
889 def flag(self, mask=None, unflag=False):
890 """
891 Flag the selected data using an optional channel mask.
892 Parameters:
893 mask: an optional channel mask, created with create_mask. Default
894 (no mask) is all channels.
895 unflag: if True, unflag the data
896 """
897 varlist = vars()
898 mask = mask or []
899 try:
900 self._flag(mask, unflag)
901 except RuntimeError, msg:
902 if rcParams['verbose']:
903 #print msg
904 print_log()
905 asaplog.push( str(msg) )
906 print_log( 'ERROR' )
907 return
908 else: raise
909 self._add_history("flag", varlist)
910
911 def flag_row(self, rows=[], unflag=False):
912 """
913 Flag the selected data in row-based manner.
914 Parameters:
915 rows: list of row numbers to be flagged. Default is no row (must be explicitly specified to execute row-based flagging).
916 unflag: if True, unflag the data.
917 """
918 varlist = vars()
919 try:
920 self._flag_row(rows, unflag)
921 except RuntimeError, msg:
922 if rcParams['verbose']:
923 print_log()
924 asaplog.push( str(msg) )
925 print_log('ERROR')
926 return
927 else: raise
928 self._add_history("flag_row", varlist)
929
930 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
931 """
932 Flag the selected data outside a specified range (channel-based)
933 Parameters:
934 uthres: upper threshold.
935 dthres: lower threshold
936 clipoutside: True for flagging data outside the range [dthres:uthres].
937 False for flagging data inside the range.
938 unflag : if True, unflag the data.
939 """
940 varlist = vars()
941 try:
942 self._clip(uthres, dthres, clipoutside, unflag)
943 except RuntimeError, msg:
944 if rcParams['verbose']:
945 print_log()
946 asaplog.push(str(msg))
947 print_log('ERROR')
948 return
949 else: raise
950 self._add_history("clip", varlist)
951
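# Usage sketch for clip; the thresholds are illustrative. With clipoutside=True
# everything outside the range [dthres:uthres] is flagged.
#
#     scan.clip(uthres=5.0, dthres=-1.0, clipoutside=True)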
952 #@print_log_dec
953 def lag_flag(self, start, end, unit="MHz", insitu=None):
954 #def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
955 """
956 Flag the data in 'lag' space by providing a frequency to remove.
957 Flagged data in the scantable gets interpolated over the region.
958 No taper is applied.
959 Parameters:
960 start: the start frequency (really a period within the
961 bandwidth) or period to remove
962 end: the end frequency or period to remove
963 unit: the frequency unit (default "MHz") or "" for
964 explicit lag channels
965 Notes:
966 It is recommended to flag edges of the band or strong
967 signals beforehand.
968 """
969 if insitu is None: insitu = rcParams['insitu']
970 self._math._setinsitu(insitu)
971 varlist = vars()
972 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
973 if not (unit == "" or base.has_key(unit)):
974 raise ValueError("%s is not a valid unit." % unit)
975 try:
976 if unit == "":
977 s = scantable(self._math._lag_flag(self, start, end, "lags"))
978 else:
979 s = scantable(self._math._lag_flag(self, start*base[unit],
980 end*base[unit], "frequency"))
981 except RuntimeError, msg:
982 if rcParams['verbose']:
983 #print msg
984 print_log()
985 asaplog.push( str(msg) )
986 print_log( 'ERROR' )
987 return
988 else: raise
989 s._add_history("lag_flag", varlist)
990 print_log()
991 if insitu:
992 self._assign(s)
993 else:
994 return s
995
996 #@print_log_dec
997 def create_mask(self, *args, **kwargs):
998 """
999 Compute and return a mask based on [min, max] windows.
1000 The specified windows are to be INCLUDED, when the mask is
1001 applied.
1002 Parameters:
1003 [min, max], [min2, max2], ...
1004 Pairs of start/end points (inclusive) specifying the regions
1005 to be masked
1006 invert: optional argument. If specified as True,
1007 return an inverted mask, i.e. the regions
1008 specified are EXCLUDED
1009 row: create the mask using the specified row for
1010 unit conversions, default is row=0
1011 only necessary if frequency varies over rows.
1012 Example:
1013 scan.set_unit('channel')
1014 a)
1015 msk = scan.create_mask([400, 500], [800, 900])
1016 # masks everything outside 400 and 500
1017 # and 800 and 900 in the unit 'channel'
1018
1019 b)
1020 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1021 # masks the regions between 400 and 500
1022 # and 800 and 900 in the unit 'channel'
1023 c)
1024 mask only channel 400
1025 msk = scan.create_mask([400])
1026 """
1027 row = kwargs.get("row", 0)
1028 data = self._getabcissa(row)
1029 u = self._getcoordinfo()[0]
1030 if rcParams['verbose']:
1031 if u == "": u = "channel"
1032 msg = "The current mask window unit is %s" % u
1033 i = self._check_ifs()
1034 if not i:
1035 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1036 asaplog.push(msg)
1037 n = self.nchan()
1038 msk = _n_bools(n, False)
1039 # test if args is a 'list' or a 'normal *args - UGLY!!!
1040
1041 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1042 and args or args[0]
1043 for window in ws:
1044 if len(window) == 1:
1045 window = [window[0], window[0]]
1046 if len(window) == 0 or len(window) > 2:
1047 raise ValueError("A window needs to be defined as [start(, end)]")
1048 if window[0] > window[1]:
1049 tmp = window[0]
1050 window[0] = window[1]
1051 window[1] = tmp
1052 for i in range(n):
1053 if data[i] >= window[0] and data[i] <= window[1]:
1054 msk[i] = True
1055 if kwargs.has_key('invert'):
1056 if kwargs.get('invert'):
1057 msk = mask_not(msk)
1058 print_log()
1059 return msk
1060
1061 def get_masklist(self, mask=None, row=0):
1062 """
1063 Compute and return a list of mask windows, [min, max].
1064 Parameters:
1065 mask: channel mask, created with create_mask.
1066 row: calculate the masklist using the specified row
1067 for unit conversions, default is row=0
1068 only necessary if frequency varies over rows.
1069 Returns:
1070 [min, max], [min2, max2], ...
1071 Pairs of start/end points (inclusive) specifying
1072 the masked regions
1073 """
1074 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1075 raise TypeError("The mask should be list or tuple.")
1076 if len(mask) < 2:
1077 raise TypeError("The mask should contain more than one element")
1078 if self.nchan() != len(mask):
1079 msg = "Number of channels in scantable != number of mask elements"
1080 raise TypeError(msg)
1081 data = self._getabcissa(row)
1082 u = self._getcoordinfo()[0]
1083 if rcParams['verbose']:
1084 if u == "": u = "channel"
1085 msg = "The current mask window unit is %s" % u
1086 i = self._check_ifs()
1087 if not i:
1088 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1089 asaplog.push(msg)
1090 masklist=[]
1091 ist, ien = None, None
1092 ist, ien=self.get_mask_indices(mask)
1093 if ist is not None and ien is not None:
1094 for i in xrange(len(ist)):
1095 rng = [data[ist[i]], data[ien[i]]]
1096 rng.sort()
1097 masklist.append([rng[0], rng[1]])
1098 return masklist
1099
1100 def get_mask_indices(self, mask=None):
1101 """
1102 Compute and return lists of mask start indices and mask end indices.
1103 Parameters:
1104 mask: channel mask, created with create_mask.
1105 Returns:
1106 List of mask start indices and that of mask end indices,
1107 i.e., [istart1,istart2,....], [iend1,iend2,....].
1108 """
1109 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1110 raise TypeError("The mask should be list or tuple.")
1111 if len(mask) < 2:
1112 raise TypeError("The mask should contain more than one element")
1113 istart=[]
1114 iend=[]
1115 if mask[0]: istart.append(0)
1116 for i in range(len(mask)-1):
1117 if not mask[i] and mask[i+1]:
1118 istart.append(i+1)
1119 elif mask[i] and not mask[i+1]:
1120 iend.append(i)
1121 if mask[len(mask)-1]: iend.append(len(mask)-1)
1122 if len(istart) != len(iend):
1123 raise RuntimeError("Numbers of mask start != mask end.")
1124 for i in range(len(istart)):
1125 if istart[i] > iend[i]:
1126 raise RuntimeError("Mask start index > mask end index")
1127 break
1128 return istart,iend
1129
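# Sketch relating the mask helpers; channel numbers are illustrative:
#
#     msk = scan.create_mask([100, 200], [400, 450])
#     istart, iend = scan.get_mask_indices(msk)   # ([100, 400], [200, 450])
#     windows = scan.get_masklist(msk)            # [[xmin, xmax], ...] in the current unit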
1130# def get_restfreqs(self):
1131# """
1132# Get the restfrequency(s) stored in this scantable.
1133# The return value(s) are always of unit 'Hz'
1134# Parameters:
1135# none
1136# Returns:
1137# a list of doubles
1138# """
1139# return list(self._getrestfreqs())
1140
1141 def get_restfreqs(self, ids=None):
1142 """
1143 Get the restfrequency(s) stored in this scantable.
1144 The return value(s) are always of unit 'Hz'
1145 Parameters:
1146 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1147 be retrieved
1148 Returns:
1149 dictionary containing ids and a list of doubles for each id
1150 """
1151 if ids is None:
1152 rfreqs={}
1153 idlist = self.getmolnos()
1154 for i in idlist:
1155 rfreqs[i]=list(self._getrestfreqs(i))
1156 return rfreqs
1157 else:
1158 if type(ids)==list or type(ids)==tuple:
1159 rfreqs={}
1160 for i in ids:
1161 rfreqs[i]=list(self._getrestfreqs(i))
1162 return rfreqs
1163 else:
1164 return list(self._getrestfreqs(ids))
1165 #return list(self._getrestfreqs(ids))
1166
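# Sketch of the two return forms of get_restfreqs (frequencies shown are illustrative):
#
#     scan.get_restfreqs()     # e.g. {0: [1.420405752e9], 1: [1.6654018e9]}
#     scan.get_restfreqs(0)    # e.g. [1.420405752e9]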
1167 def set_restfreqs(self, freqs=None, unit='Hz'):
1168 """
1169 ********NEED TO BE UPDATED begin************
1170 Set or replace the specified rest frequency.
1171 If the 'freqs' argument holds a scalar,
1172 then that rest frequency will be applied to all the selected
1173 data. If the 'freqs' argument holds
1174 a vector, then it MUST be of equal or smaller length than
1175 the number of IFs (and the available restfrequencies will be
1176 replaced by this vector). In this case, *all* data have
1177 the restfrequency set per IF according
1178 to the corresponding value you give in the 'freqs' vector.
1179 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1180 IF 1 gets restfreq 2e9.
1181 ********NEED TO BE UPDATED end************
1182 You can also specify the frequencies via a linecatalog.
1183
1184 Parameters:
1185 freqs: list of rest frequency values or string identifiers
1186 unit: unit for rest frequency (default 'Hz')
1187
1188 Example:
1189 # set the given restfrequency for the all currently selected IFs
1190 scan.set_restfreqs(freqs=1.4e9)
1191 # set multiple restfrequencies to all the selected data
1192 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1193 # If the number of IFs in the data is >= 2 the IF0 gets the first
1194 # value IF1 the second... NOTE that freqs needs to be
1195 # specified in list of list (e.g. [[],[],...] ).
1196 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
1197 #set the given restfrequency for the whole table (by name)
1198 scan.set_restfreqs(freqs="OH1667")
1199
1200 Note:
1201 To do more sophisticated rest frequency setting, e.g. on a
1202 source and IF basis, use scantable.set_selection() before using
1203 this function.
1204 # provided your scantable is called scan
1205 selection = selector()
1206 selection.set_name("ORION*")
1207 selection.set_ifs([1])
1208 scan.set_selection(selection)
1209 scan.set_restfreqs(freqs=86.6e9)
1210
1211 """
1212 varlist = vars()
1213 from asap import linecatalog
1214 # simple value
1215 if isinstance(freqs, int) or isinstance(freqs, float):
1216 # TT mod
1217 #self._setrestfreqs(freqs, "",unit)
1218 self._setrestfreqs([freqs], [""],unit)
1219 # list of values
1220 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1221 # list values are scalars
1222 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1223 self._setrestfreqs(freqs, [""],unit)
1224 # list values are dicts, {'value': ..., 'name': ...}
1225 elif isinstance(freqs[-1], dict):
1226 #sel = selector()
1227 #savesel = self._getselection()
1228 #iflist = self.getifnos()
1229 #for i in xrange(len(freqs)):
1230 # sel.set_ifs(iflist[i])
1231 # self._setselection(sel)
1232 # self._setrestfreqs(freqs[i], "",unit)
1233 #self._setselection(savesel)
1234 self._setrestfreqs(freqs["value"],
1235 freqs["name"], "MHz")
1236 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1237 sel = selector()
1238 savesel = self._getselection()
1239 iflist = self.getifnos()
1240 if len(freqs)>len(iflist):
1241 raise ValueError("number of elements in list of list exceeds the current IF selections")
1242 for i in xrange(len(freqs)):
1243 sel.set_ifs(iflist[i])
1244 self._setselection(sel)
1245 self._setrestfreqs(freqs[i]["value"],
1246 freqs[i]["name"], "MHz")
1247 self._setselection(savesel)
1248 # freqs are to be taken from a linecatalog
1249 elif isinstance(freqs, linecatalog):
1250 sel = selector()
1251 savesel = self._getselection()
1252 for i in xrange(freqs.nrow()):
1253 sel.set_ifs(self.getifnos()[i])
1254 self._setselection(sel)
1255 self._setrestfreqs(freqs.get_frequency(i),
1256 freqs.get_name(i), "MHz")
1257 # ensure that we are not iterating past nIF
1258 if i == self.nif()-1: break
1259 self._setselection(savesel)
1260 else:
1261 return
1262 self._add_history("set_restfreqs", varlist)
1263
1264 def shift_refpix(self, delta):
1265 """
1266 Shift the reference pixel of the Spectral Coordinate by an
1267 integer amount.
1268 Parameters:
1269 delta: the amount to shift by
1270 Note:
1271 Be careful using this with broadband data.
1272 """
1273 Scantable.shift_refpix(self, delta)
1274
1275 def history(self, filename=None):
1276 """
1277 Print the history. Optionally to a file.
1278 Parameters:
1279 filename: The name of the file to save the history to.
1280 """
1281 hist = list(self._gethistory())
1282 out = "-"*80
1283 for h in hist:
1284 if h.startswith("---"):
1285 out += "\n"+h
1286 else:
1287 items = h.split("##")
1288 date = items[0]
1289 func = items[1]
1290 items = items[2:]
1291 out += "\n"+date+"\n"
1292 out += "Function: %s\n Parameters:" % (func)
1293 for i in items:
1294 s = i.split("=")
1295 out += "\n %s = %s" % (s[0], s[1])
1296 out += "\n"+"-"*80
1297 if filename is not None:
1298 if filename == "":
1299 filename = 'scantable_history.txt'
1300 import os
1301 filename = os.path.expandvars(os.path.expanduser(filename))
1302 if not os.path.isdir(filename):
1303 data = open(filename, 'w')
1304 data.write(out)
1305 data.close()
1306 else:
1307 msg = "Illegal file name '%s'." % (filename)
1308 if rcParams['verbose']:
1309 #print msg
1310 asaplog.push( msg )
1311 print_log( 'ERROR' )
1312 else:
1313 raise IOError(msg)
1314 if rcParams['verbose']:
1315 try:
1316 from IPython.genutils import page as pager
1317 except ImportError:
1318 from pydoc import pager
1319 pager(out)
1320 else:
1321 return out
1322 return
1323 #
1324 # Maths business
1325 #
1326 #@print_log_dec
1327 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1328 """
1329 Return the (time) weighted average of a scan.
1330 Note:
1331 in channels only - align if necessary
1332 Parameters:
1333 mask: an optional mask (only used for 'var' and 'tsys'
1334 weighting)
1335 scanav: True averages each scan separately
1336 False (default) averages all scans together,
1337 weight: Weighting scheme.
1338 'none' (mean no weight)
1339 'var' (1/var(spec) weighted)
1340 'tsys' (1/Tsys**2 weighted)
1341 'tint' (integration time weighted)
1342 'tintsys' (Tint/Tsys**2)
1343 'median' ( median averaging)
1344 The default is 'tint'
1345 align: align the spectra in velocity before averaging. It takes
1346 the time of the first spectrum as reference time.
1347 Example:
1348 # time average the scantable without using a mask
1349 newscan = scan.average_time()
1350 """
1351 varlist = vars()
1352 weight = weight or 'TINT'
1353 mask = mask or ()
1354 scanav = (scanav and 'SCAN') or 'NONE'
1355 scan = (self, )
1356 try:
1357 if align:
1358 scan = (self.freq_align(insitu=False), )
1359 s = None
1360 if weight.upper() == 'MEDIAN':
1361 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1362 scanav))
1363 else:
1364 s = scantable(self._math._average(scan, mask, weight.upper(),
1365 scanav))
1366 except RuntimeError, msg:
1367 if rcParams['verbose']:
1368 #print msg
1369 print_log()
1370 asaplog.push( str(msg) )
1371 print_log( 'ERROR' )
1372 return
1373 else: raise
1374 s._add_history("average_time", varlist)
1375 print_log()
1376 return s
1377
1378 #@print_log_dec
1379 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1380 """
1381 Return a scan where all spectra are converted to either
1382 Jansky or Kelvin depending upon the flux units of the scan table.
1383 By default the function tries to look the values up internally.
1384 If it can't find them (or if you want to over-ride), you must
1385 specify EITHER jyperk OR eta (and D which it will try to look up
1386 also if you don't set it). jyperk takes precedence if you set both.
1387 Parameters:
1388 jyperk: the Jy / K conversion factor
1389 eta: the aperture efficiency
1390 d: the geometric diameter (metres)
1391 insitu: if False a new scantable is returned.
1392 Otherwise, the scaling is done in-situ
1393 The default is taken from .asaprc (False)
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
1396 self._math._setinsitu(insitu)
1397 varlist = vars()
1398 jyperk = jyperk or -1.0
1399 d = d or -1.0
1400 eta = eta or -1.0
1401 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1402 s._add_history("convert_flux", varlist)
1403 print_log()
1404 if insitu: self._assign(s)
1405 else: return s
1406
1407 #@print_log_dec
1408 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1409 """
1410 Return a scan after applying a gain-elevation correction.
1411 The correction can be made via either a polynomial or a
1412 table-based interpolation (and extrapolation if necessary).
1413 You specify polynomial coefficients, an ascii table or neither.
1414 If you specify neither, then a polynomial correction will be made
1415 with built in coefficients known for certain telescopes (an error
1416 will occur if the instrument is not known).
1417 The data and Tsys are *divided* by the scaling factors.
1418 Parameters:
1419 poly: Polynomial coefficients (default None) to compute a
1420 gain-elevation correction as a function of
1421 elevation (in degrees).
1422 filename: The name of an ascii file holding correction factors.
1423 The first row of the ascii file must give the column
1424 names and these MUST include columns
1425 "ELEVATION" (degrees) and "FACTOR" (multiply data
1426 by this) somewhere.
1427 The second row must give the data type of the
1428 column. Use 'R' for Real and 'I' for Integer.
1429 An example file would be
1430 (actual factors are arbitrary) :
1431
1432 TIME ELEVATION FACTOR
1433 R R R
1434 0.1 0 0.8
1435 0.2 20 0.85
1436 0.3 40 0.9
1437 0.4 60 0.85
1438 0.5 80 0.8
1439 0.6 90 0.75
1440 method: Interpolation method when correcting from a table.
1441 Values are "nearest", "linear" (default), "cubic"
1442 and "spline"
1443 insitu: if False a new scantable is returned.
1444 Otherwise, the scaling is done in-situ
1445 The default is taken from .asaprc (False)
1446 """
1447
1448 if insitu is None: insitu = rcParams['insitu']
1449 self._math._setinsitu(insitu)
1450 varlist = vars()
1451 poly = poly or ()
1452 from os.path import expandvars
1453 filename = expandvars(filename)
1454 s = scantable(self._math._gainel(self, poly, filename, method))
1455 s._add_history("gain_el", varlist)
1456 print_log()
1457 if insitu:
1458 self._assign(s)
1459 else:
1460 return s
1461
1462 #@print_log_dec
1463 def freq_align(self, reftime=None, method='cubic', insitu=None):
1464 """
1465 Return a scan where all rows have been aligned in frequency/velocity.
1466 The alignment frequency frame (e.g. LSRK) is that set by function
1467 set_freqframe.
1468 Parameters:
1469 reftime: reference time to align at. By default, the time of
1470 the first row of data is used.
1471 method: Interpolation method for regridding the spectra.
1472 Choose from "nearest", "linear", "cubic" (default)
1473 and "spline"
1474 insitu: if False a new scantable is returned.
1475 Otherwise, the scaling is done in-situ
1476 The default is taken from .asaprc (False)
1477 """
1478 if insitu is None: insitu = rcParams["insitu"]
1479 self._math._setinsitu(insitu)
1480 varlist = vars()
1481 reftime = reftime or ""
1482 s = scantable(self._math._freq_align(self, reftime, method))
1483 s._add_history("freq_align", varlist)
1484 print_log()
1485 if insitu: self._assign(s)
1486 else: return s
1487
1488 #@print_log_dec
1489 def opacity(self, tau=None, insitu=None):
1490 """
1491 Apply an opacity correction. The data
1492 and Tsys are multiplied by the correction factor.
1493 Parameters:
1494 tau: (list of) opacity from which the correction factor is
1495 exp(tau*ZD)
1496 where ZD is the zenith-distance.
1497 If a list is provided, it has to be of length nIF,
1498 nIF*nPol or 1 and in order of IF/POL, e.g.
1499 [opif0pol0, opif0pol1, opif1pol0 ...]
1500 if tau is `None` the opacities are determined from a
1501 model.
1502 insitu: if False a new scantable is returned.
1503 Otherwise, the scaling is done in-situ
1504 The default is taken from .asaprc (False)
1505 """
1506 if insitu is None: insitu = rcParams['insitu']
1507 self._math._setinsitu(insitu)
1508 varlist = vars()
1509 if not hasattr(tau, "__len__"):
1510 tau = [tau]
1511 s = scantable(self._math._opacity(self, tau))
1512 s._add_history("opacity", varlist)
1513 print_log()
1514 if insitu: self._assign(s)
1515 else: return s
1516
1517 #@print_log_dec
1518 def bin(self, width=5, insitu=None):
1519 """
1520 Return a scan where all spectra have been binned up.
1521 Parameters:
1522 width: The bin width (default=5) in pixels
1523 insitu: if False a new scantable is returned.
1524 Otherwise, the scaling is done in-situ
1525 The default is taken from .asaprc (False)
1526 """
1527 if insitu is None: insitu = rcParams['insitu']
1528 self._math._setinsitu(insitu)
1529 varlist = vars()
1530 s = scantable(self._math._bin(self, width))
1531 s._add_history("bin", varlist)
1532 print_log()
1533 if insitu:
1534 self._assign(s)
1535 else:
1536 return s
1537
1538 #@print_log_dec
1539 def resample(self, width=5, method='cubic', insitu=None):
1540 """
1541 Return a scan where all spectra have been binned up.
1542
1543 Parameters:
1544 width: The bin width (default=5) in pixels
1545 method: Interpolation method when correcting from a table.
1546 Values are "nearest", "linear", "cubic" (default)
1547 and "spline"
1548 insitu: if False a new scantable is returned.
1549 Otherwise, the scaling is done in-situ
1550 The default is taken from .asaprc (False)
1551 """
1552 if insitu is None: insitu = rcParams['insitu']
1553 self._math._setinsitu(insitu)
1554 varlist = vars()
1555 s = scantable(self._math._resample(self, method, width))
1556 s._add_history("resample", varlist)
1557 print_log()
1558 if insitu: self._assign(s)
1559 else: return s
1560
1561 #@print_log_dec
1562 def average_pol(self, mask=None, weight='none'):
1563 """
1564 Average the Polarisations together.
1565 Parameters:
1566 mask: An optional mask defining the region, where the
1567 averaging will be applied. The output will have all
1568 specified points masked.
1569 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1570 weighted), or 'tsys' (1/Tsys**2 weighted)
1571 """
1572 varlist = vars()
1573 mask = mask or ()
1574 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1575 s._add_history("average_pol", varlist)
1576 print_log()
1577 return s
1578
1579 #@print_log_dec
1580 def average_beam(self, mask=None, weight='none'):
1581 """
1582 Average the Beams together.
1583 Parameters:
1584 mask: An optional mask defining the region, where the
1585 averaging will be applied. The output will have all
1586 specified points masked.
1587 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1588 weighted), or 'tsys' (1/Tsys**2 weighted)
1589 """
1590 varlist = vars()
1591 mask = mask or ()
1592 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1593 s._add_history("average_beam", varlist)
1594 print_log()
1595 return s
1596
1597 def parallactify(self, pflag):
1598 """
1599 Set a flag to indicate whether this data should be treated as having
1600 been 'parallactified' (total phase == 0.0)
1601 Parameters:
1602 pflag: Bool indicating whether to turn this on (True) or
1603 off (False)
1604 """
1605 varlist = vars()
1606 self._parallactify(pflag)
1607 self._add_history("parallactify", varlist)
1608
1609 #@print_log_dec
1610 def convert_pol(self, poltype=None):
1611 """
1612 Convert the data to a different polarisation type.
1613 Note that you will need cross-polarisation terms for most conversions.
1614 Parameters:
1615 poltype: The new polarisation type. Valid types are:
1616 "linear", "circular", "stokes" and "linpol"
1617 """
1618 varlist = vars()
1619 try:
1620 s = scantable(self._math._convertpol(self, poltype))
1621 except RuntimeError, msg:
1622 if rcParams['verbose']:
1623 #print msg
1624 print_log()
1625 asaplog.push( str(msg) )
1626 print_log( 'ERROR' )
1627 return
1628 else:
1629 raise
1630 s._add_history("convert_pol", varlist)
1631 print_log()
1632 return s
1633
1634 #@print_log_dec
1635 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1636 """
1637 Smooth the spectrum by the specified kernel (conserving flux).
1638 Parameters:
1639 kernel: The type of smoothing kernel. Select from
1640 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1641 or 'poly'
1642 width: The width of the kernel in pixels. For hanning this is
1643 ignored, otherwise it defaults to 5 pixels.
1644 For 'gaussian' it is the Full Width Half
1645 Maximum. For 'boxcar' it is the full width.
1646 For 'rmedian' and 'poly' it is the half width.
1647 order: Optional parameter for 'poly' kernel (default is 2), to
1648 specify the order of the polynomial. Ignored by all other
1649 kernels.
1650 plot: plot the original and the smoothed spectra.
1651 In this mode each individual fit has to be approved
1652 by typing 'y' or 'n'.
1653 insitu: if False a new scantable is returned.
1654 Otherwise, the scaling is done in-situ
1655 The default is taken from .asaprc (False)
1656 Example:
1657 none
1658 """
1659 if insitu is None: insitu = rcParams['insitu']
1660 self._math._setinsitu(insitu)
1661 varlist = vars()
1662
1663 if plot: orgscan = self.copy()
1664
1665 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1666 s._add_history("smooth", varlist)
1667
1668 if plot:
1669 if rcParams['plotter.gui']:
1670 from asap.asaplotgui import asaplotgui as asaplot
1671 else:
1672 from asap.asaplot import asaplot
1673 self._p=asaplot()
1674 self._p.set_panels()
1675 ylab=s._get_ordinate_label()
1676 #self._p.palette(0,["#777777","red"])
1677 for r in xrange(s.nrow()):
1678 xsm=s._getabcissa(r)
1679 ysm=s._getspectrum(r)
1680 xorg=orgscan._getabcissa(r)
1681 yorg=orgscan._getspectrum(r)
1682 self._p.clear()
1683 self._p.hold()
1684 self._p.set_axes('ylabel',ylab)
1685 self._p.set_axes('xlabel',s._getabcissalabel(r))
1686 self._p.set_axes('title',s._getsourcename(r))
1687 self._p.set_line(label='Original',color="#777777")
1688 self._p.plot(xorg,yorg)
1689 self._p.set_line(label='Smoothed',color="red")
1690 self._p.plot(xsm,ysm)
1691 ### Ugly part for legend
1692 for i in [0,1]:
1693 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1694 self._p.release()
1695 ### Ugly part for legend
1696 self._p.subplots[0]['lines']=[]
1697 res = raw_input("Accept smoothing ([y]/n): ")
1698 if res.upper() == 'N':
1699 s._setspectrum(yorg, r)
1700 self._p.unmap()
1701 self._p = None
1702 del orgscan
1703
1704 print_log()
1705 if insitu: self._assign(s)
1706 else: return s
1707
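# Usage sketch for smooth; the kernel width is illustrative:
#
#     sm = scan.smooth(kernel='gaussian', width=10, insitu=False)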
1708 #@print_log_dec
1709 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1710 insitu=None):
1711 """
1712 Return a scan which has been baselined (all rows) by a polynomial.
1713 Parameters:
1714 mask: an optional mask
1715 order: the order of the polynomial (default is 0)
1716 plot: plot the fit and the residual. In this each
1717 individual fit has to be approved by typing 'y'
1718 or 'n'
1719 uselin: use linear polynomial fit
1720 insitu: if False a new scantable is returned.
1721 Otherwise, the scaling is done in-situ
1722 The default is taken from .asaprc (False)
1723 Example:
1724 # return a scan baselined by a third order polynomial,
1725 # not using a mask
1726 bscan = scan.poly_baseline(order=3)
1727 """
1728 if insitu is None: insitu = rcParams['insitu']
1729 if not insitu:
1730 workscan = self.copy()
1731 else:
1732 workscan = self
1733 varlist = vars()
1734 if mask is None:
1735 mask = [True for i in xrange(self.nchan(-1))]
1736
1737 from asap.asapfitter import fitter
1738 try:
1739 f = fitter()
1740 if uselin:
1741 f.set_function(lpoly=order)
1742 else:
1743 f.set_function(poly=order)
1744
1745 rows = range(workscan.nrow())
1746 if len(rows) > 0:
1747 self.blpars = []
1748
1749 for r in rows:
1750 # take into account flagtra info (CAS-1434)
1751 flagtra = workscan._getmask(r)
1752 actualmask = mask[:]
1753 if len(actualmask) == 0:
1754 actualmask = list(flagtra[:])
1755 else:
1756 if len(actualmask) != len(flagtra):
1757 raise RuntimeError, "Mask and flagtra have different length"
1758 else:
1759 for i in range(0, len(actualmask)):
1760 actualmask[i] = actualmask[i] and flagtra[i]
1761 f.set_scan(workscan, actualmask)
1762 f.x = workscan._getabcissa(r)
1763 f.y = workscan._getspectrum(r)
1764 f.data = None
1765 f.fit()
1766 if plot:
1767 f.plot(residual=True)
1768 x = raw_input("Accept fit ( [y]/n ): ")
1769 if x.upper() == 'N':
1770 self.blpars.append(None)
1771 continue
1772 workscan._setspectrum(f.fitter.getresidual(), r)
1773 self.blpars.append(f.get_parameters())
1774
1775 if plot:
1776 f._p.unmap()
1777 f._p = None
1778 workscan._add_history("poly_baseline", varlist)
1779 print_log()
1780 if insitu: self._assign(workscan)
1781 else: return workscan
1782 except RuntimeError:
1783 msg = "The fit failed, possibly because it didn't converge."
1784 if rcParams['verbose']:
1785 #print msg
1786 print_log()
1787 asaplog.push( str(msg) )
1788 print_log( 'ERROR' )
1789 return
1790 else:
1791 raise RuntimeError(msg)
1792
1793
1794 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1795 threshold=3, chan_avg_limit=1, plot=False,
1796 insitu=None):
1797 """
1798 Return a scan which has been baselined (all rows) by a polynomial.
1799 Spectral lines are detected first using linefinder and masked out
1800 to avoid them affecting the baseline solution.
1801
1802 Parameters:
1803            mask:       an optional mask retrieved from the scantable
1804            edge:       an optional number of channels to drop at
1805                        the edge of the spectrum. If only one value is
1806                        specified, the same number will be dropped from
1807                        both sides of the spectrum. Default is to keep
1808                        all channels. Nested tuples represent individual
1809                        edge selections for different IFs (the number of
1810                        spectral channels can differ between IFs)
1811 order: the order of the polynomial (default is 0)
1812 threshold: the threshold used by line finder. It is better to
1813 keep it large as only strong lines affect the
1814 baseline solution.
1815 chan_avg_limit:
1816                        a maximum number of consecutive spectral channels to
1817                        average during the search for weak and broad lines.
1818 The default is no averaging (and no search for weak
1819 lines). If such lines can affect the fitted baseline
1820 (e.g. a high order polynomial is fitted), increase this
1821 parameter (usually values up to 8 are reasonable). Most
1822 users of this method should find the default value
1823 sufficient.
1824            plot:       plot the fit and the residual. In this case each
1825                        individual fit has to be approved, by typing 'y'
1826                        or 'n'
1827 insitu: if False a new scantable is returned.
1828 Otherwise, the scaling is done in-situ
1829 The default is taken from .asaprc (False)
1830
1831 Example:
1832 scan2=scan.auto_poly_baseline(order=7)
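            # per-IF edge selection (a hedged sketch; 'scan' is assumed to
            # contain at least two IFs): drop 10 channels at both ends of
            # IF0 and 20/30 channels at the ends of IF1
            scan3 = scan.auto_poly_baseline(order=3, edge=((10, 10), (20, 30)))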
1833 """
1834 if insitu is None: insitu = rcParams['insitu']
1835 varlist = vars()
1836 from asap.asapfitter import fitter
1837 from asap.asaplinefind import linefinder
1838 from asap import _is_sequence_or_number as _is_valid
1839
1840 # check whether edge is set up for each IF individually
1841        individualedge = False
1842        if len(edge) > 1:
1843            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1844                individualedge = True
1845
1846 if not _is_valid(edge, int) and not individualedge:
1847 raise ValueError, "Parameter 'edge' has to be an integer or a \
1848 pair of integers specified as a tuple. Nested tuples are allowed \
1849 to make individual selection for different IFs."
1850
1851 curedge = (0, 0)
1852 if individualedge:
1853 for edgepar in edge:
1854 if not _is_valid(edgepar, int):
1855 raise ValueError, "Each element of the 'edge' tuple has \
1856 to be a pair of integers or an integer."
1857 else:
1858            curedge = edge
1859
1860 # setup fitter
1861 f = fitter()
1862 f.set_function(poly=order)
1863
1864 # setup line finder
1865 fl = linefinder()
1866 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1867
1868 if not insitu:
1869 workscan = self.copy()
1870 else:
1871 workscan = self
1872
1873 fl.set_scan(workscan)
1874
1875 rows = range(workscan.nrow())
1876 # Save parameters of baseline fits & masklists as a class attribute.
1877 # NOTICE: It does not reflect changes in scantable!
1878 if len(rows) > 0:
1879 self.blpars=[]
1880 self.masklists=[]
1881 asaplog.push("Processing:")
1882 for r in rows:
1883 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1884 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1885 workscan.getpol(r), workscan.getcycle(r))
1886 asaplog.push(msg, False)
1887
1888 # figure out edge parameter
1889 if individualedge:
1890 if len(edge) >= workscan.getif(r):
1891                if workscan.getif(r) >= len(edge):
1892                    raise RuntimeError, "Number of edge elements appears to " \
1893                                        "be less than the number of IFs"
1894                curedge = edge[workscan.getif(r)]
1895 # take into account flagtra info (CAS-1434)
1896 flagtra = workscan._getmask(r)
1897 actualmask = mask[:]
1898 if len(actualmask) == 0:
1899 actualmask = list(flagtra[:])
1900 else:
1901 if len(actualmask) != len(flagtra):
1902                    raise RuntimeError, "Mask and flagtra have different lengths"
1903 else:
1904 for i in range(0, len(actualmask)):
1905 actualmask[i] = actualmask[i] and flagtra[i]
1906
1907 # setup line finder
1908 fl.find_lines(r, actualmask, curedge)
1909            outmask = fl.get_mask()
1910            f.set_scan(workscan, outmask)
1911 f.x = workscan._getabcissa(r)
1912 f.y = workscan._getspectrum(r)
1913 f.data = None
1914 f.fit()
1915
1916 # Show mask list
1917 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1918 msg = "mask range: "+str(masklist)
1919 asaplog.push(msg, False)
1920
1921 if plot:
1922 f.plot(residual=True)
1923 x = raw_input("Accept fit ( [y]/n ): ")
1924 if x.upper() == 'N':
1925 self.blpars.append(None)
1926 self.masklists.append(None)
1927 continue
1928
1929 workscan._setspectrum(f.fitter.getresidual(), r)
1930 self.blpars.append(f.get_parameters())
1931 self.masklists.append(masklist)
1932 if plot:
1933 f._p.unmap()
1934 f._p = None
1935 workscan._add_history("auto_poly_baseline", varlist)
1936 if insitu:
1937 self._assign(workscan)
1938 else:
1939 return workscan
1940
1941 #@print_log_dec
1942 def rotate_linpolphase(self, angle):
1943 """
1944 Rotate the phase of the complex polarization O=Q+iU correlation.
1945 This is always done in situ in the raw data. So if you call this
1946 function more than once then each call rotates the phase further.
1947 Parameters:
1948 angle: The angle (degrees) to rotate (add) by.
1949 Examples:
1950 scan.rotate_linpolphase(2.3)
1951 """
1952 varlist = vars()
1953 self._math._rotate_linpolphase(self, angle)
1954 self._add_history("rotate_linpolphase", varlist)
1955 print_log()
1956 return
1957
1958 #@print_log_dec
1959 def rotate_xyphase(self, angle):
1960 """
1961 Rotate the phase of the XY correlation. This is always done in situ
1962 in the data. So if you call this function more than once
1963 then each call rotates the phase further.
1964 Parameters:
1965 angle: The angle (degrees) to rotate (add) by.
1966 Examples:
1967 scan.rotate_xyphase(2.3)
1968 """
1969 varlist = vars()
1970 self._math._rotate_xyphase(self, angle)
1971 self._add_history("rotate_xyphase", varlist)
1972 print_log()
1973 return
1974
1975 #@print_log_dec
1976 def swap_linears(self):
1977 """
1978        Swap the linear polarisations XX and YY, or more generally the first
1979        two polarisations, as this also works for circulars.
1980 """
1981 varlist = vars()
1982 self._math._swap_linears(self)
1983 self._add_history("swap_linears", varlist)
1984 print_log()
1985 return
1986
1987 #@print_log_dec
1988 def invert_phase(self):
1989 """
1990 Invert the phase of the complex polarisation
1991 """
1992 varlist = vars()
1993 self._math._invert_phase(self)
1994 self._add_history("invert_phase", varlist)
1995 print_log()
1996 return
1997
1998 #@print_log_dec
1999 def add(self, offset, insitu=None):
2000 """
2001 Return a scan where all spectra have the offset added
2002 Parameters:
2003 offset: the offset
2004 insitu: if False a new scantable is returned.
2005 Otherwise, the scaling is done in-situ
2006 The default is taken from .asaprc (False)
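        Example:
            # a hedged sketch, assuming 'scan' is an existing scantable:
            # add a constant offset of 0.5 to every spectrum and return a copy
            shifted = scan.add(0.5, insitu=False)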
2007 """
2008 if insitu is None: insitu = rcParams['insitu']
2009 self._math._setinsitu(insitu)
2010 varlist = vars()
2011 s = scantable(self._math._unaryop(self, offset, "ADD", False))
2012 s._add_history("add", varlist)
2013 print_log()
2014 if insitu:
2015 self._assign(s)
2016 else:
2017 return s
2018
2019 #@print_log_dec
2020 def scale(self, factor, tsys=True, insitu=None):
2021 """
2022        Return a scan where all spectra are scaled by the given 'factor'
2023        Parameters:
2024            factor:      the scaling factor (float or 1D/2D float list)
2025 insitu: if False a new scantable is returned.
2026 Otherwise, the scaling is done in-situ
2027 The default is taken from .asaprc (False)
2028 tsys: if True (default) then apply the operation to Tsys
2029 as well as the data
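        Example:
            # a hedged sketch, assuming 'scan' is an existing scantable:
            # multiply all spectra (and Tsys) by 2.0, returning a new scantable
            doubled = scan.scale(2.0, tsys=True, insitu=False)
            # a per-channel 1D list is also accepted; its length is assumed
            # to match the number of channels of the selected IF
            # scaled = scan.scale([1.0] * scan.nchan(0), insitu=False)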
2030 """
2031 if insitu is None: insitu = rcParams['insitu']
2032 self._math._setinsitu(insitu)
2033 varlist = vars()
2034 s = None
2035 import numpy
2036 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2037 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2038 from asapmath import _array2dOp
2039 s = _array2dOp( self.copy(), factor, "MUL", tsys )
2040 else:
2041 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2042 else:
2043 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2044 s._add_history("scale", varlist)
2045 print_log()
2046 if insitu:
2047 self._assign(s)
2048 else:
2049 return s
2050
2051 def set_sourcetype(self, match, matchtype="pattern",
2052 sourcetype="reference"):
2053 """
2054        Set the type of the source to be a source or reference scan
2055        using the provided pattern.
2056 Parameters:
2057 match: a Unix style pattern, regular expression or selector
2058 matchtype: 'pattern' (default) UNIX style pattern or
2059 'regex' regular expression
2060 sourcetype: the type of the source to use (source/reference)
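        Example:
            # a hedged sketch: mark every scan whose source name ends in '_R'
            # as a reference scan, using a UNIX-style pattern
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')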
2061 """
2062 varlist = vars()
2063 basesel = self.get_selection()
2064 stype = -1
2065 if sourcetype.lower().startswith("r"):
2066 stype = 1
2067 elif sourcetype.lower().startswith("s"):
2068 stype = 0
2069 else:
2070 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2071 if matchtype.lower().startswith("p"):
2072 matchtype = "pattern"
2073 elif matchtype.lower().startswith("r"):
2074 matchtype = "regex"
2075 else:
2076 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2077 sel = selector()
2078 if isinstance(match, selector):
2079 sel = match
2080 else:
2081 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2082 self.set_selection(basesel+sel)
2083 self._setsourcetype(stype)
2084 self.set_selection(basesel)
2085 self._add_history("set_sourcetype", varlist)
2086
2087 #@print_log_dec
2088 def auto_quotient(self, preserve=True, mode='paired', verify=False):
2089 """
2090        This function builds quotients automatically.
2091        It assumes the observation has the same number of
2092        "ons" and "offs".
2093 Parameters:
2094 preserve: you can preserve (default) the continuum or
2095 remove it. The equations used are
2096 preserve: Output = Toff * (on/off) - Toff
2097 remove: Output = Toff * (on/off) - Ton
2098 mode: the on/off detection mode
2099 'paired' (default)
2100 identifies 'off' scans by the
2101 trailing '_R' (Mopra/Parkes) or
2102 '_e'/'_w' (Tid) and matches
2103 on/off pairs from the observing pattern
2104 'time'
2105 finds the closest off in time
2106
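        Example:
            # a hedged sketch, assuming 'scan' contains matched on/off scans:
            q = scan.auto_quotient(preserve=True, mode='paired')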
2107 """
2108 modes = ["time", "paired"]
2109 if not mode in modes:
2110            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
2111 raise ValueError(msg)
2112 varlist = vars()
2113 s = None
2114 if mode.lower() == "paired":
2115 basesel = self.get_selection()
2116 sel = selector()+basesel
2117 sel.set_query("SRCTYPE==1")
2118 self.set_selection(sel)
2119 offs = self.copy()
2120 sel.set_query("SRCTYPE==0")
2121 self.set_selection(sel)
2122 ons = self.copy()
2123 s = scantable(self._math._quotient(ons, offs, preserve))
2124 self.set_selection(basesel)
2125 elif mode.lower() == "time":
2126 s = scantable(self._math._auto_quotient(self, mode, preserve))
2127 s._add_history("auto_quotient", varlist)
2128 print_log()
2129 return s
2130
2131 #@print_log_dec
2132 def mx_quotient(self, mask = None, weight='median', preserve=True):
2133 """
2134 Form a quotient using "off" beams when observing in "MX" mode.
2135 Parameters:
2136 mask: an optional mask to be used when weight == 'stddev'
2137 weight: How to average the off beams. Default is 'median'.
2138 preserve: you can preserve (default) the continuum or
2139 remove it. The equations used are
2140 preserve: Output = Toff * (on/off) - Toff
2141 remove: Output = Toff * (on/off) - Ton
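        Example:
            # a hedged sketch, assuming 'scan' holds an MX-mode observation:
            q = scan.mx_quotient(weight='median', preserve=True)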
2142 """
2143 mask = mask or ()
2144 varlist = vars()
2145 on = scantable(self._math._mx_extract(self, 'on'))
2146 preoff = scantable(self._math._mx_extract(self, 'off'))
2147 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2148 from asapmath import quotient
2149 q = quotient(on, off, preserve)
2150 q._add_history("mx_quotient", varlist)
2151 print_log()
2152 return q
2153
2154 #@print_log_dec
2155 def freq_switch(self, insitu=None):
2156 """
2157 Apply frequency switching to the data.
2158 Parameters:
2159 insitu: if False a new scantable is returned.
2160                        Otherwise, the switching is done in-situ
2161 The default is taken from .asaprc (False)
2162 Example:
2163 none
2164 """
2165 if insitu is None: insitu = rcParams['insitu']
2166 self._math._setinsitu(insitu)
2167 varlist = vars()
2168 s = scantable(self._math._freqswitch(self))
2169 s._add_history("freq_switch", varlist)
2170 print_log()
2171 if insitu: self._assign(s)
2172 else: return s
2173
2174 #@print_log_dec
2175 def recalc_azel(self):
2176 """
2177 Recalculate the azimuth and elevation for each position.
2178 Parameters:
2179 none
2180 Example:
2181 """
2182 varlist = vars()
2183 self._recalcazel()
2184 self._add_history("recalc_azel", varlist)
2185 print_log()
2186 return
2187
2188 #@print_log_dec
2189 def __add__(self, other):
2190 """
2191 implicit on all axes and on Tsys
2192 """
2193 return self._operation( other, "ADD" )
2194
2195 #@print_log_dec
2196 def __sub__(self, other):
2197 """
2198 implicit on all axes and on Tsys
2199 """
2200 return self._operation( other, 'SUB' )
2201
2202 #@print_log_dec
2203 def __mul__(self, other):
2204 """
2205 implicit on all axes and on Tsys
2206 """
2207 return self._operation( other, 'MUL' )
2208
2209 #@print_log_dec
2210 def __div__(self, other):
2211 """
2212 implicit on all axes and on Tsys
2213 """
2214 return self._operation( other, 'DIV' )
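
    # Usage note (a hedged sketch, assuming 'scan' and 'other' are existing
    # scantables of matching shape): the operators above all delegate to
    # _operation(), so the following forms are expected to work:
    #     diff   = scan - other      # scantable (op) scantable
    #     scaled = scan * 2.0        # scantable (op) scalar
    #     ratio  = scan / 2.0        # scalar division (dividing by 0.0 raises)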
2215
2216 def get_fit(self, row=0):
2217 """
2218 Print or return the stored fits for a row in the scantable
2219 Parameters:
2220 row: the row which the fit has been applied to.
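        Example:
            # a hedged sketch: after a fit has been stored for row 0, retrieve
            # it as a dictionary (it is printed instead when
            # rcParams['verbose'] is True)
            fitinfo = scan.get_fit(row=0)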
2221 """
2222        if row >= self.nrow():
2223 return
2224 from asap.asapfit import asapfit
2225 fit = asapfit(self._getfit(row))
2226 if rcParams['verbose']:
2227 #print fit
2228 asaplog.push( '%s' %(fit) )
2229 print_log()
2230 return
2231 else:
2232 return fit.as_dict()
2233
2234 def flag_nans(self):
2235 """
2236 Utility function to flag NaN values in the scantable.
2237 """
2238 import numpy
2239 basesel = self.get_selection()
2240 for i in range(self.nrow()):
2241 sel = self.get_row_selector(i)
2242 self.set_selection(basesel+sel)
2243 nans = numpy.isnan(self._getspectrum(0))
2244 if numpy.any(nans):
2245 bnans = [ bool(v) for v in nans]
2246 self.flag(bnans)
2247 self.set_selection(basesel)
2248
2249 def get_row_selector(self, rowno):
2250 return selector(beams=self.getbeam(rowno),
2251 ifs=self.getif(rowno),
2252 pols=self.getpol(rowno),
2253 scans=self.getscan(rowno),
2254 cycles=self.getcycle(rowno))
2255
2256 def _add_history(self, funcname, parameters):
2257 if not rcParams['scantable.history']:
2258 return
2259 # create date
2260 sep = "##"
2261 from datetime import datetime
2262 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2263 hist = dstr+sep
2264 hist += funcname+sep#cdate+sep
2265 if parameters.has_key('self'): del parameters['self']
2266 for k, v in parameters.iteritems():
2267 if type(v) is dict:
2268 for k2, v2 in v.iteritems():
2269 hist += k2
2270 hist += "="
2271 if isinstance(v2, scantable):
2272 hist += 'scantable'
2273 elif k2 == 'mask':
2274 if isinstance(v2, list) or isinstance(v2, tuple):
2275 hist += str(self._zip_mask(v2))
2276 else:
2277 hist += str(v2)
2278 else:
2279 hist += str(v2)
2280 else:
2281 hist += k
2282 hist += "="
2283 if isinstance(v, scantable):
2284 hist += 'scantable'
2285 elif k == 'mask':
2286 if isinstance(v, list) or isinstance(v, tuple):
2287 hist += str(self._zip_mask(v))
2288 else:
2289 hist += str(v)
2290 else:
2291 hist += str(v)
2292 hist += sep
2293 hist = hist[:-2] # remove trailing '##'
2294 self._addhistory(hist)
2295
2296
2297 def _zip_mask(self, mask):
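        # Collapse a channel mask into [start, end) segments covering each run
        # of True values, e.g. [0, 1, 1, 0, 1] -> [[1, 3], [4, 5]]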
2298 mask = list(mask)
2299 i = 0
2300 segments = []
2301 while mask[i:].count(1):
2302 i += mask[i:].index(1)
2303 if mask[i:].count(0):
2304 j = i + mask[i:].index(0)
2305 else:
2306 j = len(mask)
2307 segments.append([i, j])
2308 i = j
2309 return segments
2310
2311 def _get_ordinate_label(self):
2312 fu = "("+self.get_fluxunit()+")"
2313 import re
2314 lbl = "Intensity"
2315 if re.match(".K.", fu):
2316 lbl = "Brightness Temperature "+ fu
2317 elif re.match(".Jy.", fu):
2318 lbl = "Flux density "+ fu
2319 return lbl
2320
2321 def _check_ifs(self):
2322 nchans = [self.nchan(i) for i in range(self.nif(-1))]
2323 nchans = filter(lambda t: t > 0, nchans)
2324        return all(n == nchans[0] for n in nchans)
2325
2326 def _fill(self, names, unit, average, getpt, antenna):
2327 import os
2328 from asap._asap import stfiller
2329 first = True
2330 fullnames = []
2331 for name in names:
2332 name = os.path.expandvars(name)
2333 name = os.path.expanduser(name)
2334 if not os.path.exists(name):
2335                msg = "File '%s' does not exist" % (name)
2336 if rcParams['verbose']:
2337 asaplog.push(msg)
2338 #print asaplog.pop().strip()
2339 print_log( 'ERROR' )
2340 return
2341 raise IOError(msg)
2342 fullnames.append(name)
2343 if average:
2344 asaplog.push('Auto averaging integrations')
2345 stype = int(rcParams['scantable.storage'].lower() == 'disk')
2346 for name in fullnames:
2347 tbl = Scantable(stype)
2348 r = stfiller(tbl)
2349 rx = rcParams['scantable.reference']
2350 r._setreferenceexpr(rx)
2351 msg = "Importing %s..." % (name)
2352 asaplog.push(msg, False)
2353 print_log()
2354 r._open(name, antenna, -1, -1, getpt)
2355 r._read()
2356 if average:
2357 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2358 if not first:
2359 tbl = self._math._merge([self, tbl])
2360 Scantable.__init__(self, tbl)
2361 r._close()
2362 del r, tbl
2363 first = False
2364 if unit is not None:
2365 self.set_fluxunit(unit)
2366 #self.set_freqframe(rcParams['scantable.freqframe'])
2367
2368 def __getitem__(self, key):
2369 if key < 0:
2370 key += self.nrow()
2371 if key >= self.nrow():
2372 raise IndexError("Row index out of range.")
2373 return self._getspectrum(key)
2374
2375 def __setitem__(self, key, value):
2376 if key < 0:
2377 key += self.nrow()
2378 if key >= self.nrow():
2379 raise IndexError("Row index out of range.")
2380 if not hasattr(value, "__len__") or \
2381 len(value) > self.nchan(self.getif(key)):
2382 raise ValueError("Spectrum length doesn't match.")
2383 return self._setspectrum(value, key)
2384
2385 def __len__(self):
2386 return self.nrow()
2387
2388 def __iter__(self):
2389 for i in range(len(self)):
2390 yield self[i]
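
    # Usage note (a hedged sketch, assuming 'scan' is an existing scantable):
    # the container protocol above gives row-wise access to the spectra, e.g.
    #     first = scan[0]           # spectrum of row 0
    #     nrows = len(scan)         # same as scan.nrow()
    #     for sp in scan: pass      # iterate over all spectra
    #     scan[0] = first           # write a spectrum back into row 0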
2391
2392 def _operation(self, other, opmode):
2393 varlist = vars()
2394 s = None
2395 import numpy
2396 if isinstance(other, scantable):
2397 s = scantable(self._math._binaryop(self.copy(), other, opmode))
2398 elif isinstance(other, float) or isinstance(other, int):
2399 if opmode == 'DIV' and float(other) == 0.0:
2400 raise ZeroDivisionError("Dividing by zero is not recommended")
2401 s = scantable(self._math._unaryop(self.copy(), other, opmode, False))
2402 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
2403 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
2404 from asapmath import _array2dOp
2405 s = _array2dOp( self.copy(), other, opmode, False )
2406 else:
2407 s = scantable(self._math._arrayop(self.copy(), other, opmode, False))
2408 else:
2409            raise TypeError("Other operand is not a scantable, float or float list")
2410 opdic = {}
2411 opdic['ADD'] = '+'
2412 opdic['SUB'] = '-'
2413 opdic['MUL'] = '*'
2414 opdic['DIV'] = '/'
2415 s._add_history("operator %s" % opdic[opmode], varlist)
2416 print_log()
2417 return s
2418
2419