source: branches/alma/python/scantable.py@ 1394

Last change on this file since 1394 was 1389, checked in by TakTsutsumi, 17 years ago

merged from NRAO version of ASAP2.1 with ALMA specific modifications

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 65.9 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26             average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62                     msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
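    # Illustrative usage sketch (not part of the original module): constructing
    # scantables as described above. 'mydata.rpf' is a placeholder file name.
    #
    #     from asap import scantable
    #     scan = scantable('mydata.rpf', average=True, unit='K')  # read + average on read
    #     ref = scantable(scan)                                   # reference, not a copy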
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
81 Parameters:
82             name: the name of the output file. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: If the file should be overwritten if it exists.
92                    The default (False) is to return with a warning
93                    without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
129 Parameters:
130 none
131 Example:
132 copiedscan = scan.copy()
133 """
134 sd = scantable(Scantable._copy(self))
135 return sd
136
137 def drop_scan(self, scanid=None):
138 """
139 Return a new scantable where the specified scan number(s) has(have)
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
143 """
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
177
178
179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
183 Note:
184 See scantable.drop_scan() for the inverse operation.
185 Parameters:
186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
188                     '*_R' gets all 'ref' scans
189 Example:
190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
194             # get a subset of scans by scanno (as listed in scan.summary())
195 newscan = scan.get_scan([0, 2, 7, 10])
196 """
197 if scanid is None:
198 if rcParams['verbose']:
199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
201 return
202 else:
203 raise RuntimeError("No scan given")
204
205 try:
206 bsel = self.get_selection()
207 sel = selector()
208 if type(scanid) is str:
209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
211 scopy = self._copy()
212 self.set_selection(bsel)
213 return scantable(scopy)
214 elif type(scanid) is int:
215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
217 scopy = self._copy()
218 self.set_selection(bsel)
219 return scantable(scopy)
220 elif type(scanid) is list:
221 sel.set_scans(scanid)
222                 self.set_selection(bsel+sel)
223 scopy = self._copy()
224 self.set_selection(bsel)
225 return scantable(scopy)
226 else:
227                 msg = "Illegal scanid type, use 'int' or 'list' of ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
232 except RuntimeError:
233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
235
236 def __str__(self):
237 return Scantable._summary(self, True)
238
239 def summary(self, filename=None):
240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
243             filename: the name of a file to write the output to
244 Default - no file output
247 """
248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
250 if filename is not None:
251             if filename == "":
252 filename = 'scantable_summary.txt'
253 from os.path import expandvars, isdir
254 filename = expandvars(filename)
255 if not isdir(filename):
256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
271 else:
272 return info
273
274
275 def get_selection(self):
276 """
277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
284 """
285 return selector(self._getselection())
286
287 def set_selection(self, selection=selector()):
288 """
289 Select a subset of the data. All following operations on this scantable
290         are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
295             sel.set_scans([0, 3]) # select SCANNO 0 and 3
296 scan.set_selection(sel) # set the selection
297             scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
299 """
300 self._setselection(selection)
301
302 def stats(self, stat='stddev', mask=None):
303 """
304 Determine the specified statistic of the current beam/if/pol
305 Takes a 'mask' as an optional parameter to specify which
306 channels should be excluded.
307 Parameters:
308 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
309 'var', 'stddev', 'avdev', 'rms', 'median'
310 mask: an optional mask specifying where the statistic
311 should be determined.
312 Example:
313 scan.set_unit('channel')
314 msk = scan.create_mask([100, 200], [500, 600])
315             scan.stats(stat='mean', mask=msk)
316 """
317 if mask == None:
318 mask = []
319 axes = ['Beam', 'IF', 'Pol', 'Time']
320 if not self._check_ifs():
321 raise ValueError("Cannot apply mask as the IFs have different "
322 "number of channels. Please use setselection() "
323 "to select individual IFs")
324
325 statvals = self._math._stats(self, mask, stat)
326 out = ''
327 axes = []
328 for i in range(self.nrow()):
329 axis = []
330 axis.append(self.getscan(i))
331 axis.append(self.getbeam(i))
332 axis.append(self.getif(i))
333 axis.append(self.getpol(i))
334 axis.append(self.getcycle(i))
335 axes.append(axis)
336 tm = self._gettime(i)
337 src = self._getsourcename(i)
338 out += 'Scan[%d] (%s) ' % (axis[0], src)
339 out += 'Time[%s]:\n' % (tm)
340 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
341 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
342 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
343 out += '= %3.3f\n' % (statvals[i])
344 out += "--------------------------------------------------\n"
345
346 if rcParams['verbose']:
347 print "--------------------------------------------------"
348 print " ", stat
349 print "--------------------------------------------------"
350 print out
351 #else:
352 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353 # 'axes' : axes,
354 # 'data': statvals}
355 return statvals
356
357 def stddev(self, mask=None):
358 """
359 Determine the standard deviation of the current beam/if/pol
360 Takes a 'mask' as an optional parameter to specify which
361 channels should be excluded.
362 Parameters:
363 mask: an optional mask specifying where the standard
364 deviation should be determined.
365
366 Example:
367 scan.set_unit('channel')
368 msk = scan.create_mask([100, 200], [500, 600])
369             scan.stddev(mask=msk)
370 """
371         return self.stats(stat='stddev', mask=mask)
372
373
374 def get_column_names(self):
375 """
376 Return a list of column names, which can be used for selection.
377 """
378 return list(Scantable.get_column_names(self))
379
380 def get_tsys(self):
381 """
382 Return the System temperatures.
383 Returns:
384 a list of Tsys values for the current selection
385 """
386
387 return self._row_callback(self._gettsys, "Tsys")
388
389 def _row_callback(self, callback, label):
390 axes = []
391 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
392 out = ""
393 outvec = []
394 for i in range(self.nrow()):
395 axis = []
396 axis.append(self.getscan(i))
397 axis.append(self.getbeam(i))
398 axis.append(self.getif(i))
399 axis.append(self.getpol(i))
400 axis.append(self.getcycle(i))
401 axes.append(axis)
402 tm = self._gettime(i)
403 src = self._getsourcename(i)
404 out += 'Scan[%d] (%s) ' % (axis[0], src)
405 out += 'Time[%s]:\n' % (tm)
406 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
407 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
408 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
409 outvec.append(callback(i))
410 out += '= %3.3f\n' % (outvec[i])
411 out += "--------------------------------------------------\n"
412 if rcParams['verbose']:
413 print "--------------------------------------------------"
414 print " %s" % (label)
415 print "--------------------------------------------------"
416 print out
417 # disabled because the vector seems more useful
418 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419 return outvec
420
421 def _get_column(self, callback, row=-1):
422 """
423 """
424 if row == -1:
425 return [callback(i) for i in range(self.nrow())]
426 else:
427 if 0 <= row < self.nrow():
428 return callback(row)
429
430
431 def get_time(self, row=-1, asdatetime=False):
432 """
433 Get a list of time stamps for the observations.
434 Return a datetime object for each integration time stamp in the scantable.
435 Parameters:
436             row: row no of integration. Default -1 returns all rows
437 asdatetime: return values as datetime objects rather than strings
438 Example:
439 none
440 """
441 from time import strptime
442 from datetime import datetime
443         times = self._get_column(self._gettime, row)
444         if not asdatetime:
445             return times
446         format = "%Y/%m/%d/%H:%M:%S"
447         if isinstance(times, list):
448             return [datetime(*strptime(i, format)[:6]) for i in times]
449         return datetime(*strptime(times, format)[:6])
450
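    # Illustrative sketch (assumes a scantable 'scan' is already loaded):
    #
    #     stamps = scan.get_time()                    # all rows, as strings
    #     t0 = scan.get_time(row=0, asdatetime=True)  # one row, as a datetime object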
451
452 def get_inttime(self, row=-1):
453 """
454 Get a list of integration times for the observations.
455 Return a time in seconds for each integration in the scantable.
456 Parameters:
457             row: row no of integration. Default -1 returns all rows.
458 Example:
459 none
460 """
461 return self._get_column(self._getinttime, row)
462
463
464 def get_sourcename(self, row=-1):
465 """
466         Get a list of source names for the observations.
467         Return a string for each integration in the scantable.
468         Parameters:
469             row: row no of integration. Default -1 returns all rows.
470 Example:
471 none
472 """
473 return self._get_column(self._getsourcename, row)
474
475 def get_elevation(self, row=-1):
476 """
477 Get a list of elevations for the observations.
478 Return a float for each integration in the scantable.
479 Parameters:
480             row: row no of integration. Default -1 returns all rows.
481 Example:
482 none
483 """
484 return self._get_column(self._getelevation, row)
485
486 def get_azimuth(self, row=-1):
487 """
488 Get a list of azimuths for the observations.
489 Return a float for each integration in the scantable.
490 Parameters:
491             row: row no of integration. Default -1 returns all rows.
492 Example:
493 none
494 """
495 return self._get_column(self._getazimuth, row)
496
497 def get_parangle(self, row=-1):
498 """
499 Get a list of parallactic angles for the observations.
500 Return a float for each integration in the scantable.
501 Parameters:
502             row: row no of integration. Default -1 returns all rows.
503 Example:
504 none
505 """
506 return self._get_column(self._getparangle, row)
507
508 def get_direction(self, row=-1):
509 """
510 Get a list of Positions on the sky (direction) for the observations.
511         Return the direction for each integration in the scantable.
512 Parameters:
513             row: row no of integration. Default -1 returns all rows
514 Example:
515 none
516 """
517 return self._get_column(self._getdirection, row)
518
519 def get_directionval(self, row=-1):
520 """
521 Get a list of Positions on the sky (direction) for the observations.
522         Return the direction values for each integration in the scantable.
523 Parameters:
524             row: row no of integration. Default -1 returns all rows
525 Example:
526 none
527 """
528 return self._get_column(self._getdirectionvec, row)
529
530
531 def set_unit(self, unit='channel'):
532 """
533 Set the unit for all following operations on this scantable
534 Parameters:
535 unit: optional unit, default is 'channel'
536 one of '*Hz', 'km/s', 'channel', ''
537 """
538 varlist = vars()
539 if unit in ['', 'pixel', 'channel']:
540 unit = ''
541 inf = list(self._getcoordinfo())
542 inf[0] = unit
543 self._setcoordinfo(inf)
544 self._add_history("set_unit", varlist)
545
546 def set_instrument(self, instr):
547 """
548 Set the instrument for subsequent processing.
549 Parameters:
550 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
551 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
552 """
553 self._setInstrument(instr)
554         self._add_history("set_instrument", vars())
555 print_log()
556
557 def set_feedtype(self, feedtype):
558 """
559 Overwrite the feed type, which might not be set correctly.
560 Parameters:
561 feedtype: 'linear' or 'circular'
562 """
563 self._setfeedtype(feedtype)
564 self._add_history("set_feedtype", vars())
565 print_log()
566
567 def set_doppler(self, doppler='RADIO'):
568 """
569 Set the doppler for all following operations on this scantable.
570 Parameters:
571 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
572 """
573 varlist = vars()
574 inf = list(self._getcoordinfo())
575 inf[2] = doppler
576 self._setcoordinfo(inf)
577 self._add_history("set_doppler", vars())
578 print_log()
579
580 def set_freqframe(self, frame=None):
581 """
582 Set the frame type of the Spectral Axis.
583 Parameters:
584 frame: an optional frame type, default 'LSRK'. Valid frames are:
585 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
586 'GEO', 'GALACTO', 'LGROUP', 'CMB'
587 Examples:
588 scan.set_freqframe('BARY')
589 """
590 if frame is None: frame = rcParams['scantable.freqframe']
591 varlist = vars()
592 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
593 'GEO', 'GALACTO', 'LGROUP', 'CMB']
594
595 if frame in valid:
596 inf = list(self._getcoordinfo())
597 inf[1] = frame
598 self._setcoordinfo(inf)
599 self._add_history("set_freqframe", varlist)
600 else:
601             msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
602 if rcParams['verbose']:
603 print msg
604 else:
605 raise TypeError(msg)
606 print_log()
607
608 def set_dirframe(self, frame=""):
609 """
610 Set the frame type of the Direction on the sky.
611 Parameters:
612 frame: an optional frame type, default ''. Valid frames are:
613 'J2000', 'B1950', 'GALACTIC'
614 Examples:
615 scan.set_dirframe('GALACTIC')
616 """
617 varlist = vars()
618 try:
619 Scantable.set_dirframe(self, frame)
620 except RuntimeError, msg:
621 if rcParams['verbose']:
622 print msg
623 else:
624 raise
625 self._add_history("set_dirframe", varlist)
626
627 def get_unit(self):
628 """
629 Get the default unit set in this scantable
630 Returns:
631 A unit string
632 """
633 inf = self._getcoordinfo()
634 unit = inf[0]
635 if unit == '': unit = 'channel'
636 return unit
637
638 def get_abcissa(self, rowno=0):
639 """
640 Get the abcissa in the current coordinate setup for the currently
641 selected Beam/IF/Pol
642 Parameters:
643 rowno: an optional row number in the scantable. Default is the
644 first row, i.e. rowno=0
645 Returns:
646 The abcissa values and the format string (as a dictionary)
647 """
648 abc = self._getabcissa(rowno)
649 lbl = self._getabcissalabel(rowno)
650 print_log()
651 return abc, lbl
652
653 def flag(self, mask=None):
654 """
655 Flag the selected data using an optional channel mask.
656 Parameters:
657 mask: an optional channel mask, created with create_mask. Default
658 (no mask) is all channels.
659 """
660 varlist = vars()
661 if mask is None:
662 mask = []
663 try:
664 self._flag(mask)
665 except RuntimeError, msg:
666 if rcParams['verbose']:
667 print msg
668 return
669 else: raise
670 self._add_history("flag", varlist)
671
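    # Illustrative sketch (assumes a scantable 'scan'; the channel ranges are
    # placeholders): flag the band edges by inverting an inclusion mask.
    #
    #     scan.set_unit('channel')
    #     msk = scan.create_mask([100, 900], invert=True)  # True outside 100-900
    #     scan.flag(mask=msk)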
672 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
673 """
674 Flag the data in 'lag' space by providing a frequency to remove.
675 Flagged data in the scantable gets set to 0.0 before the fft.
676 No taper is applied.
677 Parameters:
678 frequency: the frequency (really a period within the bandwidth)
679 to remove
680 width: the width of the frequency to remove, to remove a
681                        range of frequencies around the centre.
682 unit: the frequency unit (default "GHz")
683 Notes:
684 It is recommended to flag edges of the band or strong
685 signals beforehand.
686 """
687 if insitu is None: insitu = rcParams['insitu']
688 self._math._setinsitu(insitu)
689 varlist = vars()
690 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
691 if not base.has_key(unit):
692 raise ValueError("%s is not a valid unit." % unit)
693 try:
694 s = scantable(self._math._lag_flag(self, frequency*base[unit],
695 width*base[unit]))
696 except RuntimeError, msg:
697 if rcParams['verbose']:
698 print msg
699 return
700 else: raise
701 s._add_history("lag_flag", varlist)
702 print_log()
703 if insitu:
704 self._assign(s)
705 else:
706 return s
707
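    # Illustrative sketch (assumes a scantable 'scan' affected by a standing
    # wave; the 0.5 GHz period and 0.05 GHz width are placeholder values):
    #
    #     cleaned = scan.lag_flag(frequency=0.5, width=0.05, unit="GHz",
    #                             insitu=False)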
708
709 def create_mask(self, *args, **kwargs):
710 """
711 Compute and return a mask based on [min, max] windows.
712 The specified windows are to be INCLUDED, when the mask is
713 applied.
714 Parameters:
715 [min, max], [min2, max2], ...
716             Pairs of start/end points (inclusive) specifying the regions
717 to be masked
718 invert: optional argument. If specified as True,
719 return an inverted mask, i.e. the regions
720 specified are EXCLUDED
721 row: create the mask using the specified row for
722 unit conversions, default is row=0
723 only necessary if frequency varies over rows.
724 Example:
725 scan.set_unit('channel')
726 a)
727 msk = scan.create_mask([400, 500], [800, 900])
728 # masks everything outside 400 and 500
729 # and 800 and 900 in the unit 'channel'
730
731 b)
732 msk = scan.create_mask([400, 500], [800, 900], invert=True)
733 # masks the regions between 400 and 500
734 # and 800 and 900 in the unit 'channel'
735 c)
736 mask only channel 400
737 msk = scan.create_mask([400, 400])
738 """
739 row = 0
740 if kwargs.has_key("row"):
741 row = kwargs.get("row")
742 data = self._getabcissa(row)
743 u = self._getcoordinfo()[0]
744 if rcParams['verbose']:
745 if u == "": u = "channel"
746 msg = "The current mask window unit is %s" % u
747 i = self._check_ifs()
748 if not i:
749 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
750 asaplog.push(msg)
751 n = self.nchan()
752 msk = _n_bools(n, False)
753 # test if args is a 'list' or a 'normal *args - UGLY!!!
754
755 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
756 and args or args[0]
757 for window in ws:
758 if (len(window) != 2 or window[0] > window[1] ):
759 raise TypeError("A window needs to be defined as [min, max]")
760 for i in range(n):
761 if data[i] >= window[0] and data[i] <= window[1]:
762 msk[i] = True
763 if kwargs.has_key('invert'):
764 if kwargs.get('invert'):
765 msk = mask_not(msk)
766 print_log()
767 return msk
768
769 def get_restfreqs(self):
770 """
771 Get the restfrequency(s) stored in this scantable.
772 The return value(s) are always of unit 'Hz'
773 Parameters:
774 none
775 Returns:
776 a list of doubles
777 """
778 return list(self._getrestfreqs())
779
780
781 def set_restfreqs(self, freqs=None, unit='Hz'):
782 """
783         Set or replace the restfrequency specified.
784 If the 'freqs' argument holds a scalar,
785 then that rest frequency will be applied to all the selected
786 data. If the 'freqs' argument holds
787 a vector, then it MUST be of equal or smaller length than
788 the number of IFs (and the available restfrequencies will be
789 replaced by this vector). In this case, *all* data have
790 the restfrequency set per IF according
791 to the corresponding value you give in the 'freqs' vector.
792 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
793 IF 1 gets restfreq 2e9.
794         You can also specify the frequencies via a linecatalog.
795
796 Parameters:
797             freqs: list of rest frequency values or string identifiers
798 unit: unit for rest frequency (default 'Hz')
799
800 Example:
801 # set the given restfrequency for the whole table
802 scan.set_restfreqs(freqs=1.4e9)
803             # If the number of IFs in the data is >= 2, IF0 gets the first
804 # value IF1 the second...
805 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
806 #set the given restfrequency for the whole table (by name)
807 scan.set_restfreqs(freqs="OH1667")
808
809 Note:
810             To do more sophisticated restfrequency setting, e.g. on a
811 source and IF basis, use scantable.set_selection() before using
812 this function.
813             # provided your scantable is called scan
814 selection = selector()
815 selection.set_name("ORION*")
816 selection.set_ifs([1])
817 scan.set_selection(selection)
818 scan.set_restfreqs(freqs=86.6e9)
819
820 """
821 varlist = vars()
822 from asap import linecatalog
823 # simple value
824 if isinstance(freqs, int) or isinstance(freqs, float):
825 self._setrestfreqs(freqs, "",unit)
826 # list of values
827 elif isinstance(freqs, list) or isinstance(freqs, tuple):
828 # list values are scalars
829 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
830 sel = selector()
831 savesel = self._getselection()
832 iflist = self.getifnos()
833 for i in xrange(len(freqs)):
834 sel.set_ifs(iflist[i])
835 self._setselection(sel)
836 self._setrestfreqs(freqs[i], "",unit)
837 self._setselection(savesel)
838             # list values are dicts, {'value': ..., 'name': ...}
839 elif isinstance(freqs[-1], dict):
840 sel = selector()
841 savesel = self._getselection()
842 iflist = self.getifnos()
843 for i in xrange(len(freqs)):
844 sel.set_ifs(iflist[i])
845 self._setselection(sel)
846 self._setrestfreqs(freqs[i]["value"],
847 freqs[i]["name"], "MHz")
848 self._setselection(savesel)
849 # freqs are to be taken from a linecatalog
850 elif isinstance(freqs, linecatalog):
851 sel = selector()
852 savesel = self._getselection()
853 iflist = self.getifnos()
854 for i in xrange(freqs.nrow()):
855 sel.set_ifs(iflist[i])
856 self._setselection(sel)
857 self._setrestfreqs(freqs.get_frequency(i),
858 freqs.get_name(i), "MHz")
859 # ensure that we are not iterating past nIF
860 if i == self.nif()-1: break
861 self._setselection(savesel)
862 else:
863 return
864 self._add_history("set_restfreqs", varlist)
865
866 def shift_refpix(self, delta):
867 """
868         Shift the reference pixel of the Spectral Coordinate by an
869 integer amount.
870 Parameters:
871 delta: the amount to shift by
872 Note:
873 Be careful using this with broadband data.
874 """
875 Scantable.shift(self, delta)
876
877 def history(self, filename=None):
878 """
879 Print the history. Optionally to a file.
880 Parameters:
881 filename: The name of the file to save the history to.
882 """
883 hist = list(self._gethistory())
884 out = "-"*80
885 for h in hist:
886 if h.startswith("---"):
887 out += "\n"+h
888 else:
889 items = h.split("##")
890 date = items[0]
891 func = items[1]
892 items = items[2:]
893 out += "\n"+date+"\n"
894 out += "Function: %s\n Parameters:" % (func)
895 for i in items:
896 s = i.split("=")
897 out += "\n %s = %s" % (s[0], s[1])
898 out += "\n"+"-"*80
899 if filename is not None:
900             if filename == "":
901 filename = 'scantable_history.txt'
902 import os
903 filename = os.path.expandvars(os.path.expanduser(filename))
904 if not os.path.isdir(filename):
905 data = open(filename, 'w')
906 data.write(out)
907 data.close()
908 else:
909 msg = "Illegal file name '%s'." % (filename)
910 if rcParams['verbose']:
911 print msg
912 else:
913 raise IOError(msg)
914 if rcParams['verbose']:
915 try:
916 from IPython.genutils import page as pager
917 except ImportError:
918 from pydoc import pager
919 pager(out)
920 else:
921 return out
922 return
923 #
924 # Maths business
925 #
926
927 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
928 """
929 Return the (time) weighted average of a scan.
930 Note:
931 in channels only - align if necessary
932 Parameters:
933 mask: an optional mask (only used for 'var' and 'tsys'
934 weighting)
935 scanav: True averages each scan separately
936 False (default) averages all scans together,
937 weight: Weighting scheme.
938 'none' (mean no weight)
939 'var' (1/var(spec) weighted)
940 'tsys' (1/Tsys**2 weighted)
941 'tint' (integration time weighted)
942 'tintsys' (Tint/Tsys**2)
943 'median' ( median averaging)
944 The default is 'tint'
945 align: align the spectra in velocity before averaging. It takes
946 the time of the first spectrum as reference time.
947 Example:
948 # time average the scantable without using a mask
949 newscan = scan.average_time()
950 """
951 varlist = vars()
952 if weight is None: weight = 'TINT'
953 if mask is None: mask = ()
954 if scanav: scanav = "SCAN"
955 else: scanav = "NONE"
956 scan = (self, )
957 try:
958 if align:
959 scan = (self.freq_align(insitu=False), )
960 s = None
961 if weight.upper() == 'MEDIAN':
962 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
963 scanav))
964 else:
965 s = scantable(self._math._average(scan, mask, weight.upper(),
966 scanav))
967 except RuntimeError, msg:
968 if rcParams['verbose']:
969 print msg
970 return
971 else: raise
972 s._add_history("average_time", varlist)
973 print_log()
974 return s
975
976 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
977 """
978 Return a scan where all spectra are converted to either
979 Jansky or Kelvin depending upon the flux units of the scan table.
980 By default the function tries to look the values up internally.
981 If it can't find them (or if you want to over-ride), you must
982 specify EITHER jyperk OR eta (and D which it will try to look up
983 also if you don't set it). jyperk takes precedence if you set both.
984 Parameters:
985 jyperk: the Jy / K conversion factor
986 eta: the aperture efficiency
987             d: the geometric diameter (metres)
988 insitu: if False a new scantable is returned.
989 Otherwise, the scaling is done in-situ
990 The default is taken from .asaprc (False)
991 """
992 if insitu is None: insitu = rcParams['insitu']
993 self._math._setinsitu(insitu)
994 varlist = vars()
995 if jyperk is None: jyperk = -1.0
996 if d is None: d = -1.0
997 if eta is None: eta = -1.0
998 s = scantable(self._math._convertflux(self, d, eta, jyperk))
999 s._add_history("convert_flux", varlist)
1000 print_log()
1001 if insitu: self._assign(s)
1002 else: return s
1003
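    # Illustrative sketch (assumes a scantable 'scan' in Kelvin; the Jy/K
    # factor 12.7 is a placeholder, not a calibrated value):
    #
    #     jyscan = scan.convert_flux(jyperk=12.7, insitu=False)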
1004 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1005 """
1006 Return a scan after applying a gain-elevation correction.
1007 The correction can be made via either a polynomial or a
1008 table-based interpolation (and extrapolation if necessary).
1009 You specify polynomial coefficients, an ascii table or neither.
1010 If you specify neither, then a polynomial correction will be made
1011 with built in coefficients known for certain telescopes (an error
1012 will occur if the instrument is not known).
1013 The data and Tsys are *divided* by the scaling factors.
1014 Parameters:
1015 poly: Polynomial coefficients (default None) to compute a
1016 gain-elevation correction as a function of
1017 elevation (in degrees).
1018 filename: The name of an ascii file holding correction factors.
1019 The first row of the ascii file must give the column
1020 names and these MUST include columns
1021 "ELEVATION" (degrees) and "FACTOR" (multiply data
1022 by this) somewhere.
1023 The second row must give the data type of the
1024 column. Use 'R' for Real and 'I' for Integer.
1025 An example file would be
1026 (actual factors are arbitrary) :
1027
1028 TIME ELEVATION FACTOR
1029 R R R
1030 0.1 0 0.8
1031 0.2 20 0.85
1032 0.3 40 0.9
1033 0.4 60 0.85
1034 0.5 80 0.8
1035 0.6 90 0.75
1036 method: Interpolation method when correcting from a table.
1037 Values are "nearest", "linear" (default), "cubic"
1038 and "spline"
1039 insitu: if False a new scantable is returned.
1040 Otherwise, the scaling is done in-situ
1041 The default is taken from .asaprc (False)
1042 """
1043
1044 if insitu is None: insitu = rcParams['insitu']
1045 self._math._setinsitu(insitu)
1046 varlist = vars()
1047 if poly is None:
1048 poly = ()
1049 from os.path import expandvars
1050 filename = expandvars(filename)
1051 s = scantable(self._math._gainel(self, poly, filename, method))
1052 s._add_history("gain_el", varlist)
1053 print_log()
1054 if insitu: self._assign(s)
1055 else: return s
1056
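    # Illustrative sketch (assumes a scantable 'scan'; both the polynomial
    # coefficients and the file name 'gaincurve.txt' are placeholders):
    #
    #     corrected = scan.gain_el(poly=[1.0, -0.005, 2.0e-5], insitu=False)
    #     corrected = scan.gain_el(filename='gaincurve.txt', method='linear',
    #                              insitu=False)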
1057 def freq_align(self, reftime=None, method='cubic', insitu=None):
1058 """
1059 Return a scan where all rows have been aligned in frequency/velocity.
1060 The alignment frequency frame (e.g. LSRK) is that set by function
1061 set_freqframe.
1062 Parameters:
1063 reftime: reference time to align at. By default, the time of
1064 the first row of data is used.
1065 method: Interpolation method for regridding the spectra.
1066 Choose from "nearest", "linear", "cubic" (default)
1067 and "spline"
1068 insitu: if False a new scantable is returned.
1069 Otherwise, the scaling is done in-situ
1070 The default is taken from .asaprc (False)
1071 """
1072 if insitu is None: insitu = rcParams["insitu"]
1073 self._math._setinsitu(insitu)
1074 varlist = vars()
1075 if reftime is None: reftime = ""
1076 s = scantable(self._math._freq_align(self, reftime, method))
1077 s._add_history("freq_align", varlist)
1078 print_log()
1079 if insitu: self._assign(s)
1080 else: return s
1081
1082 def opacity(self, tau, insitu=None):
1083 """
1084 Apply an opacity correction. The data
1085 and Tsys are multiplied by the correction factor.
1086 Parameters:
1087 tau: Opacity from which the correction factor is
1088 exp(tau*ZD)
1089 where ZD is the zenith-distance
1090 insitu: if False a new scantable is returned.
1091 Otherwise, the scaling is done in-situ
1092 The default is taken from .asaprc (False)
1093 """
1094 if insitu is None: insitu = rcParams['insitu']
1095 self._math._setinsitu(insitu)
1096 varlist = vars()
1097 s = scantable(self._math._opacity(self, tau))
1098 s._add_history("opacity", varlist)
1099 print_log()
1100 if insitu: self._assign(s)
1101 else: return s
1102
1103 def bin(self, width=5, insitu=None):
1104 """
1105 Return a scan where all spectra have been binned up.
1106 Parameters:
1107 width: The bin width (default=5) in pixels
1108 insitu: if False a new scantable is returned.
1109 Otherwise, the scaling is done in-situ
1110 The default is taken from .asaprc (False)
1111 """
1112 if insitu is None: insitu = rcParams['insitu']
1113 self._math._setinsitu(insitu)
1114 varlist = vars()
1115 s = scantable(self._math._bin(self, width))
1116 s._add_history("bin", varlist)
1117 print_log()
1118 if insitu: self._assign(s)
1119 else: return s
1120
1121
1122 def resample(self, width=5, method='cubic', insitu=None):
1123 """
1124         Return a scan where all spectra have been resampled.
1125
1126 Parameters:
1127 width: The bin width (default=5) in pixels
1128             method: Interpolation method for the resampling.
1129 Values are "nearest", "linear", "cubic" (default)
1130 and "spline"
1131 insitu: if False a new scantable is returned.
1132 Otherwise, the scaling is done in-situ
1133 The default is taken from .asaprc (False)
1134 """
1135 if insitu is None: insitu = rcParams['insitu']
1136 self._math._setinsitu(insitu)
1137 varlist = vars()
1138 s = scantable(self._math._resample(self, method, width))
1139 s._add_history("resample", varlist)
1140 print_log()
1141 if insitu: self._assign(s)
1142 else: return s
1143
1144
1145 def average_pol(self, mask=None, weight='none'):
1146 """
1147 Average the Polarisations together.
1148 Parameters:
1149 mask: An optional mask defining the region, where the
1150 averaging will be applied. The output will have all
1151 specified points masked.
1152 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1153 weighted), or 'tsys' (1/Tsys**2 weighted)
1154 """
1155 varlist = vars()
1156 if mask is None:
1157 mask = ()
1158 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1159 s._add_history("average_pol", varlist)
1160 print_log()
1161 return s
1162
1163 def average_beam(self, mask=None, weight='none'):
1164 """
1165 Average the Beams together.
1166 Parameters:
1167 mask: An optional mask defining the region, where the
1168 averaging will be applied. The output will have all
1169 specified points masked.
1170 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1171 weighted), or 'tsys' (1/Tsys**2 weighted)
1172 """
1173 varlist = vars()
1174 if mask is None:
1175 mask = ()
1176 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1177 s._add_history("average_beam", varlist)
1178 print_log()
1179 return s
1180
1181 def convert_pol(self, poltype=None):
1182 """
1183 Convert the data to a different polarisation type.
1184 Parameters:
1185 poltype: The new polarisation type. Valid types are:
1186 "linear", "stokes" and "circular"
1187 """
1188 varlist = vars()
1189 try:
1190 s = scantable(self._math._convertpol(self, poltype))
1191 except RuntimeError, msg:
1192 if rcParams['verbose']:
1193 print msg
1194 return
1195 else:
1196 raise
1197 s._add_history("convert_pol", varlist)
1198 print_log()
1199 return s
1200
1201 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1202 """
1203 Smooth the spectrum by the specified kernel (conserving flux).
1204 Parameters:
1205 kernel: The type of smoothing kernel. Select from
1206 'hanning' (default), 'gaussian', 'boxcar' and
1207 'rmedian'
1208             width: The width of the kernel in pixels. For 'hanning' this is
1209                    ignored; otherwise it defaults to 5 pixels.
1210 For 'gaussian' it is the Full Width Half
1211 Maximum. For 'boxcar' it is the full width.
1212 For 'rmedian' it is the half width.
1213 insitu: if False a new scantable is returned.
1214 Otherwise, the scaling is done in-situ
1215 The default is taken from .asaprc (False)
1216 Example:
1217 none
1218 """
1219 if insitu is None: insitu = rcParams['insitu']
1220 self._math._setinsitu(insitu)
1221 varlist = vars()
1222 s = scantable(self._math._smooth(self, kernel.lower(), width))
1223 s._add_history("smooth", varlist)
1224 print_log()
1225 if insitu: self._assign(s)
1226 else: return s
1227
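    # Illustrative sketch (assumes a scantable 'scan'):
    #
    #     sm = scan.smooth()                                        # hanning (default)
    #     sm = scan.smooth(kernel='boxcar', width=9, insitu=False)  # 9-channel boxcar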
1228
1229 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1230 """
1231 Return a scan which has been baselined (all rows) by a polynomial.
1232 Parameters:
1233 mask: an optional mask
1234 order: the order of the polynomial (default is 0)
1235             plot: plot the fit and the residual. In this mode each
1236                   individual fit has to be approved, by typing 'y'
1237                   or 'n'
1238 uselin: use linear polynomial fit
1239 insitu: if False a new scantable is returned.
1240 Otherwise, the scaling is done in-situ
1241 The default is taken from .asaprc (False)
1242 Example:
1243 # return a scan baselined by a third order polynomial,
1244 # not using a mask
1245 bscan = scan.poly_baseline(order=3)
1246 """
1247 if insitu is None: insitu = rcParams['insitu']
1248 varlist = vars()
1249 if mask is None:
1250 mask = [True for i in xrange(self.nchan(-1))]
1251 from asap.asapfitter import fitter
1252 try:
1253 f = fitter()
1254 f.set_scan(self, mask)
1255 #f.set_function(poly=order)
1256 if uselin:
1257 f.set_function(lpoly=order)
1258 else:
1259 f.set_function(poly=order)
1260 s = f.auto_fit(insitu, plot=plot)
1261 s._add_history("poly_baseline", varlist)
1262 print_log()
1263 if insitu: self._assign(s)
1264 else: return s
1265 except RuntimeError:
1266 msg = "The fit failed, possibly because it didn't converge."
1267 if rcParams['verbose']:
1268 print msg
1269 return
1270 else:
1271 raise RuntimeError(msg)
1272
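    # Illustrative sketch (assumes a scantable 'scan' with a line roughly in
    # channels 400-500; the window values are placeholders): fit the baseline
    # only over line-free regions selected with create_mask.
    #
    #     scan.set_unit('channel')
    #     msk = scan.create_mask([0, 350], [550, scan.nchan()-1])
    #     bscan = scan.poly_baseline(mask=msk, order=2, insitu=False)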
1273
1274 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1275 threshold=3, chan_avg_limit=1, plot=False,
1276 insitu=None):
1277 """
1278 Return a scan which has been baselined (all rows) by a polynomial.
1279 Spectral lines are detected first using linefinder and masked out
1280 to avoid them affecting the baseline solution.
1281
1282 Parameters:
1283             mask: an optional mask retrieved from scantable
1284             edge: an optional number of channels to drop at
1285                   the edge of the spectrum. If only one value is
1286                   specified, the same number will be dropped from
1287                   both sides of the spectrum. Default is to keep
1288                   all channels. Nested tuples represent individual
1289                   edge selections for different IFs (the number of spectral
1290                   channels can differ between IFs)
1291 order: the order of the polynomial (default is 0)
1292 threshold: the threshold used by line finder. It is better to
1293 keep it large as only strong lines affect the
1294 baseline solution.
1295 chan_avg_limit:
1296                 a maximum number of consecutive spectral channels to
1297 average during the search of weak and broad lines.
1298 The default is no averaging (and no search for weak
1299 lines). If such lines can affect the fitted baseline
1300 (e.g. a high order polynomial is fitted), increase this
1301 parameter (usually values up to 8 are reasonable). Most
1302 users of this method should find the default value
1303 sufficient.
1304             plot: plot the fit and the residual. In this mode each
1305                   individual fit has to be approved, by typing 'y'
1306                   or 'n'
1307 insitu: if False a new scantable is returned.
1308 Otherwise, the scaling is done in-situ
1309 The default is taken from .asaprc (False)
1310
1311 Example:
1312 scan2=scan.auto_poly_baseline(order=7)
1313 """
1314 if insitu is None: insitu = rcParams['insitu']
1315 varlist = vars()
1316 from asap.asapfitter import fitter
1317 from asap.asaplinefind import linefinder
1318 from asap import _is_sequence_or_number as _is_valid
1319
1320 # check whether edge is set up for each IF individually
1321 individualedge = False;
1322 if len(edge) > 1:
1323 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1324 individualedge = True;
1325
1326 if not _is_valid(edge, int) and not individualedge:
1327 raise ValueError, "Parameter 'edge' has to be an integer or a \
1328 pair of integers specified as a tuple. Nested tuples are allowed \
1329 to make individual selection for different IFs."
1330
1331 curedge = (0, 0)
1332 if individualedge:
1333 for edgepar in edge:
1334 if not _is_valid(edgepar, int):
1335 raise ValueError, "Each element of the 'edge' tuple has \
1336 to be a pair of integers or an integer."
1337 else:
1338 curedge = edge;
1339
1340 # setup fitter
1341 f = fitter()
1342 f.set_function(poly=order)
1343
1344 # setup line finder
1345 fl = linefinder()
1346 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1347
1348 if not insitu:
1349 workscan = self.copy()
1350 else:
1351 workscan = self
1352
1353 fl.set_scan(workscan)
1354
1355 rows = range(workscan.nrow())
1356 asaplog.push("Processing:")
1357 for r in rows:
1358 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1359 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1360 workscan.getpol(r), workscan.getcycle(r))
1361 asaplog.push(msg, False)
1362
1363 # figure out edge parameter
1364 if individualedge:
1365                 if workscan.getif(r) >= len(edge):
1366                     raise RuntimeError, "Number of edge elements appears to " \
1367                                         "be less than the number of IFs"
1368                 curedge = edge[workscan.getif(r)]
1369
1370 # setup line finder
1371 fl.find_lines(r, mask, curedge)
1372 f.set_scan(workscan, fl.get_mask())
1373 f.x = workscan._getabcissa(r)
1374 f.y = workscan._getspectrum(r)
1375 f.data = None
1376 f.fit()
1377 x = f.get_parameters()
1378 if plot:
1379 f.plot(residual=True)
1380 x = raw_input("Accept fit ( [y]/n ): ")
1381 if x.upper() == 'N':
1382 continue
1383 workscan._setspectrum(f.fitter.getresidual(), r)
1384 if plot:
1385 f._p.unmap()
1386 f._p = None
1387 workscan._add_history("auto_poly_baseline", varlist)
1388 if insitu:
1389 self._assign(workscan)
1390 else:
1391 return workscan
1392
1393 def rotate_linpolphase(self, angle):
1394 """
1395 Rotate the phase of the complex polarization O=Q+iU correlation.
1396 This is always done in situ in the raw data. So if you call this
1397 function more than once then each call rotates the phase further.
1398 Parameters:
1399 angle: The angle (degrees) to rotate (add) by.
1400 Examples:
1401 scan.rotate_linpolphase(2.3)
1402 """
1403 varlist = vars()
1404 self._math._rotate_linpolphase(self, angle)
1405 self._add_history("rotate_linpolphase", varlist)
1406 print_log()
1407 return
1408
1409
1410 def rotate_xyphase(self, angle):
1411 """
1412 Rotate the phase of the XY correlation. This is always done in situ
1413 in the data. So if you call this function more than once
1414 then each call rotates the phase further.
1415 Parameters:
1416 angle: The angle (degrees) to rotate (add) by.
1417 Examples:
1418 scan.rotate_xyphase(2.3)
1419 """
1420 varlist = vars()
1421 self._math._rotate_xyphase(self, angle)
1422 self._add_history("rotate_xyphase", varlist)
1423 print_log()
1424 return
1425
1426 def swap_linears(self):
1427 """
1428         Swap the linear polarisations XX and YY, or more generally the first two
1429         polarisations, as this also works for circulars.
1430 """
1431 varlist = vars()
1432 self._math._swap_linears(self)
1433 self._add_history("swap_linears", varlist)
1434 print_log()
1435 return
1436
1437 def invert_phase(self):
1438 """
1439 Invert the phase of the complex polarisation
1440 """
1441 varlist = vars()
1442 self._math._invert_phase(self)
1443 self._add_history("invert_phase", varlist)
1444 print_log()
1445 return
1446
1447 def add(self, offset, insitu=None):
1448 """
1449 Return a scan where all spectra have the offset added
1450 Parameters:
1451 offset: the offset
1452 insitu: if False a new scantable is returned.
1453 Otherwise, the scaling is done in-situ
1454 The default is taken from .asaprc (False)
1455 """
1456 if insitu is None: insitu = rcParams['insitu']
1457 self._math._setinsitu(insitu)
1458 varlist = vars()
1459 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1460 s._add_history("add", varlist)
1461 print_log()
1462 if insitu:
1463 self._assign(s)
1464 else:
1465 return s
1466
1467 def scale(self, factor, tsys=True, insitu=None):
1468 """
1469         Return a scan where all spectra are scaled by the given 'factor'
1470 Parameters:
1471 factor: the scaling factor
1472 insitu: if False a new scantable is returned.
1473 Otherwise, the scaling is done in-situ
1474 The default is taken from .asaprc (False)
1475 tsys: if True (default) then apply the operation to Tsys
1476 as well as the data
1477 """
1478 if insitu is None: insitu = rcParams['insitu']
1479 self._math._setinsitu(insitu)
1480 varlist = vars()
1481 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1482 s._add_history("scale", varlist)
1483 print_log()
1484 if insitu:
1485 self._assign(s)
1486 else:
1487 return s
1488
1489 def auto_quotient(self, preserve=True, mode='paired'):
1490 """
1491         This function allows you to build quotients automatically.
1492         It assumes the observation has the same number of
1493         "ons" and "offs"
1494 Parameters:
1495 preserve: you can preserve (default) the continuum or
1496 remove it. The equations used are
1497 preserve: Output = Toff * (on/off) - Toff
1498 remove: Output = Toff * (on/off) - Ton
1499 mode: the on/off detection mode
1500 'paired' (default)
1501 identifies 'off' scans by the
1502 trailing '_R' (Mopra/Parkes) or
1503 '_e'/'_w' (Tid) and matches
1504 on/off pairs from the observing pattern
1505 'time'
1506 finds the closest off in time
1507
1508 """
1509 modes = ["time", "paired"]
1510 if not mode in modes:
1511 msg = "please provide valid mode. Valid modes are %s" % (modes)
1512 raise ValueError(msg)
1513 varlist = vars()
1514 s = None
1515 if mode.lower() == "paired":
1516 basesel = self.get_selection()
1517 sel = selector()+basesel
1518 sel.set_query("SRCTYPE==1")
1519 self.set_selection(sel)
1520 offs = self.copy()
1521 sel.set_query("SRCTYPE==0")
1522 self.set_selection(sel)
1523 ons = self.copy()
1524 s = scantable(self._math._quotient(ons, offs, preserve))
1525 self.set_selection(basesel)
1526 elif mode.lower() == "time":
1527 s = scantable(self._math._auto_quotient(self, mode, preserve))
1528 s._add_history("auto_quotient", varlist)
1529 print_log()
1530 return s
1531
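    # Illustrative sketch of a paired on/off reduction (assumes a scantable
    # 'scan' containing matching on and off scans):
    #
    #     q = scan.auto_quotient(preserve=True, mode='paired')
    #     av = q.average_time(weight='tintsys', scanav=False)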
1532 def mx_quotient(self, mask = None, weight='median', preserve=True):
1533 """
1534 Form a quotient using "off" beams when observing in "MX" mode.
1535 Parameters:
1536 mask: an optional mask to be used when weight == 'stddev'
1537 weight: How to average the off beams. Default is 'median'.
1538 preserve: you can preserve (default) the continuum or
1539 remove it. The equations used are
1540 preserve: Output = Toff * (on/off) - Toff
1541 remove: Output = Toff * (on/off) - Ton
1542 """
1543 if mask is None: mask = ()
1544 varlist = vars()
1545 on = scantable(self._math._mx_extract(self, 'on'))
1546 preoff = scantable(self._math._mx_extract(self, 'off'))
1547 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1548 from asapmath import quotient
1549 q = quotient(on, off, preserve)
1550 q._add_history("mx_quotient", varlist)
1551 print_log()
1552 return q
1553
1554 def freq_switch(self, insitu=None):
1555 """
1556 Apply frequency switching to the data.
1557 Parameters:
1558 insitu: if False a new scantable is returned.
1559                     Otherwise, the switching is done in-situ
1560 The default is taken from .asaprc (False)
1561 Example:
1562 none
1563 """
1564 if insitu is None: insitu = rcParams['insitu']
1565 self._math._setinsitu(insitu)
1566 varlist = vars()
1567 s = scantable(self._math._freqswitch(self))
1568 s._add_history("freq_switch", varlist)
1569 print_log()
1570 if insitu: self._assign(s)
1571 else: return s
1572
1573 def recalc_azel(self):
1574 """
1575 Recalculate the azimuth and elevation for each position.
1576 Parameters:
1577 none
1578 Example:
1579 """
1580 varlist = vars()
1581 self._recalcazel()
1582 self._add_history("recalc_azel", varlist)
1583 print_log()
1584 return
1585
1586 def __add__(self, other):
1587 varlist = vars()
1588 s = None
1589 if isinstance(other, scantable):
1590 s = scantable(self._math._binaryop(self, other, "ADD"))
1591 elif isinstance(other, float):
1592 s = scantable(self._math._unaryop(self, other, "ADD", False))
1593 else:
1594 raise TypeError("Other input is not a scantable or float value")
1595 s._add_history("operator +", varlist)
1596 print_log()
1597 return s
1598
1599 def __sub__(self, other):
1600 """
1601 implicit on all axes and on Tsys
1602 """
1603 varlist = vars()
1604 s = None
1605 if isinstance(other, scantable):
1606 s = scantable(self._math._binaryop(self, other, "SUB"))
1607 elif isinstance(other, float):
1608 s = scantable(self._math._unaryop(self, other, "SUB", False))
1609 else:
1610 raise TypeError("Other input is not a scantable or float value")
1611 s._add_history("operator -", varlist)
1612 print_log()
1613 return s
1614
1615 def __mul__(self, other):
1616 """
1617 implicit on all axes and on Tsys
1618 """
1619 varlist = vars()
1620 s = None
1621 if isinstance(other, scantable):
1622 s = scantable(self._math._binaryop(self, other, "MUL"))
1623 elif isinstance(other, float):
1624 s = scantable(self._math._unaryop(self, other, "MUL", False))
1625 else:
1626 raise TypeError("Other input is not a scantable or float value")
1627 s._add_history("operator *", varlist)
1628 print_log()
1629 return s
1630
1631
1632 def __div__(self, other):
1633 """
1634 implicit on all axes and on Tsys
1635 """
1636 varlist = vars()
1637 s = None
1638 if isinstance(other, scantable):
1639 s = scantable(self._math._binaryop(self, other, "DIV"))
1640 elif isinstance(other, float):
1641 if other == 0.0:
1642 raise ZeroDivisionError("Dividing by zero is not recommended")
1643 s = scantable(self._math._unaryop(self, other, "DIV", False))
1644 else:
1645 raise TypeError("Other input is not a scantable or float value")
1646 s._add_history("operator /", varlist)
1647 print_log()
1648 return s
1649
1650 def get_fit(self, row=0):
1651 """
1652 Print or return the stored fits for a row in the scantable
1653 Parameters:
1654 row: the row which the fit has been applied to.
1655 """
1656 if row > self.nrow():
1657 return
1658 from asap.asapfit import asapfit
1659 fit = asapfit(self._getfit(row))
1660 if rcParams['verbose']:
1661 print fit
1662 return
1663 else:
1664 return fit.as_dict()
1665
1666 def _add_history(self, funcname, parameters):
1667 # create date
1668 sep = "##"
1669 from datetime import datetime
1670 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1671 hist = dstr+sep
1672 hist += funcname+sep#cdate+sep
1673 if parameters.has_key('self'): del parameters['self']
1674 for k, v in parameters.iteritems():
1675 if type(v) is dict:
1676 for k2, v2 in v.iteritems():
1677 hist += k2
1678 hist += "="
1679 if isinstance(v2, scantable):
1680 hist += 'scantable'
1681 elif k2 == 'mask':
1682 if isinstance(v2, list) or isinstance(v2, tuple):
1683 hist += str(self._zip_mask(v2))
1684 else:
1685 hist += str(v2)
1686 else:
1687 hist += str(v2)
1688 else:
1689 hist += k
1690 hist += "="
1691 if isinstance(v, scantable):
1692 hist += 'scantable'
1693 elif k == 'mask':
1694 if isinstance(v, list) or isinstance(v, tuple):
1695 hist += str(self._zip_mask(v))
1696 else:
1697 hist += str(v)
1698 else:
1699 hist += str(v)
1700 hist += sep
1701 hist = hist[:-2] # remove trailing '##'
1702 self._addhistory(hist)
1703
1704
1705 def _zip_mask(self, mask):
1706 mask = list(mask)
1707 i = 0
1708 segments = []
1709 while mask[i:].count(1):
1710 i += mask[i:].index(1)
1711 if mask[i:].count(0):
1712 j = i + mask[i:].index(0)
1713 else:
1714 j = len(mask)
1715 segments.append([i, j])
1716 i = j
1717 return segments
1718
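    # Illustrative note (not part of the original module): _zip_mask compresses
    # a boolean channel mask into half-open [start, end) segments for the
    # history record, e.g. [False, True, True, False, True] -> [[1, 3], [4, 5]].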
1719 def _get_ordinate_label(self):
1720 fu = "("+self.get_fluxunit()+")"
1721 import re
1722 lbl = "Intensity"
1723 if re.match(".K.", fu):
1724 lbl = "Brightness Temperature "+ fu
1725 elif re.match(".Jy.", fu):
1726 lbl = "Flux density "+ fu
1727 return lbl
1728
1729 def _check_ifs(self):
1730 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1731 nchans = filter(lambda t: t > 0, nchans)
1732 return (sum(nchans)/len(nchans) == nchans[0])
1733
1734 def _fill(self, names, unit, average):
1735 import os
1736 from asap._asap import stfiller
1737 first = True
1738 fullnames = []
1739 for name in names:
1740 name = os.path.expandvars(name)
1741 name = os.path.expanduser(name)
1742 if not os.path.exists(name):
1743                 msg = "File '%s' does not exist" % (name)
1744 if rcParams['verbose']:
1745 asaplog.push(msg)
1746 print asaplog.pop().strip()
1747 return
1748 raise IOError(msg)
1749 fullnames.append(name)
1750 if average:
1751 asaplog.push('Auto averaging integrations')
1752 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1753 for name in fullnames:
1754 tbl = Scantable(stype)
1755 r = stfiller(tbl)
1756 msg = "Importing %s..." % (name)
1757 asaplog.push(msg, False)
1758 print_log()
1759 r._open(name, -1, -1)
1760 r._read()
1761 #tbl = r._getdata()
1762 if average:
1763 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1764 #tbl = tbl2
1765 if not first:
1766 tbl = self._math._merge([self, tbl])
1767 #tbl = tbl2
1768 Scantable.__init__(self, tbl)
1769 r._close()
1770 del r, tbl
1771 first = False
1772 if unit is not None:
1773 self.set_fluxunit(unit)
1774 self.set_freqframe(rcParams['scantable.freqframe'])
1775