source: trunk/python/scantable.py@ 1331

Last change on this file since 1331 was 1322, checked in by mar637, 18 years ago

Fix for Ticket #101. set_restfreqs handles non-consecutive IFs now.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 63.6 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Overrides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
81 Parameters:
82 name: the name of the output file. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: if True, overwrite the file if it already exists.
92 The default (False) is to return with a warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Parameters:
128 none
129 Example:
130 copiedscan = scan.copy()
131 """
132 sd = scantable(Scantable._copy(self))
133 return sd
134
135 def drop_scan(self, scanid=None):
136 """
137 Return a new scantable where the specified scan number(s) have
138 been dropped.
139 Parameters:
140 scanid: a (list of) scan number(s)
141 """
142 from asap import _is_sequence_or_number as _is_valid
143 from asap import _to_list
144 from asap import unique
145 if not _is_valid(scanid):
146 if rcParams['verbose']:
147 print "Please specify a scanno to drop from the scantable"
148 return
149 else:
150 raise RuntimeError("No scan given")
151 try:
152 scanid = _to_list(scanid)
153 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
154 for sid in scanid: allscans.remove(sid)
155 if len(allscans) == 0:
156 raise ValueError("Can't remove all scans")
157 except ValueError:
158 if rcParams['verbose']:
159 print "Couldn't find any match."
160 return
161 else: raise
162 try:
163 bsel = self.get_selection()
164 sel = selector()
165 sel.set_scans(allscans)
166 self.set_selection(bsel+sel)
167 scopy = self._copy()
168 self.set_selection(bsel)
169 return scantable(scopy)
170 except RuntimeError:
171 if rcParams['verbose']:
172 print "Couldn't find any match."
173 else:
174 raise
175
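    # Illustrative usage (editor's sketch, not part of the original source;
    # the file name below is a hypothetical placeholder):
    #
    #   scan = scantable('mydata.rpf', average=True)
    #   cleaned = scan.drop_scan([1, 4])      # drop scan numbers 1 and 4
    #   cleaned.summary()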
176
177 def get_scan(self, scanid=None):
178 """
179 Return a specific scan (by scanno) or collection of scans (by
180 source name) in a new scantable.
181 Parameters:
182 scanid: a (list of) scanno(s) or a source name; unix-style
183 patterns are accepted for source name matching, e.g.
184 '*_R' gets all 'ref' scans
185 Example:
186 # get all scans containing the source '323p459'
187 newscan = scan.get_scan('323p459')
188 # get all 'off' scans
189 refscans = scan.get_scan('*_R')
190 # get a subset of scans by scanno (as listed in scan.summary())
191 newscan = scan.get_scan([0, 2, 7, 10])
192 """
193 if scanid is None:
194 if rcParams['verbose']:
195 print "Please specify a scan no or name to " \
196 "retrieve from the scantable"
197 return
198 else:
199 raise RuntimeError("No scan given")
200
201 try:
202 bsel = self.get_selection()
203 sel = selector()
204 if type(scanid) is str:
205 sel.set_name(scanid)
206 self.set_selection(bsel+sel)
207 scopy = self._copy()
208 self.set_selection(bsel)
209 return scantable(scopy)
210 elif type(scanid) is int:
211 sel.set_scans([scanid])
212 self.set_selection(bsel+sel)
213 scopy = self._copy()
214 self.set_selection(bsel)
215 return scantable(scopy)
216 elif type(scanid) is list:
217 sel.set_scans(scanid)
218 self.set_selection(sel)
219 scopy = self._copy()
220 self.set_selection(bsel)
221 return scantable(scopy)
222 else:
223 msg = "Illegal scanid type, use 'int' or 'list' of ints."
224 if rcParams['verbose']:
225 print msg
226 else:
227 raise TypeError(msg)
228 except RuntimeError:
229 if rcParams['verbose']: print "Couldn't find any match."
230 else: raise
231
232 def __str__(self):
233 return Scantable._summary(self, True)
234
235 def summary(self, filename=None):
236 """
237 Print a summary of the contents of this scantable.
238 Parameters:
239 filename: the name of a file to write the output to
240 Default - no file output
241 verbose: print extra info such as the frequency table
242 The default (False) is taken from .asaprc
243 """
244 info = Scantable._summary(self, True)
245 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
246 if filename is not None:
247 if filename == "":
248 filename = 'scantable_summary.txt'
249 from os.path import expandvars, isdir
250 filename = expandvars(filename)
251 if not isdir(filename):
252 data = open(filename, 'w')
253 data.write(info)
254 data.close()
255 else:
256 msg = "Illegal file name '%s'." % (filename)
257 if rcParams['verbose']:
258 print msg
259 else:
260 raise IOError(msg)
261 if rcParams['verbose']:
262 try:
263 from IPython.genutils import page as pager
264 except ImportError:
265 from pydoc import pager
266 pager(info)
267 else:
268 return info
269
270
271 def get_selection(self):
272 """
273 Get the selection object currently set on this scantable.
274 Parameters:
275 none
276 Example:
277 sel = scan.get_selection()
278 sel.set_ifs(0) # select IF 0
279 scan.set_selection(sel) # apply modified selection
280 """
281 return selector(self._getselection())
282
283 def set_selection(self, selection=selector()):
284 """
285 Select a subset of the data. All following operations on this scantable
286 are only applied to this selection.
287 Parameters:
288 selection: a selector object (default unset the selection)
289 Examples:
290 sel = selector() # create a selection object
291 sel.set_scans([0, 3]) # select SCANNO 0 and 3
292 scan.set_selection(sel) # set the selection
293 scan.summary() # will only print a summary of scanno 0 and 3
294 scan.set_selection() # unset the selection
295 """
296 self._setselection(selection)
297
298 def stats(self, stat='stddev', mask=None):
299 """
300 Determine the specified statistic of the current beam/if/pol
301 Takes an optional 'mask' parameter to restrict which
302 channels are used.
303 Parameters:
304 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
305 'var', 'stddev', 'avdev', 'rms', 'median'
306 mask: an optional mask specifying where the statistic
307 should be determined.
308 Example:
309 scan.set_unit('channel')
310 msk = scan.create_mask([100, 200], [500, 600])
311 scan.stats(stat='mean', mask=msk)
312 """
313 if mask is None:
314 mask = []
315 axes = ['Beam', 'IF', 'Pol', 'Time']
316 if not self._check_ifs():
317 raise ValueError("Cannot apply mask as the IFs have different "
318 "number of channels. Please use setselection() "
319 "to select individual IFs")
320
321 statvals = self._math._stats(self, mask, stat)
322 out = ''
323 axes = []
324 for i in range(self.nrow()):
325 axis = []
326 axis.append(self.getscan(i))
327 axis.append(self.getbeam(i))
328 axis.append(self.getif(i))
329 axis.append(self.getpol(i))
330 axis.append(self.getcycle(i))
331 axes.append(axis)
332 tm = self._gettime(i)
333 src = self._getsourcename(i)
334 out += 'Scan[%d] (%s) ' % (axis[0], src)
335 out += 'Time[%s]:\n' % (tm)
336 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
337 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
338 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
339 out += '= %3.3f\n' % (statvals[i])
340 out += "--------------------------------------------------\n"
341
342 if rcParams['verbose']:
343 print "--------------------------------------------------"
344 print " ", stat
345 print "--------------------------------------------------"
346 print out
347 #else:
348 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
349 # 'axes' : axes,
350 # 'data': statvals}
351 return statvals
352
353 def stddev(self, mask=None):
354 """
355 Determine the standard deviation of the current beam/if/pol
356 Takes an optional 'mask' parameter to restrict which
357 channels are used.
358 Parameters:
359 mask: an optional mask specifying where the standard
360 deviation should be determined.
361
362 Example:
363 scan.set_unit('channel')
364 msk = scan.create_mask([100, 200], [500, 600])
365 scan.stddev(mask=msk)
366 """
367 return self.stats(stat='stddev', mask=mask)
368
369
370 def get_column_names(self):
371 """
372 Return a list of column names, which can be used for selection.
373 """
374 return list(Scantable.get_column_names(self))
375
376 def get_tsys(self):
377 """
378 Return the System temperatures.
379 Parameters:
380
381 Returns:
382 a list of Tsys values for the current selection
383 """
384
385 return self._row_callback(self._gettsys, "Tsys")
386
387 def _row_callback(self, callback, label):
388 axes = []
389 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
390 out = ""
391 outvec = []
392 for i in range(self.nrow()):
393 axis = []
394 axis.append(self.getscan(i))
395 axis.append(self.getbeam(i))
396 axis.append(self.getif(i))
397 axis.append(self.getpol(i))
398 axis.append(self.getcycle(i))
399 axes.append(axis)
400 tm = self._gettime(i)
401 src = self._getsourcename(i)
402 out += 'Scan[%d] (%s) ' % (axis[0], src)
403 out += 'Time[%s]:\n' % (tm)
404 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
405 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
406 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
407 outvec.append(callback(i))
408 out += '= %3.3f\n' % (outvec[i])
409 out += "--------------------------------------------------\n"
410 if rcParams['verbose']:
411 print "--------------------------------------------------"
412 print " %s" % (label)
413 print "--------------------------------------------------"
414 print out
415 # disabled because the vector seems more useful
416 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
417 return outvec
418
419 def _get_column(self, callback, row=-1):
420 """
421 """
422 if row == -1:
423 return [callback(i) for i in range(self.nrow())]
424 else:
425 if 0 <= row < self.nrow():
426 return callback(row)
427
428
429 def get_time(self, row=-1):
430 """
431 Get a list of time stamps for the observations.
432 Return a string for each integration in the scantable.
433 Parameters:
434 row: row number of the integration. Default -1 returns all rows
435 Example:
436 none
437 """
438 from time import strptime
439 from datetime import datetime
440 times = self._get_column(self._gettime, row)
441 format = "%Y/%m/%d/%H:%M:%S"
442 if isinstance(times, list):
443 return [datetime(*strptime(i, format)[:6]) for i in times]
444 else:
445 return datetime(*strptime(times, format)[:6])
446
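    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   stamps = scan.get_time()         # list of datetime objects, one per row
    #   first = scan.get_time(row=0)     # single datetime for the first row
    #   print first.strftime("%Y/%m/%d/%H:%M:%S")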
447 def get_sourcename(self, row=-1):
448 """
449 Get a list of source names for the observations.
450 Return a string for each integration in the scantable.
451 Parameters:
452 row: row number of the integration. Default -1 returns all rows
453 Example:
454 none
455 """
456 return self._get_column(self._getsourcename, row)
457
458 def get_elevation(self, row=-1):
459 """
460 Get a list of elevations for the observations.
461 Return a float for each integration in the scantable.
462 Parameters:
463 row: row number of the integration. Default -1 returns all rows
464 Example:
465 none
466 """
467 return self._get_column(self._getelevation, row)
468
469 def get_azimuth(self, row=-1):
470 """
471 Get a list of azimuths for the observations.
472 Return a float for each integration in the scantable.
473 Parameters:
474 row: row number of the integration. Default -1 returns all rows
475 Example:
476 none
477 """
478 return self._get_column(self._getazimuth, row)
479
480 def get_parangle(self, row=-1):
481 """
482 Get a list of parallactic angles for the observations.
483 Return a float for each integration in the scantable.
484 Parameters:
485 row: row number of the integration. Default -1 returns all rows
486 Example:
487 none
488 """
489 return self._get_column(self._getparangle, row)
490
491 def get_direction(self, row=-1):
492 """
493 Get a list of positions on the sky (direction) for the observations.
494 Return the direction for each integration in the scantable.
495 Parameters:
496 row: row number of the integration. Default -1 returns all rows
497 Example:
498 none
499 """
500 return self._get_column(self._getdirection, row)
501
502 def set_unit(self, unit='channel'):
503 """
504 Set the unit for all following operations on this scantable
505 Parameters:
506 unit: optional unit, default is 'channel'
507 one of '*Hz', 'km/s', 'channel', ''
508 """
509 varlist = vars()
510 if unit in ['', 'pixel', 'channel']:
511 unit = ''
512 inf = list(self._getcoordinfo())
513 inf[0] = unit
514 self._setcoordinfo(inf)
515 self._add_history("set_unit", varlist)
516
517 def set_instrument(self, instr):
518 """
519 Set the instrument for subsequent processing
520 Parameters:
521 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
522 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
523 """
524 self._setInstrument(instr)
525 self._add_history("set_instrument", vars())
526 print_log()
527
528 def set_feedtype(self, feedtype):
529 """
530 Overwrite the feed type, which might not be set correctly.
531 Parameters:
532 feedtype: 'linear' or 'circular'
533 """
534 self._setfeedtype(feedtype)
535 self._add_history("set_feedtype", vars())
536 print_log()
537
538 def set_doppler(self, doppler='RADIO'):
539 """
540 Set the doppler for all following operations on this scantable.
541 Parameters:
542 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
543 """
544 varlist = vars()
545 inf = list(self._getcoordinfo())
546 inf[2] = doppler
547 self._setcoordinfo(inf)
548 self._add_history("set_doppler", vars())
549 print_log()
550
551 def set_freqframe(self, frame=None):
552 """
553 Set the frame type of the Spectral Axis.
554 Parameters:
555 frame: an optional frame type, default 'LSRK'. Valid frames are:
556 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
557 'GEO', 'GALACTO', 'LGROUP', 'CMB'
558 Examples:
559 scan.set_freqframe('BARY')
560 """
561 if frame is None: frame = rcParams['scantable.freqframe']
562 varlist = vars()
563 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
564 'GEO', 'GALACTO', 'LGROUP', 'CMB']
565
566 if frame in valid:
567 inf = list(self._getcoordinfo())
568 inf[1] = frame
569 self._setcoordinfo(inf)
570 self._add_history("set_freqframe", varlist)
571 else:
572 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid,)
573 if rcParams['verbose']:
574 print msg
575 else:
576 raise TypeError(msg)
577 print_log()
578
579 def set_dirframe(self, frame=""):
580 """
581 Set the frame type of the Direction on the sky.
582 Parameters:
583 frame: an optional frame type, default ''. Valid frames are:
584 'J2000', 'B1950', 'GALACTIC'
585 Examples:
586 scan.set_dirframe('GALACTIC')
587 """
588 varlist = vars()
589 try:
590 Scantable.set_dirframe(self, frame)
591 except RuntimeError, msg:
592 if rcParams['verbose']:
593 print msg
594 else:
595 raise
596 self._add_history("set_dirframe", varlist)
597
598 def get_unit(self):
599 """
600 Get the default unit set in this scantable
601 Parameters:
602 Returns:
603 A unit string
604 """
605 inf = self._getcoordinfo()
606 unit = inf[0]
607 if unit == '': unit = 'channel'
608 return unit
609
610 def get_abcissa(self, rowno=0):
611 """
612 Get the abcissa in the current coordinate setup for the currently
613 selected Beam/IF/Pol
614 Parameters:
615 rowno: an optional row number in the scantable. Default is the
616 first row, i.e. rowno=0
617 Returns:
618 The abcissa values and the corresponding format string (as a tuple)
619 """
620 abc = self._getabcissa(rowno)
621 lbl = self._getabcissalabel(rowno)
622 print_log()
623 return abc, lbl
624
625 def flag(self, mask=None):
626 """
627 Flag the selected data using an optional channel mask.
628 Parameters:
629 mask: an optional channel mask, created with create_mask. Default
630 (no mask) is all channels.
631 """
632 varlist = vars()
633 if mask is None:
634 mask = []
635 try:
636 self._flag(mask)
637 except RuntimeError, msg:
638 if rcParams['verbose']:
639 print msg
640 return
641 else: raise
642 self._add_history("flag", varlist)
643
644 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
645 """
646 Flag the data in 'lag' space by providing a frequency to remove.
647 Flagged data in the scantable gets set to 0.0 before the fft.
648 No taper is applied.
649 Parameters:
650 frequency: the frequency (really a period within the bandwidth) to remove
651 width: the width of the frequency to remove, to remove a range
652 of frequencies around the centre.
653 unit: the frequency unit (default "GHz")
654 Notes:
655 It is recommended to flag edges of the band or strong signals beforehand.
656 """
657 if insitu is None: insitu = rcParams['insitu']
658 self._math._setinsitu(insitu)
659 varlist = vars()
660 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
661 if not base.has_key(unit):
662 raise ValueError("%s is not a valid unit." % unit)
663 try:
664 s = scantable(self._math._lag_flag(self, frequency*base[unit],
665 width*base[unit]))
666 except RuntimeError, msg:
667 if rcParams['verbose']:
668 print msg
669 return
670 else: raise
671 s._add_history("lag_flag", varlist)
672 print_log()
673 if insitu:
674 self._assign(s)
675 else:
676 return s
677
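    # Illustrative usage (editor's sketch, not part of the original source;
    # the frequency/width values below are arbitrary placeholders):
    #
    #   # remove a 0.5 GHz ripple (and a small range around it) in lag space
    #   cleaned = scan.lag_flag(frequency=0.5, width=0.01, unit="GHz", insitu=False)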
678
679 def create_mask(self, *args, **kwargs):
680 """
681 Compute and return a mask based on [min, max] windows.
682 The specified windows are to be INCLUDED, when the mask is
683 applied.
684 Parameters:
685 [min, max], [min2, max2], ...
686 Pairs of start/end points (inclusive) specifying the regions
687 to be masked
688 invert: optional argument. If specified as True,
689 return an inverted mask, i.e. the regions
690 specified are EXCLUDED
691 row: create the mask using the specified row for
692 unit conversions; default is row=0. This is
693 only necessary if the frequency varies over rows.
694 Example:
695 scan.set_unit('channel')
696 a)
697 msk = scan.create_mask([400, 500], [800, 900])
698 # masks everything outside 400 and 500
699 # and 800 and 900 in the unit 'channel'
700
701 b)
702 msk = scan.create_mask([400, 500], [800, 900], invert=True)
703 # masks the regions between 400 and 500
704 # and 800 and 900 in the unit 'channel'
705 c)
706 mask only channel 400
707 msk = scan.create_mask([400, 400])
708 """
709 row = 0
710 if kwargs.has_key("row"):
711 row = kwargs.get("row")
712 data = self._getabcissa(row)
713 u = self._getcoordinfo()[0]
714 if rcParams['verbose']:
715 if u == "": u = "channel"
716 msg = "The current mask window unit is %s" % u
717 i = self._check_ifs()
718 if not i:
719 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
720 asaplog.push(msg)
721 n = self.nchan()
722 msk = _n_bools(n, False)
723 # test if args is a 'list' or a 'normal *args - UGLY!!!
724
725 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
726 and args or args[0]
727 for window in ws:
728 if (len(window) != 2 or window[0] > window[1] ):
729 raise TypeError("A window needs to be defined as [min, max]")
730 for i in range(n):
731 if data[i] >= window[0] and data[i] <= window[1]:
732 msk[i] = True
733 if kwargs.has_key('invert'):
734 if kwargs.get('invert'):
735 msk = mask_not(msk)
736 print_log()
737 return msk
738
739 def get_restfreqs(self):
740 """
741 Get the restfrequency(s) stored in this scantable.
742 The return value(s) are always of unit 'Hz'
743 Parameters:
744 none
745 Returns:
746 a list of doubles
747 """
748 return list(self._getrestfreqs())
749
750
751 def set_restfreqs(self, freqs=None, unit='Hz'):
752 """
753 Set or replace the rest frequency.
754 If the 'freqs' argument holds a scalar,
755 then that rest frequency will be applied to all the selected
756 data. If the 'freqs' argument holds
757 a vector, then it MUST be of equal or smaller length than
758 the number of IFs (and the available restfrequencies will be
759 replaced by this vector). In this case, *all* data have
760 the restfrequency set per IF according
761 to the corresponding value you give in the 'freqs' vector.
762 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
763 IF 1 gets restfreq 2e9.
764 You can also specify the frequencies via a linecatalog.
765
766 Parameters:
767 freqs: list of rest frequency values or string identifiers
768 unit: unit for rest frequency (default 'Hz')
769
770 Example:
771 # set the given restfrequency for the whole table
772 scan.set_restfreqs(freqs=1.4e9)
773 # If the number of IFs in the data is >= 2, IF0 gets the first
774 # value IF1 the second...
775 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
776 #set the given restfrequency for the whole table (by name)
777 scan.set_restfreqs(freqs="OH1667")
778
779 Note:
780 To do more sophisticated rest-frequency setting, e.g. on a
781 source and IF basis, use scantable.set_selection() before using
782 this function.
783 # provided your scantable is called scan
784 selection = selector()
785 selection.set_name("ORION*")
786 selection.set_ifs([1])
787 scan.set_selection(selection)
788 scan.set_restfreqs(freqs=86.6e9)
789
790 """
791 varlist = vars()
792 from asap import linecatalog
793 # simple value
794 if isinstance(freqs, int) or isinstance(freqs, float):
795 self._setrestfreqs(freqs, "",unit)
796 # list of values
797 elif isinstance(freqs, list) or isinstance(freqs, tuple):
798 # list values are scalars
799 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
800 sel = selector()
801 savesel = self._getselection()
802 iflist = self.getifnos()
803 for i in xrange(len(freqs)):
804 sel.set_ifs(iflist[i])
805 self._setselection(sel)
806 self._setrestfreqs(freqs[i], "",unit)
807 self._setselection(savesel)
808 # list values are dicts, {'value': ..., 'name': ...}
809 elif isinstance(freqs[-1], dict):
810 sel = selector()
811 savesel = self._getselection()
812 iflist = self.getifnos()
813 for i in xrange(len(freqs)):
814 sel.set_ifs(iflist[i])
815 self._setselection(sel)
816 self._setrestfreqs(freqs[i]["value"],
817 freqs[i]["name"], "MHz")
818 self._setselection(savesel)
819 # freqs are to be taken from a linecatalog
820 elif isinstance(freqs, linecatalog):
821 sel = selector()
822 savesel = self._getselection()
823 iflist = self.getifnos()
824 for i in xrange(freqs.nrow()):
825 sel.set_ifs(iflist[i])
826 self._setselection(sel)
827 self._setrestfreqs(freqs.get_frequency(i),
828 freqs.get_name(i), "MHz")
829 # ensure that we are not iterating past nIF
830 if i == self.nif()-1: break
831 self._setselection(savesel)
832 else:
833 return
834 self._add_history("set_restfreqs", varlist)
835
836
837
838 def history(self, filename=None):
839 """
840 Print the history. Optionally to a file.
841 """
842 hist = list(self._gethistory())
843 out = "-"*80
844 for h in hist:
845 if h.startswith("---"):
846 out += "\n"+h
847 else:
848 items = h.split("##")
849 date = items[0]
850 func = items[1]
851 items = items[2:]
852 out += "\n"+date+"\n"
853 out += "Function: %s\n Parameters:" % (func)
854 for i in items:
855 s = i.split("=")
856 out += "\n %s = %s" % (s[0], s[1])
857 out += "\n"+"-"*80
858 if filename is not None:
859 if filename == "":
860 filename = 'scantable_history.txt'
861 import os
862 filename = os.path.expandvars(os.path.expanduser(filename))
863 if not os.path.isdir(filename):
864 data = open(filename, 'w')
865 data.write(out)
866 data.close()
867 else:
868 msg = "Illegal file name '%s'." % (filename)
869 if rcParams['verbose']:
870 print msg
871 else:
872 raise IOError(msg)
873 if rcParams['verbose']:
874 try:
875 from IPython.genutils import page as pager
876 except ImportError:
877 from pydoc import pager
878 pager(out)
879 else:
880 return out
881 return
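    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   scan.history()                        # page the processing history
    #   scan.history('myscan_history.txt')    # or write it to a text file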
882 #
883 # Maths business
884 #
885
886 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
887 """
888 Return the (time) weighted average of a scan.
889 Note:
890 in channels only - align if necessary
891 Parameters:
892 mask: an optional mask (only used for 'var' and 'tsys'
893 weighting)
894 scanav: True averages each scan separately
895 False (default) averages all scans together,
896 weight: Weighting scheme.
897 'none' (mean, no weighting)
898 'var' (1/var(spec) weighted)
899 'tsys' (1/Tsys**2 weighted)
900 'tint' (integration time weighted)
901 'tintsys' (Tint/Tsys**2)
902 'median' (median averaging)
903 The default is 'tint'
904 align: align the spectra in velocity before averaging. It takes
905 the time of the first spectrum as reference time.
906 Example:
907 # time average the scantable without using a mask
908 newscan = scan.average_time()
909 """
910 varlist = vars()
911 if weight is None: weight = 'TINT'
912 if mask is None: mask = ()
913 if scanav: scanav = "SCAN"
914 else: scanav = "NONE"
915 scan = (self, )
916 try:
917 if align:
918 scan = (self.freq_align(insitu=False), )
919 s = None
920 if weight.upper() == 'MEDIAN':
921 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
922 scanav))
923 else:
924 s = scantable(self._math._average(scan, mask, weight.upper(),
925 scanav))
926 except RuntimeError, msg:
927 if rcParams['verbose']:
928 print msg
929 return
930 else: raise
931 s._add_history("average_time", varlist)
932 print_log()
933 return s
934
935 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
936 """
937 Return a scan where all spectra are converted to either
938 Jansky or Kelvin depending upon the flux units of the scan table.
939 By default the function tries to look the values up internally.
940 If it can't find them (or if you want to override), you must
941 specify EITHER jyperk OR eta (and D which it will try to look up
942 also if you don't set it). jyperk takes precedence if you set both.
943 Parameters:
944 jyperk: the Jy / K conversion factor
945 eta: the aperture efficiency
946 d: the geometric diameter (metres)
947 insitu: if False a new scantable is returned.
948 Otherwise, the scaling is done in-situ
949 The default is taken from .asaprc (False)
950 """
951 if insitu is None: insitu = rcParams['insitu']
952 self._math._setinsitu(insitu)
953 varlist = vars()
954 if jyperk is None: jyperk = -1.0
955 if d is None: d = -1.0
956 if eta is None: eta = -1.0
957 s = scantable(self._math._convertflux(self, d, eta, jyperk))
958 s._add_history("convert_flux", varlist)
959 print_log()
960 if insitu: self._assign(s)
961 else: return s
962
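    # Illustrative usage (editor's sketch, not part of the original source;
    # the jyperk/eta/d values are arbitrary placeholders):
    #
    #   jyscan = scan.convert_flux(jyperk=12.5, insitu=False)        # via a known Jy/K factor
    #   kscan = scan.convert_flux(eta=0.58, d=64.0, insitu=False)    # via efficiency and dish size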
963 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
964 """
965 Return a scan after applying a gain-elevation correction.
966 The correction can be made via either a polynomial or a
967 table-based interpolation (and extrapolation if necessary).
968 You specify polynomial coefficients, an ascii table or neither.
969 If you specify neither, then a polynomial correction will be made
970 with built in coefficients known for certain telescopes (an error
971 will occur if the instrument is not known).
972 The data and Tsys are *divided* by the scaling factors.
973 Parameters:
974 poly: Polynomial coefficients (default None) to compute a
975 gain-elevation correction as a function of
976 elevation (in degrees).
977 filename: The name of an ascii file holding correction factors.
978 The first row of the ascii file must give the column
979 names and these MUST include columns
980 "ELEVATION" (degrees) and "FACTOR" (multiply data
981 by this) somewhere.
982 The second row must give the data type of the
983 column. Use 'R' for Real and 'I' for Integer.
984 An example file would be
985 (actual factors are arbitrary) :
986
987 TIME ELEVATION FACTOR
988 R R R
989 0.1 0 0.8
990 0.2 20 0.85
991 0.3 40 0.9
992 0.4 60 0.85
993 0.5 80 0.8
994 0.6 90 0.75
995 method: Interpolation method when correcting from a table.
996 Values are "nearest", "linear" (default), "cubic"
997 and "spline"
998 insitu: if False a new scantable is returned.
999 Otherwise, the scaling is done in-situ
1000 The default is taken from .asaprc (False)
1001 """
1002
1003 if insitu is None: insitu = rcParams['insitu']
1004 self._math._setinsitu(insitu)
1005 varlist = vars()
1006 if poly is None:
1007 poly = ()
1008 from os.path import expandvars
1009 filename = expandvars(filename)
1010 s = scantable(self._math._gainel(self, poly, filename, method))
1011 s._add_history("gain_el", varlist)
1012 print_log()
1013 if insitu: self._assign(s)
1014 else: return s
1015
1016 def freq_align(self, reftime=None, method='cubic', insitu=None):
1017 """
1018 Return a scan where all rows have been aligned in frequency/velocity.
1019 The alignment frequency frame (e.g. LSRK) is that set by function
1020 set_freqframe.
1021 Parameters:
1022 reftime: reference time to align at. By default, the time of
1023 the first row of data is used.
1024 method: Interpolation method for regridding the spectra.
1025 Choose from "nearest", "linear", "cubic" (default)
1026 and "spline"
1027 insitu: if False a new scantable is returned.
1028 Otherwise, the scaling is done in-situ
1029 The default is taken from .asaprc (False)
1030 """
1031 if insitu is None: insitu = rcParams["insitu"]
1032 self._math._setinsitu(insitu)
1033 varlist = vars()
1034 if reftime is None: reftime = ""
1035 s = scantable(self._math._freq_align(self, reftime, method))
1036 s._add_history("freq_align", varlist)
1037 print_log()
1038 if insitu: self._assign(s)
1039 else: return s
1040
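    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   scan.set_freqframe('LSRK')               # choose the alignment frame
    #   aligned = scan.freq_align(insitu=False)  # align all rows at the time of the first row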
1041 def opacity(self, tau, insitu=None):
1042 """
1043 Apply an opacity correction. The data
1044 and Tsys are multiplied by the correction factor.
1045 Parameters:
1046 tau: Opacity from which the correction factor is
1047 exp(tau*ZD)
1048 where ZD is the zenith-distance
1049 insitu: if False a new scantable is returned.
1050 Otherwise, the scaling is done in-situ
1051 The default is taken from .asaprc (False)
1052 """
1053 if insitu is None: insitu = rcParams['insitu']
1054 self._math._setinsitu(insitu)
1055 varlist = vars()
1056 s = scantable(self._math._opacity(self, tau))
1057 s._add_history("opacity", varlist)
1058 print_log()
1059 if insitu: self._assign(s)
1060 else: return s
1061
1062 def bin(self, width=5, insitu=None):
1063 """
1064 Return a scan where all spectra have been binned up.
1065 width: The bin width (default=5) in pixels
1066 insitu: if False a new scantable is returned.
1067 Otherwise, the scaling is done in-situ
1068 The default is taken from .asaprc (False)
1069 """
1070 if insitu is None: insitu = rcParams['insitu']
1071 self._math._setinsitu(insitu)
1072 varlist = vars()
1073 s = scantable(self._math._bin(self, width))
1074 s._add_history("bin", varlist)
1075 print_log()
1076 if insitu: self._assign(s)
1077 else: return s
1078
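    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   binned = scan.bin(width=10, insitu=False)   # bin every 10 channels together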
1079
1080 def resample(self, width=5, method='cubic', insitu=None):
1081 """
1082 Return a scan where all spectra have been binned up.
1083 width: The bin width (default=5) in pixels
1084 method: Interpolation method used for the resampling.
1085 Values are "nearest", "linear", "cubic" (default)
1086 and "spline"
1087 insitu: if False a new scantable is returned.
1088 Otherwise, the scaling is done in-situ
1089 The default is taken from .asaprc (False)
1090 """
1091 if insitu is None: insitu = rcParams['insitu']
1092 self._math._setinsitu(insitu)
1093 varlist = vars()
1094 s = scantable(self._math._resample(self, method, width))
1095 s._add_history("resample", varlist)
1096 print_log()
1097 if insitu: self._assign(s)
1098 else: return s
1099
1100
1101 def average_pol(self, mask=None, weight='none'):
1102 """
1103 Average the Polarisations together.
1104 Parameters:
1105 mask: An optional mask defining the region, where the
1106 averaging will be applied. The output will have all
1107 specified points masked.
1108 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1109 weighted), or 'tsys' (1/Tsys**2 weighted)
1110 """
1111 varlist = vars()
1112 if mask is None:
1113 mask = ()
1114 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1115 s._add_history("average_pol", varlist)
1116 print_log()
1117 return s
1118
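    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   polavg = scan.average_pol(weight='tsys')   # 1/Tsys**2 weighted polarisation average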
1119 def average_beam(self, mask=None, weight='none'):
1120 """
1121 Average the Beams together.
1122 Parameters:
1123 mask: An optional mask defining the region, where the
1124 averaging will be applied. The output will have all
1125 specified points masked.
1126 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1127 weighted), or 'tsys' (1/Tsys**2 weighted)
1128 """
1129 varlist = vars()
1130 if mask is None:
1131 mask = ()
1132 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1133 s._add_history("average_beam", varlist)
1134 print_log()
1135 return s
1136
1137 def convert_pol(self, poltype=None):
1138 """
1139 Convert the data to a different polarisation type.
1140 Parameters:
1141 poltype: The new polarisation type. Valid types are:
1142 "linear", "stokes" and "circular"
1143 """
1144 varlist = vars()
1145 try:
1146 s = scantable(self._math._convertpol(self, poltype))
1147 except RuntimeError, msg:
1148 if rcParams['verbose']:
1149 print msg
1150 return
1151 else:
1152 raise
1153 s._add_history("convert_pol", varlist)
1154 print_log()
1155 return s
1156
1157 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1158 """
1159 Smooth the spectrum by the specified kernel (conserving flux).
1160 Parameters:
1161 scan: The input scan
1162 kernel: The type of smoothing kernel. Select from
1163 'hanning' (default), 'gaussian' and 'boxcar'.
1164 The first three characters are sufficient.
1165 width: The width of the kernel in pixels. For 'hanning' this is
1166 ignored; otherwise it defaults to 5 pixels.
1167 For 'gaussian' it is the Full Width Half
1168 Maximum. For 'boxcar' it is the full width.
1169 insitu: if False a new scantable is returned.
1170 Otherwise, the scaling is done in-situ
1171 The default is taken from .asaprc (False)
1172 Example:
1173 none
1174 """
1175 if insitu is None: insitu = rcParams['insitu']
1176 self._math._setinsitu(insitu)
1177 varlist = vars()
1178 s = scantable(self._math._smooth(self, kernel.lower(), width))
1179 s._add_history("smooth", varlist)
1180 print_log()
1181 if insitu: self._assign(s)
1182 else: return s
1183
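    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   hsm = scan.smooth(kernel='hanning', insitu=False)               # width ignored for hanning
    #   gsm = scan.smooth(kernel='gaussian', width=7.0, insitu=False)   # FWHM of 7 pixels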
1184
1185 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1186 """
1187 Return a scan which has been baselined (all rows) by a polynomial.
1188 Parameters:
1189 scan: a scantable
1190 mask: an optional mask
1191 order: the order of the polynomial (default is 0)
1192 plot: plot the fit and the residual. In this mode each
1193 individual fit has to be approved by typing 'y'
1194 or 'n'
1195 insitu: if False a new scantable is returned.
1196 Otherwise, the scaling is done in-situ
1197 The default is taken from .asaprc (False)
1198 Example:
1199 # return a scan baselined by a third order polynomial,
1200 # not using a mask
1201 bscan = scan.poly_baseline(order=3)
1202 """
1203 if insitu is None: insitu = rcParams['insitu']
1204 varlist = vars()
1205 if mask is None:
1206 mask = [True for i in xrange(self.nchan(-1))]
1207 from asap.asapfitter import fitter
1208 try:
1209 f = fitter()
1210 f.set_scan(self, mask)
1211 f.set_function(poly=order)
1212 s = f.auto_fit(insitu, plot=plot)
1213 s._add_history("poly_baseline", varlist)
1214 print_log()
1215 if insitu: self._assign(s)
1216 else: return s
1217 except RuntimeError:
1218 msg = "The fit failed, possibly because it didn't converge."
1219 if rcParams['verbose']:
1220 print msg
1221 return
1222 else:
1223 raise RuntimeError(msg)
1224
1225
1226 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1227 threshold=3, chan_avg_limit=1, plot=False,
1228 insitu=None):
1229 """
1230 Return a scan which has been baselined (all rows) by a polynomial.
1231 Spectral lines are detected first using linefinder and masked out
1232 to avoid them affecting the baseline solution.
1233
1234 Parameters:
1235 mask: an optional mask retrieved from the scantable
1236 edge: an optional number of channels to drop at
1237 the edge of the spectrum. If only one value is
1238 specified, the same number will be dropped from
1239 both sides of the spectrum. Default is to keep
1240 all channels. Nested tuples represent individual
1241 edge selection for different IFs (whose numbers of
1242 spectral channels can differ)
1243 order: the order of the polynomial (default is 0)
1244 threshold: the threshold used by line finder. It is better to
1245 keep it large as only strong lines affect the
1246 baseline solution.
1247 chan_avg_limit:
1248 a maximum number of consecutive spectral channels to
1249 average during the search of weak and broad lines.
1250 The default is no averaging (and no search for weak
1251 lines). If such lines can affect the fitted baseline
1252 (e.g. a high order polynomial is fitted), increase this
1253 parameter (usually values up to 8 are reasonable). Most
1254 users of this method should find the default value
1255 sufficient.
1256 plot: plot the fit and the residual. In this mode each
1257 individual fit has to be approved by typing 'y'
1258 or 'n'
1259 insitu: if False a new scantable is returned.
1260 Otherwise, the scaling is done in-situ
1261 The default is taken from .asaprc (False)
1262
1263 Example:
1264 scan2=scan.auto_poly_baseline(order=7)
1265 """
1266 if insitu is None: insitu = rcParams['insitu']
1267 varlist = vars()
1268 from asap.asapfitter import fitter
1269 from asap.asaplinefind import linefinder
1270 from asap import _is_sequence_or_number as _is_valid
1271
1272 # check whether edge is set up for each IF individually
1273 individualedge = False;
1274 if len(edge) > 1:
1275 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1276 individualedge = True;
1277
1278 if not _is_valid(edge, int) and not individualedge:
1279 raise ValueError, "Parameter 'edge' has to be an integer or a \
1280 pair of integers specified as a tuple. Nested tuples are allowed \
1281 to make individual selection for different IFs."
1282
1283 curedge = (0, 0)
1284 if individualedge:
1285 for edgepar in edge:
1286 if not _is_valid(edgepar, int):
1287 raise ValueError, "Each element of the 'edge' tuple has \
1288 to be a pair of integers or an integer."
1289 else:
1290 curedge = edge;
1291
1292 # setup fitter
1293 f = fitter()
1294 f.set_function(poly=order)
1295
1296 # setup line finder
1297 fl = linefinder()
1298 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1299
1300 if not insitu:
1301 workscan = self.copy()
1302 else:
1303 workscan = self
1304
1305 fl.set_scan(workscan)
1306
1307 rows = range(workscan.nrow())
1308 asaplog.push("Processing:")
1309 for r in rows:
1310 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1311 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1312 workscan.getpol(r), workscan.getcycle(r))
1313 asaplog.push(msg, False)
1314
1315 # figure out edge parameter
1316 if individualedge:
1317 if workscan.getif(r) >= len(edge):
1318 raise RuntimeError, "Number of edge elements appears to " \
1319 "be less than the number of IFs"
1320 curedge = edge[workscan.getif(r)]
1321
1322 # setup line finder
1323 fl.find_lines(r, mask, curedge)
1324 f.set_scan(workscan, fl.get_mask())
1325 f.x = workscan._getabcissa(r)
1326 f.y = workscan._getspectrum(r)
1327 f.data = None
1328 f.fit()
1329 x = f.get_parameters()
1330 if plot:
1331 f.plot(residual=True)
1332 x = raw_input("Accept fit ( [y]/n ): ")
1333 if x.upper() == 'N':
1334 continue
1335 workscan._setspectrum(f.fitter.getresidual(), r)
1336 if plot:
1337 f._p.unmap()
1338 f._p = None
1339 workscan._add_history("auto_poly_baseline", varlist)
1340 if insitu:
1341 self._assign(workscan)
1342 else:
1343 return workscan
1344
1345 def rotate_linpolphase(self, angle):
1346 """
1347 Rotate the phase of the complex polarization O=Q+iU correlation.
1348 This is always done in situ in the raw data. So if you call this
1349 function more than once then each call rotates the phase further.
1350 Parameters:
1351 angle: The angle (degrees) to rotate (add) by.
1352 Examples:
1353 scan.rotate_linpolphase(2.3)
1354 """
1355 varlist = vars()
1356 self._math._rotate_linpolphase(self, angle)
1357 self._add_history("rotate_linpolphase", varlist)
1358 print_log()
1359 return
1360
1361
1362 def rotate_xyphase(self, angle):
1363 """
1364 Rotate the phase of the XY correlation. This is always done in situ
1365 in the data. So if you call this function more than once
1366 then each call rotates the phase further.
1367 Parameters:
1368 angle: The angle (degrees) to rotate (add) by.
1369 Examples:
1370 scan.rotate_xyphase(2.3)
1371 """
1372 varlist = vars()
1373 self._math._rotate_xyphase(self, angle)
1374 self._add_history("rotate_xyphase", varlist)
1375 print_log()
1376 return
1377
1378 def swap_linears(self):
1379 """
1380 Swap the linear polarisations XX and YY
1381 """
1382 varlist = vars()
1383 self._math._swap_linears(self)
1384 self._add_history("swap_linears", varlist)
1385 print_log()
1386 return
1387
1388 def invert_phase(self):
1389 """
1390 Invert the phase of the complex polarisation
1391 """
1392 varlist = vars()
1393 self._math._invert_phase(self)
1394 self._add_history("invert_phase", varlist)
1395 print_log()
1396 return
1397
1398 def add(self, offset, insitu=None):
1399 """
1400 Return a scan where all spectra have the offset added
1401 Parameters:
1402 offset: the offset
1403 insitu: if False a new scantable is returned.
1404 Otherwise, the scaling is done in-situ
1405 The default is taken from .asaprc (False)
1406 """
1407 if insitu is None: insitu = rcParams['insitu']
1408 self._math._setinsitu(insitu)
1409 varlist = vars()
1410 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1411 s._add_history("add", varlist)
1412 print_log()
1413 if insitu:
1414 self._assign(s)
1415 else:
1416 return s
1417
1418 def scale(self, factor, tsys=True, insitu=None):
1419 """
1420 Return a scan where all spectra are scaled by the given 'factor'
1421 Parameters:
1422 factor: the scaling factor
1423 insitu: if False a new scantable is returned.
1424 Otherwise, the scaling is done in-situ
1425 The default is taken from .asaprc (False)
1426 tsys: if True (default) then apply the operation to Tsys
1427 as well as the data
1428 """
1429 if insitu is None: insitu = rcParams['insitu']
1430 self._math._setinsitu(insitu)
1431 varlist = vars()
1432 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1433 s._add_history("scale", varlist)
1434 print_log()
1435 if insitu:
1436 self._assign(s)
1437 else:
1438 return s
1439
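    # Illustrative usage (editor's sketch, not part of the original source):
    #
    #   doubled = scan.scale(2.0, tsys=True, insitu=False)   # scale data and Tsys by 2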
1440 def auto_quotient(self, mode='time', preserve=True):
1441 """
1442 This function builds quotients automatically.
1443 It assumes the observation has the same number of
1444 "ons" and "offs".
1445 It will support "closest off in time" in the future.
1446 Parameters:
1447 mode: the on/off detection mode; currently only 'time'
1448 (the default) is supported. 'off' scans are
1449 identified by the trailing '_R' (Mopra/Parkes) or
1450 '_e'/'_w' (Tid)
1451 preserve: you can preserve (default) the continuum or
1452 remove it. The equations used are
1453 preserve: Output = Toff * (on/off) - Toff
1454 remove: Output = Toff * (on/off) - Ton
1455 """
1456 modes = ["time"]
1457 if not mode in modes:
1458 msg = "please provide valid mode. Valid modes are %s" % (modes)
1459 raise ValueError(msg)
1460 varlist = vars()
1461 s = scantable(self._math._auto_quotient(self, mode, preserve))
1462 s._add_history("auto_quotient", varlist)
1463 print_log()
1464 return s
1465
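    # Illustrative usage (editor's sketch, not part of the original source;
    # assumes the data contain matching on/off integrations):
    #
    #   q = scan.auto_quotient()                  # quotient with the continuum preserved
    #   q2 = scan.auto_quotient(preserve=False)   # remove the continuum instead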
1466 def mx_quotient(self, mask = None, weight='median', preserve=True):
1467 """
1468 Form a quotient using "off" beams when observing in "MX" mode.
1469 Parameters:
1470 mask: an optional mask to be used when weight == 'stddev'
1471 weight: How to average the off beams. Default is 'median'.
1472 preserve: you can preserve (default) the continuum or
1473 remove it. The equations used are
1474 preserve: Output = Toff * (on/off) - Toff
1475 remove: Output = Toff * (on/off) - Ton
1476 """
1477 if mask is None: mask = ()
1478 varlist = vars()
1479 on = scantable(self._math._mx_extract(self, 'on'))
1480 preoff = scantable(self._math._mx_extract(self, 'off'))
1481 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1482 from asapmath import quotient
1483 q = quotient(on, off, preserve)
1484 q._add_history("mx_quotient", varlist)
1485 print_log()
1486 return q
1487
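    # Illustrative usage (editor's sketch, not part of the original source;
    # only meaningful for multi-beam 'MX' observations):
    #
    #   q = scan.mx_quotient(weight='median')     # median-average the off beams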
1488 def freq_switch(self, insitu=None):
1489 """
1490 Apply frequency switching to the data.
1491 Parameters:
1492 insitu: if False a new scantable is returned.
1493 Otherwise, the switching is done in-situ
1494 The default is taken from .asaprc (False)
1495 Example:
1496 none
1497 """
1498 if insitu is None: insitu = rcParams['insitu']
1499 self._math._setinsitu(insitu)
1500 varlist = vars()
1501 s = scantable(self._math._freqswitch(self))
1502 s._add_history("freq_switch", varlist)
1503 print_log()
1504 if insitu: self._assign(s)
1505 else: return s
1506
1507 def recalc_azel(self):
1508 """
1509 Recalculate the azimuth and elevation for each position.
1510 Parameters:
1511 none
1512 Example:
1513 """
1514 varlist = vars()
1515 self._recalcazel()
1516 self._add_history("recalc_azel", varlist)
1517 print_log()
1518 return
1519
1520 def __add__(self, other):
1521 varlist = vars()
1522 s = None
1523 if isinstance(other, scantable):
1524 s = scantable(self._math._binaryop(self, other, "ADD"))
1525 elif isinstance(other, float):
1526 s = scantable(self._math._unaryop(self, other, "ADD", False))
1527 else:
1528 raise TypeError("Other input is not a scantable or float value")
1529 s._add_history("operator +", varlist)
1530 print_log()
1531 return s
1532
1533 def __sub__(self, other):
1534 """
1535 implicit on all axes and on Tsys
1536 """
1537 varlist = vars()
1538 s = None
1539 if isinstance(other, scantable):
1540 s = scantable(self._math._binaryop(self, other, "SUB"))
1541 elif isinstance(other, float):
1542 s = scantable(self._math._unaryop(self, other, "SUB", False))
1543 else:
1544 raise TypeError("Other input is not a scantable or float value")
1545 s._add_history("operator -", varlist)
1546 print_log()
1547 return s
1548
1549 def __mul__(self, other):
1550 """
1551 implicit on all axes and on Tsys
1552 """
1553 varlist = vars()
1554 s = None
1555 if isinstance(other, scantable):
1556 s = scantable(self._math._binaryop(self, other, "MUL"))
1557 elif isinstance(other, float):
1558 s = scantable(self._math._unaryop(self, other, "MUL", False))
1559 else:
1560 raise TypeError("Other input is not a scantable or float value")
1561 s._add_history("operator *", varlist)
1562 print_log()
1563 return s
1564
1565
1566 def __div__(self, other):
1567 """
1568 implicit on all axes and on Tsys
1569 """
1570 varlist = vars()
1571 s = None
1572 if isinstance(other, scantable):
1573 s = scantable(self._math._binaryop(self, other, "DIV"))
1574 elif isinstance(other, float):
1575 if other == 0.0:
1576 raise ZeroDivisionError("Dividing by zero is not recommended")
1577 s = scantable(self._math._unaryop(self, other, "DIV", False))
1578 else:
1579 raise TypeError("Other input is not a scantable or float value")
1580 s._add_history("operator /", varlist)
1581 print_log()
1582 return s
1583
1584 def get_fit(self, row=0):
1585 """
1586 Print or return the stored fits for a row in the scantable
1587 Parameters:
1588 row: the row which the fit has been applied to.
1589 """
1590 if row >= self.nrow():
1591 return
1592 from asap.asapfit import asapfit
1593 fit = asapfit(self._getfit(row))
1594 if rcParams['verbose']:
1595 print fit
1596 return
1597 else:
1598 return fit.as_dict()
1599
1600 def _add_history(self, funcname, parameters):
1601 # create date
1602 sep = "##"
1603 from datetime import datetime
1604 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1605 hist = dstr+sep
1606 hist += funcname+sep#cdate+sep
1607 if parameters.has_key('self'): del parameters['self']
1608 for k, v in parameters.iteritems():
1609 if type(v) is dict:
1610 for k2, v2 in v.iteritems():
1611 hist += k2
1612 hist += "="
1613 if isinstance(v2, scantable):
1614 hist += 'scantable'
1615 elif k2 == 'mask':
1616 if isinstance(v2, list) or isinstance(v2, tuple):
1617 hist += str(self._zip_mask(v2))
1618 else:
1619 hist += str(v2)
1620 else:
1621 hist += str(v2)
1622 else:
1623 hist += k
1624 hist += "="
1625 if isinstance(v, scantable):
1626 hist += 'scantable'
1627 elif k == 'mask':
1628 if isinstance(v, list) or isinstance(v, tuple):
1629 hist += str(self._zip_mask(v))
1630 else:
1631 hist += str(v)
1632 else:
1633 hist += str(v)
1634 hist += sep
1635 hist = hist[:-2] # remove trailing '##'
1636 self._addhistory(hist)
1637
1638
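    # Editor's note (not part of the original source): _zip_mask compresses a
    # boolean channel mask into [start, stop) index segments, e.g.
    #   [False, True, True, False, True]  ->  [[1, 3], [4, 5]]
    # which keeps the mask representation in the stored history strings short.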
1639 def _zip_mask(self, mask):
1640 mask = list(mask)
1641 i = 0
1642 segments = []
1643 while mask[i:].count(1):
1644 i += mask[i:].index(1)
1645 if mask[i:].count(0):
1646 j = i + mask[i:].index(0)
1647 else:
1648 j = len(mask)
1649 segments.append([i, j])
1650 i = j
1651 return segments
1652
1653 def _get_ordinate_label(self):
1654 fu = "("+self.get_fluxunit()+")"
1655 import re
1656 lbl = "Intensity"
1657 if re.match(".K.", fu):
1658 lbl = "Brightness Temperature "+ fu
1659 elif re.match(".Jy.", fu):
1660 lbl = "Flux density "+ fu
1661 return lbl
1662
1663 def _check_ifs(self):
1664 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1665 nchans = filter(lambda t: t > 0, nchans)
1666 return (sum(nchans)/len(nchans) == nchans[0])
1667
1668 def _fill(self, names, unit, average):
1669 import os
1670 from asap._asap import stfiller
1671 first = True
1672 fullnames = []
1673 for name in names:
1674 name = os.path.expandvars(name)
1675 name = os.path.expanduser(name)
1676 if not os.path.exists(name):
1677 msg = "File '%s' does not exist" % (name)
1678 if rcParams['verbose']:
1679 asaplog.push(msg)
1680 print asaplog.pop().strip()
1681 return
1682 raise IOError(msg)
1683 fullnames.append(name)
1684 if average:
1685 asaplog.push('Auto averaging integrations')
1686 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1687 for name in fullnames:
1688 tbl = Scantable(stype)
1689 r = stfiller(tbl)
1690 msg = "Importing %s..." % (name)
1691 asaplog.push(msg, False)
1692 print_log()
1693 r._open(name, -1, -1)
1694 r._read()
1695 #tbl = r._getdata()
1696 if average:
1697 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1698 #tbl = tbl2
1699 if not first:
1700 tbl = self._math._merge([self, tbl])
1701 #tbl = tbl2
1702 Scantable.__init__(self, tbl)
1703 r._close()
1704 del r, tbl
1705 first = False
1706 if unit is not None:
1707 self.set_fluxunit(unit)
1708 self.set_freqframe(rcParams['scantable.freqframe'])
1709