source: tags/Release2.1.0b/python/scantable.py@ 1238

Last change on this file since 1238 was 1237, checked in by mar637, 18 years ago

re-enabled history in constructor. history can be written to a file now (auto-overwrite)

1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
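
# A minimal usage sketch for the constructor (the file name 'orion.rpf' and the
# unit choice are illustrative assumptions, not part of this module):
#
#   from asap import scantable
#   scan = scantable('orion.rpf', average=True, unit='K')
#   print scan          # triggers __str__, i.e. the scantable summary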
76
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
80 Image FITS or MS2 format.
81 Parameters:
82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: If the file should be overwritten if it exists.
92 The default False is to return with warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Parameters:
128 none
129 Example:
130 copiedscan = scan.copy()
131 """
132 sd = scantable(Scantable._copy(self))
133 return sd
134
135 def drop_scan(self, scanid=None):
136 """
137 Return a new scantable where the specified scan number(s) has(have)
138 been dropped.
139 Parameters:
140 scanid: a (list of) scan number(s)
141 """
142 from asap import _is_sequence_or_number as _is_valid
143 from asap import _to_list
144 from asap import unique
145 if not _is_valid(scanid):
146 if rcParams['verbose']:
147 print "Please specify a scanno to drop from the scantable"
148 return
149 else:
150 raise RuntimeError("No scan given")
151 try:
152 scanid = _to_list(scanid)
153 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
154 for sid in scanid: allscans.remove(sid)
155 if len(allscans) == 0:
156 raise ValueError("Can't remove all scans")
157 except ValueError:
158 if rcParams['verbose']:
159 print "Couldn't find any match."
160 return
161 else: raise
162 try:
163 bsel = self.get_selection()
164 sel = selector()
165 sel.set_scans(allscans)
166 self.set_selection(bsel+sel)
167 scopy = self._copy()
168 self.set_selection(bsel)
169 return scantable(scopy)
170 except RuntimeError:
171 if rcParams['verbose']:
172 print "Couldn't find any match."
173 else:
174 raise
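
# A short example of drop_scan; the scan numbers are assumed to exist in the
# table and are purely illustrative:
#
#   cleaned = scan.drop_scan([1, 3])   # new scantable without scans 1 and 3
#   cleaned = scan.drop_scan(0)        # a single scan number also works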
175
176
177 def get_scan(self, scanid=None):
178 """
179 Return a specific scan (by scanno) or collection of scans (by
180 source name) in a new scantable.
181 Parameters:
182 scanid: a (list of) scanno or a source name, unix-style
183 patterns are accepted for source name matching, e.g.
184 '*_R' gets all 'ref' scans
185 Example:
186 # get all scans containing the source '323p459'
187 newscan = scan.get_scan('323p459')
188 # get all 'off' scans
189 refscans = scan.get_scan('*_R')
190 # get a subset of scans by scanno (as listed in scan.summary())
191 newscan = scan.get_scan([0, 2, 7, 10])
192 """
193 if scanid is None:
194 if rcParams['verbose']:
195 print "Please specify a scan no or name to " \
196 "retrieve from the scantable"
197 return
198 else:
199 raise RuntimeError("No scan given")
200
201 try:
202 bsel = self.get_selection()
203 sel = selector()
204 if type(scanid) is str:
205 sel.set_name(scanid)
206 self.set_selection(bsel+sel)
207 scopy = self._copy()
208 self.set_selection(bsel)
209 return scantable(scopy)
210 elif type(scanid) is int:
211 sel.set_scans([scanid])
212 self.set_selection(bsel+sel)
213 scopy = self._copy()
214 self.set_selection(bsel)
215 return scantable(scopy)
216 elif type(scanid) is list:
217 sel.set_scans(scanid)
218 self.set_selection(sel)
219 scopy = self._copy()
220 self.set_selection(bsel)
221 return scantable(scopy)
222 else:
223 msg = "Illegal scanid type, use 'int' or 'list' if ints."
224 if rcParams['verbose']:
225 print msg
226 else:
227 raise TypeError(msg)
228 except RuntimeError:
229 if rcParams['verbose']: print "Couldn't find any match."
230 else: raise
231
232 def __str__(self):
233 return Scantable._summary(self, True)
234
235 def summary(self, filename=None):
236 """
237 Print a summary of the contents of this scantable.
238 Parameters:
239 filename: the name of a file to write the output to
240 Default - no file output
241 verbose: print extra info such as the frequency table
242 The default (False) is taken from .asaprc
243 """
244 info = Scantable._summary(self, True)
245 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
246 if filename is not None:
247 if filename == "":
248 filename = 'scantable_summary.txt'
249 from os.path import expandvars, isdir
250 filename = expandvars(filename)
251 if not isdir(filename):
252 data = open(filename, 'w')
253 data.write(info)
254 data.close()
255 else:
256 msg = "Illegal file name '%s'." % (filename)
257 if rcParams['verbose']:
258 print msg
259 else:
260 raise IOError(msg)
261 if rcParams['verbose']:
262 try:
263 from IPython.genutils import page as pager
264 except ImportError:
265 from pydoc import pager
266 pager(info)
267 else:
268 return info
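
# Illustrative calls of summary(); the output file name is an assumption:
#
#   scan.summary()                     # paged to screen when verbose is set
#   txt = scan.summary()               # returned as a string otherwise
#   scan.summary('obs_summary.txt')    # additionally written to a file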
269
270
271 def get_selection(self):
272 """
273 Get the selection object currently set on this scantable.
274 Parameters:
275 none
276 Example:
277 sel = scan.get_selection()
278 sel.set_ifs(0) # select IF 0
279 scan.set_selection(sel) # apply modified selection
280 """
281 return selector(self._getselection())
282
283 def set_selection(self, selection=selector()):
284 """
285 Select a subset of the data. All following operations on this scantable
286 are only applied to this selection.
287 Parameters:
288 selection: a selector object (default unset the selection)
289 Examples:
290 sel = selector() # create a selection object
291 sel.set_scans([0, 3]) # select SCANNO 0 and 3
292 scan.set_selection(sel) # set the selection
293 scan.summary() # will only print summary of scanno 0 and 3
294 scan.set_selection() # unset the selection
295 """
296 self._setselection(selection)
297
298 def stats(self, stat='stddev', mask=None):
299 """
300 Determine the specified statistic of the current beam/if/pol
301 Takes a 'mask' as an optional parameter to specify which
302 channels should be excluded.
303 Parameters:
304 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
305 'var', 'stddev', 'avdev', 'rms', 'median'
306 mask: an optional mask specifying where the statistic
307 should be determined.
308 Example:
309 scan.set_unit('channel')
310 msk = scan.create_mask([100, 200], [500, 600])
311 scan.stats(stat='mean', mask=msk)
312 """
313 if mask is None:
314 mask = []
315 axes = ['Beam', 'IF', 'Pol', 'Time']
316 if not self._check_ifs():
317 raise ValueError("Cannot apply mask as the IFs have different "
318 "number of channels. Please use setselection() "
319 "to select individual IFs")
320
321 statvals = self._math._stats(self, mask, stat)
322 out = ''
323 axes = []
324 for i in range(self.nrow()):
325 axis = []
326 axis.append(self.getscan(i))
327 axis.append(self.getbeam(i))
328 axis.append(self.getif(i))
329 axis.append(self.getpol(i))
330 axis.append(self.getcycle(i))
331 axes.append(axis)
332 tm = self._gettime(i)
333 src = self._getsourcename(i)
334 out += 'Scan[%d] (%s) ' % (axis[0], src)
335 out += 'Time[%s]:\n' % (tm)
336 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
337 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
338 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
339 out += '= %3.3f\n' % (statvals[i])
340 out += "--------------------------------------------------\n"
341
342 if rcParams['verbose']:
343 print "--------------------------------------------------"
344 print " ", stat
345 print "--------------------------------------------------"
346 print out
347 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
348 'axes' : axes,
349 'data': statvals}
350 return retval
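
# Sketch of working with the dictionary returned by stats(); the mask window
# is an arbitrary example:
#
#   msk = scan.create_mask([100, 400])
#   res = scan.stats(stat='rms', mask=msk)
#   for ax, val in zip(res['axes'], res['data']):
#       print ax, val    # [scanno, beamno, ifno, polno, cycleno], statistic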
351
352 def stddev(self, mask=None):
353 """
354 Determine the standard deviation of the current beam/if/pol
355 Takes a 'mask' as an optional parameter to specify which
356 channels should be excluded.
357 Parameters:
358 mask: an optional mask specifying where the standard
359 deviation should be determined.
360
361 Example:
362 scan.set_unit('channel')
363 msk = scan.create_mask([100, 200], [500, 600])
364 scan.stddev(mask=msk)
365 """
366 return self.stats(stat='stddev', mask=mask);
367
368
369 def get_column_names(self):
370 """
371 Return a list of column names, which can be used for selection.
372 """
373 return list(Scantable.get_column_names(self))
374
375 def get_tsys(self):
376 """
377 Return the System temperatures.
378 Parameters:
379
380 Returns:
381 a list of Tsys values for the current selection
382 """
383
384 return self._row_callback(self._gettsys, "Tsys")
385
386 def _row_callback(self, callback, label):
387 axes = []
388 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
389 out = ""
390 outvec = []
391 for i in range(self.nrow()):
392 axis = []
393 axis.append(self.getscan(i))
394 axis.append(self.getbeam(i))
395 axis.append(self.getif(i))
396 axis.append(self.getpol(i))
397 axis.append(self.getcycle(i))
398 axes.append(axis)
399 tm = self._gettime(i)
400 src = self._getsourcename(i)
401 out += 'Scan[%d] (%s) ' % (axis[0], src)
402 out += 'Time[%s]:\n' % (tm)
403 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
404 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
405 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
406 outvec.append(callback(i))
407 out += '= %3.3f\n' % (outvec[i])
408 out += "--------------------------------------------------\n"
409 if rcParams['verbose']:
410 print "--------------------------------------------------"
411 print " %s" % (label)
412 print "--------------------------------------------------"
413 print out
414 # disabled because the vector seems more useful
415 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
416 return outvec
417
418 def _get_column(self, callback, row=-1):
419 """
420 """
421 if row == -1:
422 return [callback(i) for i in range(self.nrow())]
423 else:
424 if 0 <= row < self.nrow():
425 return callback(row)
426
427
428 def get_time(self, row=-1):
429 """
430 Get a list of time stamps for the observations.
431 Return a string for each integration in the scantable.
432 Parameters:
433 row: row no of integration. Default -1 return all rows
434 Example:
435 none
436 """
437 from time import strptime
438 from datetime import datetime
439 times = self._get_column(self._gettime, row)
440 format = "%Y/%m/%d/%H:%M:%S"
441 if isinstance(times, list):
442 return [datetime(*strptime(i, format)[:6]) for i in times]
443 else:
444 return datetime(*strptime(times, format)[:6])
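
# Example (hedged): the returned datetime objects support normal datetime
# arithmetic.
#
#   times = scan.get_time()            # list of datetime.datetime objects
#   if len(times) > 1:
#       print times[-1] - times[0]     # elapsed time of the observation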
445
446 def get_sourcename(self, row=-1):
447 """
448 Get a list of source names for the observations.
449 Return a string for each integration in the scantable.
450 Parameters:
451 row: row no of integration. Default -1 return all rows
452 Example:
453 none
454 """
455 return self._get_column(self._getsourcename, row)
456
457 def get_elevation(self, row=-1):
458 """
459 Get a list of elevations for the observations.
460 Return a float for each integration in the scantable.
461 Parameters:
462 row: row no of integration. Default -1 return all rows
463 Example:
464 none
465 """
466 return self._get_column(self._getelevation, row)
467
468 def get_azimuth(self, row=-1):
469 """
470 Get a list of azimuths for the observations.
471 Return a float for each integration in the scantable.
472 Parameters:
473 row: row no of integration. Default -1 return all rows
474 Example:
475 none
476 """
477 return self._get_column(self._getazimuth, row)
478
479 def get_parangle(self, row=-1):
480 """
481 Get a list of parallactic angles for the observations.
482 Return a float for each integration in the scantable.
483 Parameters:
484 row: row no of integration. Default -1 return all rows
485 Example:
486 none
487 """
488 return self._get_column(self._getparangle, row)
489
490 def get_direction(self, row=-1):
491 """
492 Get a list of Positions on the sky (direction) for the observations.
493 Return a float for each integration in the scantable.
494 Parameters:
495 row: row no of integration. Default -1 return all rows
496 Example:
497 none
498 """
499 return self._get_column(self._getdirection, row)
500
501 def set_unit(self, unit='channel'):
502 """
503 Set the unit for all following operations on this scantable
504 Parameters:
505 unit: optional unit, default is 'channel'
506 one of '*Hz', 'km/s', 'channel', ''
507 """
508 varlist = vars()
509 if unit in ['', 'pixel', 'channel']:
510 unit = ''
511 inf = list(self._getcoordinfo())
512 inf[0] = unit
513 self._setcoordinfo(inf)
514 self._add_history("set_unit", varlist)
515
516 def set_instrument(self, instr):
517 """
518 Set the instrument for subsequent processing
519 Parameters:
520 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
521 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
522 """
523 self._setInstrument(instr)
524 self._add_history("set_instrument", vars())
525 print_log()
526
527 def set_feedtype(self, feedtype):
528 """
529 Overwrite the feed type, which might not be set correctly.
530 Parameters:
531 feedtype: 'linear' or 'circular'
532 """
533 self._setfeedtype(feedtype)
534 self._add_history("set_feedtype", vars())
535 print_log()
536
537 def set_doppler(self, doppler='RADIO'):
538 """
539 Set the doppler for all following operations on this scantable.
540 Parameters:
541 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
542 """
543 varlist = vars()
544 inf = list(self._getcoordinfo())
545 inf[2] = doppler
546 self._setcoordinfo(inf)
547 self._add_history("set_doppler", vars())
548 print_log()
549
550 def set_freqframe(self, frame=None):
551 """
552 Set the frame type of the Spectral Axis.
553 Parameters:
554 frame: an optional frame type, default 'LSRK'. Valid frames are:
555 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
556 'GEO', 'GALACTO', 'LGROUP', 'CMB'
557 Examples:
558 scan.set_freqframe('BARY')
559 """
560 if frame is None: frame = rcParams['scantable.freqframe']
561 varlist = vars()
562 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
563 'GEO', 'GALACTO', 'LGROUP', 'CMB']
564
565 if frame in valid:
566 inf = list(self._getcoordinfo())
567 inf[1] = frame
568 self._setcoordinfo(inf)
569 self._add_history("set_freqframe", varlist)
570 else:
571 msg = "Please specify a valid freq type. Valid types are:\n", valid
572 if rcParams['verbose']:
573 print msg
574 else:
575 raise TypeError(msg)
576 print_log()
577
578 def set_dirframe(self, frame=""):
579 """
580 Set the frame type of the Direction on the sky.
581 Parameters:
582 frame: an optional frame type, default ''. Valid frames are:
583 'J2000', 'B1950', 'GALACTIC'
584 Examples:
585 scan.set_dirframe('GALACTIC')
586 """
587 varlist = vars()
588 try:
589 Scantable.set_dirframe(self, frame)
590 except RuntimeError, msg:
591 if rcParams['verbose']:
592 print msg
593 else:
594 raise
595 self._add_history("set_dirframe", varlist)
596
597 def get_unit(self):
598 """
599 Get the default unit set in this scantable
600 Parameters:
601 Returns:
602 A unit string
603 """
604 inf = self._getcoordinfo()
605 unit = inf[0]
606 if unit == '': unit = 'channel'
607 return unit
608
609 def get_abcissa(self, rowno=0):
610 """
611 Get the abcissa in the current coordinate setup for the currently
612 selected Beam/IF/Pol
613 Parameters:
614 rowno: an optional row number in the scantable. Default is the
615 first row, i.e. rowno=0
616 Returns:
617 The abcissa values and its format string (as a dictionary)
618 """
619 abc = self._getabcissa(rowno)
620 lbl = self._getabcissalabel(rowno)
621 print_log()
622 return abc, lbl
623
624 def flag(self, mask=None):
625 """
626 Flag the selected data using an optional channel mask.
627 Parameters:
628 mask: an optional channel mask, created with create_mask. Default
629 (no mask) is all channels.
630 """
631 varlist = vars()
632 if mask is None:
633 mask = []
634 try:
635 self._flag(mask)
636 except RuntimeError, msg:
637 if rcParams['verbose']:
638 print msg
639 return
640 else: raise
641 self._add_history("flag", varlist)
642
643 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
644 """
645 Flag the data in 'lag' space by providing a frequency to remove.
646 Flagged data in the scantable gets set to 0.0 before the fft.
647 No taper is applied.
648 Parameters:
649 frequency: the frequency (really a period within the bandwidth) to remove
650 width: the width of the frequency to remove, to remove a range
651 of frequencies around the centre.
652 unit: the frequency unit (default "GHz")
653 Notes:
654 It is recommended to flag edges of the band or strong signals beforehand.
655 """
656 if insitu is None: insitu = rcParams['insitu']
657 self._math._setinsitu(insitu)
658 varlist = vars()
659 base = { "GHz": 100000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
660 if not base.has_key(unit):
661 raise ValueError("%s is not a valid unit." % unit)
662 try:
663 s = scantable(self._math._lag_flag(self, frequency*base[unit],
664 width*base[unit]))
665 except RuntimeError, msg:
666 if rcParams['verbose']:
667 print msg
668 return
669 else: raise
670 s._add_history("lag_flag", varlist)
671 print_log()
672 if insitu:
673 self._assign(s)
674 else:
675 return s
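
# A hedged example of lag_flag; the numbers are made up and depend entirely
# on the ripple present in your data:
#
#   cleaned = scan.lag_flag(1.2, width=0.1, unit="MHz", insitu=False)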
676
677
678 def create_mask(self, *args, **kwargs):
679 """
680 Compute and return a mask based on [min, max] windows.
681 The specified windows are to be INCLUDED, when the mask is
682 applied.
683 Parameters:
684 [min, max], [min2, max2], ...
685 Pairs of start/end points (inclusive) specifying the regions
686 to be masked
687 invert: optional argument. If specified as True,
688 return an inverted mask, i.e. the regions
689 specified are EXCLUDED
690 row: create the mask using the specified row for
691 unit conversions, default is row=0
692 only necessary if frequency varies over rows.
693 Example:
694 scan.set_unit('channel')
695 a)
696 msk = scan.create_mask([400, 500], [800, 900])
697 # includes (selects) the regions between 400 and 500
698 # and 800 and 900 in the unit 'channel'
699
700 b)
701 msk = scan.create_mask([400, 500], [800, 900], invert=True)
702 # excludes the regions between 400 and 500
703 # and 800 and 900 in the unit 'channel'
704 c)
705 mask only channel 400
706 msk = scan.create_mask([400, 400])
707 """
708 row = 0
709 if kwargs.has_key("row"):
710 row = kwargs.get("row")
711 data = self._getabcissa(row)
712 u = self._getcoordinfo()[0]
713 if rcParams['verbose']:
714 if u == "": u = "channel"
715 msg = "The current mask window unit is %s" % u
716 i = self._check_ifs()
717 if not i:
718 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
719 asaplog.push(msg)
720 n = self.nchan()
721 msk = NUM.zeros(n)
722 # test if args is a 'list' or a 'normal' *args - UGLY!!!
723
724 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
725 and args or args[0]
726 for window in ws:
727 if (len(window) != 2 or window[0] > window[1] ):
728 raise TypeError("A window needs to be defined as [min, max]")
729 for i in range(n):
730 if data[i] >= window[0] and data[i] <= window[1]:
731 msk[i] = 1
732 if kwargs.has_key('invert'):
733 if kwargs.get('invert'):
734 msk = NUM.logical_not(msk)
735 print_log()
736 return msk
737
738 def get_restfreqs(self):
739 """
740 Get the restfrequency(s) stored in this scantable.
741 The return value(s) are always of unit 'Hz'
742 Parameters:
743 none
744 Returns:
745 a list of doubles
746 """
747 return list(self._getrestfreqs())
748
749
750 def set_restfreqs(self, freqs=None, unit='Hz'):
751 """
752 Set or replace the rest frequencies stored in this scantable.
753 If the 'freqs' argument holds a scalar,
754 then that rest frequency will be applied to all the selected
755 data. If the 'freqs' argument holds
756 a vector, then it MUST be of equal or smaller length than
757 the number of IFs (and the available restfrequencies will be
758 replaced by this vector). In this case, *all* data have
759 the restfrequency set per IF according
760 to the corresponding value you give in the 'freqs' vector.
761 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
762 IF 1 gets restfreq 2e9.
763 You can also specify the frequencies via a linecatalog.
764
765 Parameters:
766 freqs: list of rest frequency values or string identifiers
767 unit: unit for rest frequency (default 'Hz')
768
769 Example:
770 # set the given restfrequency for the whole table
771 scan.set_restfreqs(freqs=1.4e9)
772 # If the number of IFs in the data is >= 2 then IF0 gets the first
773 # value IF1 the second...
774 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
775 #set the given restfrequency for the whole table (by name)
776 scan.set_restfreqs(freqs="OH1667")
777
778 Note:
779 To do more sophisticated rest-frequency setting, e.g. on a
780 source and IF basis, use scantable.set_selection() before using
781 this function.
782 # provided your scantable is called scan
783 selection = selector()
784 selection.set_name("ORION*")
785 selection.set_ifs([1])
786 scan.set_selection(selection)
787 scan.set_restfreqs(freqs=86.6e9)
788
789 """
790 varlist = vars()
791 from asap import linecatalog
792 # simple value
793 if isinstance(freqs, int) or isinstance(freqs, float):
794 self._setrestfreqs(freqs, "",unit)
795 # list of values
796 elif isinstance(freqs, list) or isinstance(freqs, tuple):
797 # list values are scalars
798 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
799 sel = selector()
800 savesel = self._getselection()
801 for i in xrange(len(freqs)):
802 sel.set_ifs([i])
803 self._setselection(sel)
804 self._setrestfreqs(freqs[i], "",unit)
805 self._setselection(savesel)
806 # list values are dicts, {'value': &lt;freq&gt;, 'name': &lt;name&gt;}
807 elif isinstance(freqs[-1], dict):
808 sel = selector()
809 savesel = self._getselection()
810 for i in xrange(len(freqs)):
811 sel.set_ifs([i])
812 self._setselection(sel)
813 self._setrestfreqs(freqs[i]["value"],
814 freqs[i]["name"], "MHz")
815 self._setselection(savesel)
816 # freqs are to be taken from a linecatalog
817 elif isinstance(freqs, linecatalog):
818 sel = selector()
819 savesel = self._getselection()
820 for i in xrange(freqs.nrow()):
821 sel.set_ifs([i])
822 self._setselection(sel)
823 self._setrestfreqs(freqs.get_frequency(i),
824 freqs.get_name(i), "MHz")
825 # ensure that we are not iterating past nIF
826 if i == self.nif()-1: break
827 self._setselection(savesel)
828 else:
829 return
830 self._add_history("set_restfreqs", varlist)
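
# Sketch of the linecatalog path mentioned in the docstring above. Only the
# set_restfreqs(freqs=&lt;linecatalog&gt;) call is taken from this module; the
# catalogue file name and the linecatalog constructor usage are assumptions:
#
#   from asap import linecatalog
#   lc = linecatalog('mylines.tbl')    # hypothetical line-catalogue table
#   scan.set_restfreqs(freqs=lc)       # one catalogue entry per IF, in MHz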
831
832
833
834 def history(self, filename=None):
835 """
836 Print the history. Optionally to a file.
837 """
838 hist = list(self._gethistory())
839 out = "-"*80
840 for h in hist:
841 if h.startswith("---"):
842 out += "\n"+h
843 else:
844 items = h.split("##")
845 date = items[0]
846 func = items[1]
847 items = items[2:]
848 out += "\n"+date+"\n"
849 out += "Function: %s\n Parameters:" % (func)
850 for i in items:
851 s = i.split("=")
852 out += "\n %s = %s" % (s[0], s[1])
853 out += "\n"+"-"*80
854 if filename is not None:
855 if filename == "":
856 filename = 'scantable_history.txt'
857 import os
858 filename = os.path.expandvars(os.path.expanduser(filename))
859 if not os.path.isdir(filename):
860 data = open(filename, 'w')
861 data.write(out)
862 data.close()
863 else:
864 msg = "Illegal file name '%s'." % (filename)
865 if rcParams['verbose']:
866 print msg
867 else:
868 raise IOError(msg)
869 if rcParams['verbose']:
870 try:
871 from IPython.genutils import page as pager
872 except ImportError:
873 from pydoc import pager
874 pager(out)
875 else:
876 return out
877 return
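
# Example of the history feature; the file name is illustrative, and an empty
# string falls back to 'scantable_history.txt' as coded above:
#
#   scan.history()                     # page or return the history text
#   scan.history('myscan_history.txt') # write it to a file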
878 #
879 # Maths business
880 #
881
882 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
883 """
884 Return the (time) weighted average of a scan.
885 Note:
886 in channels only - align if necessary
887 Parameters:
888 mask: an optional mask (only used for 'var' and 'tsys'
889 weighting)
890 scanav: True averages each scan separately
891 False (default) averages all scans together,
892 weight: Weighting scheme.
893 'none' (mean no weight)
894 'var' (1/var(spec) weighted)
895 'tsys' (1/Tsys**2 weighted)
896 'tint' (integration time weighted)
897 'tintsys' (Tint/Tsys**2)
898 'median' ( median averaging)
899 The default is 'tint'
900 align: align the spectra in velocity before averaging. It takes
901 the time of the first spectrum as reference time.
902 Example:
903 # time average the scantable without using a mask
904 newscan = scan.average_time()
905 """
906 varlist = vars()
907 if weight is None: weight = 'TINT'
908 if mask is None: mask = ()
909 if scanav: scanav = "SCAN"
910 else: scanav = "NONE"
911 scan = (self, )
912 try:
913 if align:
914 scan = (self.freq_align(insitu=False), )
915 s = None
916 if weight.upper() == 'MEDIAN':
917 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
918 scanav))
919 else:
920 s = scantable(self._math._average(scan, mask, weight.upper(),
921 scanav))
922 except RuntimeError, msg:
923 if rcParams['verbose']:
924 print msg
925 return
926 else: raise
927 s._add_history("average_time", varlist)
928 print_log()
929 return s
930
931 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
932 """
933 Return a scan where all spectra are converted to either
934 Jansky or Kelvin depending upon the flux units of the scan table.
935 By default the function tries to look the values up internally.
936 If it can't find them (or if you want to over-ride), you must
937 specify EITHER jyperk OR eta (and D which it will try to look up
938 also if you don't set it). jyperk takes precedence if you set both.
939 Parameters:
940 jyperk: the Jy / K conversion factor
941 eta: the aperture efficiency
942 d: the geometric diameter (metres)
943 insitu: if False a new scantable is returned.
944 Otherwise, the scaling is done in-situ
945 The default is taken from .asaprc (False)
946 """
947 if insitu is None: insitu = rcParams['insitu']
948 self._math._setinsitu(insitu)
949 varlist = vars()
950 if jyperk is None: jyperk = -1.0
951 if d is None: d = -1.0
952 if eta is None: eta = -1.0
953 s = scantable(self._math._convertflux(self, d, eta, jyperk))
954 s._add_history("convert_flux", varlist)
955 print_log()
956 if insitu: self._assign(s)
957 else: return s
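
# Hedged examples of convert_flux; the Jy/K factor, efficiency and diameter
# below are placeholders, not recommended values:
#
#   jy = scan.convert_flux(jyperk=12.7, insitu=False)        # force a Jy/K factor
#   jy = scan.convert_flux(eta=0.58, d=64.0, insitu=False)   # or eta + diameter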
958
959 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
960 """
961 Return a scan after applying a gain-elevation correction.
962 The correction can be made via either a polynomial or a
963 table-based interpolation (and extrapolation if necessary).
964 You specify polynomial coefficients, an ascii table or neither.
965 If you specify neither, then a polynomial correction will be made
966 with built in coefficients known for certain telescopes (an error
967 will occur if the instrument is not known).
968 The data and Tsys are *divided* by the scaling factors.
969 Parameters:
970 poly: Polynomial coefficients (default None) to compute a
971 gain-elevation correction as a function of
972 elevation (in degrees).
973 filename: The name of an ascii file holding correction factors.
974 The first row of the ascii file must give the column
975 names and these MUST include columns
976 "ELEVATION" (degrees) and "FACTOR" (multiply data
977 by this) somewhere.
978 The second row must give the data type of the
979 column. Use 'R' for Real and 'I' for Integer.
980 An example file would be
981 (actual factors are arbitrary) :
982
983 TIME ELEVATION FACTOR
984 R R R
985 0.1 0 0.8
986 0.2 20 0.85
987 0.3 40 0.9
988 0.4 60 0.85
989 0.5 80 0.8
990 0.6 90 0.75
991 method: Interpolation method when correcting from a table.
992 Values are "nearest", "linear" (default), "cubic"
993 and "spline"
994 insitu: if False a new scantable is returned.
995 Otherwise, the scaling is done in-situ
996 The default is taken from .asaprc (False)
997 """
998
999 if insitu is None: insitu = rcParams['insitu']
1000 self._math._setinsitu(insitu)
1001 varlist = vars()
1002 if poly is None:
1003 poly = ()
1004 from os.path import expandvars
1005 filename = expandvars(filename)
1006 s = scantable(self._math._gainel(self, poly, filename, method))
1007 s._add_history("gain_el", varlist)
1008 print_log()
1009 if insitu: self._assign(s)
1010 else: return s
1011
1012 def freq_align(self, reftime=None, method='cubic', insitu=None):
1013 """
1014 Return a scan where all rows have been aligned in frequency/velocity.
1015 The alignment frequency frame (e.g. LSRK) is that set by function
1016 set_freqframe.
1017 Parameters:
1018 reftime: reference time to align at. By default, the time of
1019 the first row of data is used.
1020 method: Interpolation method for regridding the spectra.
1021 Choose from "nearest", "linear", "cubic" (default)
1022 and "spline"
1023 insitu: if False a new scantable is returned.
1024 Otherwise, the scaling is done in-situ
1025 The default is taken from .asaprc (False)
1026 """
1027 if insitu is None: insitu = rcParams["insitu"]
1028 self._math._setinsitu(insitu)
1029 varlist = vars()
1030 if reftime is None: reftime = ""
1031 s = scantable(self._math._freq_align(self, reftime, method))
1032 s._add_history("freq_align", varlist)
1033 print_log()
1034 if insitu: self._assign(s)
1035 else: return s
1036
1037 def opacity(self, tau, insitu=None):
1038 """
1039 Apply an opacity correction. The data
1040 and Tsys are multiplied by the correction factor.
1041 Parameters:
1042 tau: Opacity from which the correction factor is
1043 exp(tau*ZD)
1044 where ZD is the zenith-distance
1045 insitu: if False a new scantable is returned.
1046 Otherwise, the scaling is done in-situ
1047 The default is taken from .asaprc (False)
1048 """
1049 if insitu is None: insitu = rcParams['insitu']
1050 self._math._setinsitu(insitu)
1051 varlist = vars()
1052 s = scantable(self._math._opacity(self, tau))
1053 s._add_history("opacity", varlist)
1054 print_log()
1055 if insitu: self._assign(s)
1056 else: return s
1057
1058 def bin(self, width=5, insitu=None):
1059 """
1060 Return a scan where all spectra have been binned up.
1061 width: The bin width (default=5) in pixels
1062 insitu: if False a new scantable is returned.
1063 Otherwise, the scaling is done in-situ
1064 The default is taken from .asaprc (False)
1065 """
1066 if insitu is None: insitu = rcParams['insitu']
1067 self._math._setinsitu(insitu)
1068 varlist = vars()
1069 s = scantable(self._math._bin(self, width))
1070 s._add_history("bin", varlist)
1071 print_log()
1072 if insitu: self._assign(s)
1073 else: return s
1074
1075
1076 def resample(self, width=5, method='cubic', insitu=None):
1077 """
1078 Return a scan where all spectra have been binned up
1079 width: The bin width (default=5) in pixels
1080 method: Interpolation method used for the resampling.
1081 Values are "nearest", "linear", "cubic" (default)
1082 and "spline"
1083 insitu: if False a new scantable is returned.
1084 Otherwise, the scaling is done in-situ
1085 The default is taken from .asaprc (False)
1086 """
1087 if insitu is None: insitu = rcParams['insitu']
1088 self._math._setinsitu(insitu)
1089 varlist = vars()
1090 s = scantable(self._math._resample(self, method, width))
1091 s._add_history("resample", varlist)
1092 print_log()
1093 if insitu: self._assign(s)
1094 else: return s
1095
1096
1097 def average_pol(self, mask=None, weight='none'):
1098 """
1099 Average the Polarisations together.
1100 Parameters:
1101 mask: An optional mask defining the region, where the
1102 averaging will be applied. The output will have all
1103 specified points masked.
1104 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1105 weighted), or 'tsys' (1/Tsys**2 weighted)
1106 """
1107 varlist = vars()
1108 if mask is None:
1109 mask = ()
1110 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1111 s._add_history("average_pol", varlist)
1112 print_log()
1113 return s
1114
1115 def average_beam(self, mask=None, weight='none'):
1116 """
1117 Average the Beams together.
1118 Parameters:
1119 mask: An optional mask defining the region, where the
1120 averaging will be applied. The output will have all
1121 specified points masked.
1122 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1123 weighted), or 'tsys' (1/Tsys**2 weighted)
1124 """
1125 varlist = vars()
1126 if mask is None:
1127 mask = ()
1128 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1129 s._add_history("average_beam", varlist)
1130 print_log()
1131 return s
1132
1133 def convert_pol(self, poltype=None):
1134 """
1135 Convert the data to a different polarisation type.
1136 Parameters:
1137 poltype: The new polarisation type. Valid types are:
1138 "linear", "stokes" and "circular"
1139 """
1140 varlist = vars()
1141 try:
1142 s = scantable(self._math._convertpol(self, poltype))
1143 except RuntimeError, msg:
1144 if rcParams['verbose']:
1145 print msg
1146 return
1147 else:
1148 raise
1149 s._add_history("convert_pol", varlist)
1150 print_log()
1151 return s
1152
1153 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1154 """
1155 Smooth the spectrum by the specified kernel (conserving flux).
1156 Parameters:
1157 scan: The input scan
1158 kernel: The type of smoothing kernel. Select from
1159 'hanning' (default), 'gaussian' and 'boxcar'.
1160 The first three characters are sufficient.
1161 width: The width of the kernel in pixels. For hanning this is
1162 ignored; otherwise it defaults to 5 pixels.
1163 For 'gaussian' it is the Full Width Half
1164 Maximum. For 'boxcar' it is the full width.
1165 insitu: if False a new scantable is returned.
1166 Otherwise, the scaling is done in-situ
1167 The default is taken from .asaprc (False)
1168 Example:
1169 none
1170 """
1171 if insitu is None: insitu = rcParams['insitu']
1172 self._math._setinsitu(insitu)
1173 varlist = vars()
1174 s = scantable(self._math._smooth(self, kernel.lower(), width))
1175 s._add_history("smooth", varlist)
1176 print_log()
1177 if insitu: self._assign(s)
1178 else: return s
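
# Illustrative smoothing calls; the kernel widths are arbitrary examples:
#
#   sm = scan.smooth(kernel='gaussian', width=10, insitu=False)  # FWHM in pixels
#   sm = scan.smooth(kernel='boxcar', width=7, insitu=False)     # full width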
1179
1180
1181 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1182 """
1183 Return a scan which has been baselined (all rows) by a polynomial.
1184 Parameters:
1185 scan: a scantable
1186 mask: an optional mask
1187 order: the order of the polynomial (default is 0)
1188 plot: plot the fit and the residual. In this case each
1189 individual fit has to be approved, by typing 'y'
1190 or 'n'
1191 insitu: if False a new scantable is returned.
1192 Otherwise, the scaling is done in-situ
1193 The default is taken from .asaprc (False)
1194 Example:
1195 # return a scan baselined by a third order polynomial,
1196 # not using a mask
1197 bscan = scan.poly_baseline(order=3)
1198 """
1199 if insitu is None: insitu = rcParams['insitu']
1200 varlist = vars()
1201 if mask is None:
1202 mask = list(NUM.ones(self.nchan(-1)))
1203 from asap.asapfitter import fitter
1204 f = fitter()
1205 f.set_scan(self, mask)
1206 f.set_function(poly=order)
1207 s = f.auto_fit(insitu, plot=plot)
1208 s._add_history("poly_baseline", varlist)
1209 print_log()
1210 if insitu: self._assign(s)
1211 else: return s
1212
1213 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1214 threshold=3, plot=False, insitu=None):
1215 """
1216 Return a scan which has been baselined (all rows) by a polynomial.
1217 Spectral lines are detected first using linefinder and masked out
1218 to avoid them affecting the baseline solution.
1219
1220 Parameters:
1221 mask: an optional mask retrieved from scantable
1222 edge: an optional number of channels to drop at
1223 the edge of the spectrum. If only one value is
1224 specified, the same number will be dropped from
1225 both sides of the spectrum. Default is to keep
1226 all channels. Nested tuples represent individual
1227 edge selections for different IFs (the number of spectral
1228 channels can differ between IFs)
1229 order: the order of the polynomial (default is 0)
1230 threshold: the threshold used by line finder. It is better to
1231 keep it large as only strong lines affect the
1232 baseline solution.
1233 plot: plot the fit and the residual. In this case each
1234 individual fit has to be approved, by typing 'y'
1235 or 'n'
1236 insitu: if False a new scantable is returned.
1237 Otherwise, the scaling is done in-situ
1238 The default is taken from .asaprc (False)
1239
1240 Example:
1241 scan2=scan.auto_poly_baseline(order=7)
1242 """
1243 if insitu is None: insitu = rcParams['insitu']
1244 varlist = vars()
1245 from asap.asapfitter import fitter
1246 from asap.asaplinefind import linefinder
1247 from asap import _is_sequence_or_number as _is_valid
1248
1249 # check whether edge is set up for each IF individually
1250 individualedge = False;
1251 if len(edge) > 1:
1252 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1253 individualedge = True;
1254
1255 if not _is_valid(edge, int) and not individualedge:
1256 raise ValueError, "Parameter 'edge' has to be an integer or a \
1257 pair of integers specified as a tuple. Nested tuples are allowed \
1258 to make individual selection for different IFs."
1259
1260 curedge = (0, 0)
1261 if individualedge:
1262 for edgepar in edge:
1263 if not _is_valid(edgepar, int):
1264 raise ValueError, "Each element of the 'edge' tuple has \
1265 to be a pair of integers or an integer."
1266 else:
1267 curedge = edge;
1268
1269 # setup fitter
1270 f = fitter()
1271 f.set_function(poly=order)
1272
1273 # setup line finder
1274 fl = linefinder()
1275 fl.set_options(threshold=threshold)
1276
1277 if not insitu:
1278 workscan = self.copy()
1279 else:
1280 workscan = self
1281
1282 fl.set_scan(workscan)
1283
1284 rows = range(workscan.nrow())
1285 asaplog.push("Processing:")
1286 for r in rows:
1287 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1288 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1289 workscan.getpol(r), workscan.getcycle(r))
1290 asaplog.push(msg, False)
1291
1292 # figure out edge parameter
1293 if individualedge:
1294 if len(edge) &lt;= workscan.getif(r):
1295 raise RuntimeError, "Number of edge elements appear to " \
1296 "be less than the number of IFs"
1297 curedge = edge[workscan.getif(r)]
1298
1299 # setup line finder
1300 fl.find_lines(r, mask, curedge)
1301 f.set_scan(workscan, fl.get_mask())
1302 f.x = workscan._getabcissa(r)
1303 f.y = workscan._getspectrum(r)
1304 f.data = None
1305 f.fit()
1306 x = f.get_parameters()
1307 if plot:
1308 f.plot(residual=True)
1309 x = raw_input("Accept fit ( [y]/n ): ")
1310 if x.upper() == 'N':
1311 continue
1312 workscan._setspectrum(f.fitter.getresidual(), r)
1313 if plot:
1314 f._p.unmap()
1315 f._p = None
1316 workscan._add_history("auto_poly_baseline", varlist)
1317 if insitu:
1318 self._assign(workscan)
1319 else:
1320 return workscan
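
# A hedged sketch of the per-IF edge handling described above; the edge values
# and polynomial order are arbitrary:
#
#   # drop 10 channels per side for IF0 and 30 per side for IF1
#   bscan = scan.auto_poly_baseline(order=2, edge=[(10, 10), (30, 30)],
#                                   insitu=False)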
1321
1322 def rotate_linpolphase(self, angle):
1323 """
1324 Rotate the phase of the complex polarization O=Q+iU correlation.
1325 This is always done in situ in the raw data. So if you call this
1326 function more than once then each call rotates the phase further.
1327 Parameters:
1328 angle: The angle (degrees) to rotate (add) by.
1329 Examples:
1330 scan.rotate_linpolphase(2.3)
1331 """
1332 varlist = vars()
1333 self._math._rotate_linpolphase(self, angle)
1334 self._add_history("rotate_linpolphase", varlist)
1335 print_log()
1336 return
1337
1338
1339 def rotate_xyphase(self, angle):
1340 """
1341 Rotate the phase of the XY correlation. This is always done in situ
1342 in the data. So if you call this function more than once
1343 then each call rotates the phase further.
1344 Parameters:
1345 angle: The angle (degrees) to rotate (add) by.
1346 Examples:
1347 scan.rotate_xyphase(2.3)
1348 """
1349 varlist = vars()
1350 self._math._rotate_xyphase(self, angle)
1351 self._add_history("rotate_xyphase", varlist)
1352 print_log()
1353 return
1354
1355 def swap_linears(self):
1356 """
1357 Swap the linear polarisations XX and YY
1358 """
1359 varlist = vars()
1360 self._math._swap_linears(self)
1361 self._add_history("swap_linears", varlist)
1362 print_log()
1363 return
1364
1365 def invert_phase(self):
1366 """
1367 Invert the phase of the complex polarisation
1368 """
1369 varlist = vars()
1370 self._math._invert_phase(self)
1371 self._add_history("invert_phase", varlist)
1372 print_log()
1373 return
1374
1375 def add(self, offset, insitu=None):
1376 """
1377 Return a scan where all spectra have the offset added
1378 Parameters:
1379 offset: the offset
1380 insitu: if False a new scantable is returned.
1381 Otherwise, the scaling is done in-situ
1382 The default is taken from .asaprc (False)
1383 """
1384 if insitu is None: insitu = rcParams['insitu']
1385 self._math._setinsitu(insitu)
1386 varlist = vars()
1387 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1388 s._add_history("add", varlist)
1389 print_log()
1390 if insitu:
1391 self._assign(s)
1392 else:
1393 return s
1394
1395 def scale(self, factor, tsys=True, insitu=None, ):
1396 """
1397 Return a scan where all spectra are scaled by the given 'factor'
1398 Parameters:
1399 factor: the scaling factor
1400 insitu: if False a new scantable is returned.
1401 Otherwise, the scaling is done in-situ
1402 The default is taken from .asaprc (False)
1403 tsys: if True (default) then apply the operation to Tsys
1404 as well as the data
1405 """
1406 if insitu is None: insitu = rcParams['insitu']
1407 self._math._setinsitu(insitu)
1408 varlist = vars()
1409 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1410 s._add_history("scale", varlist)
1411 print_log()
1412 if insitu:
1413 self._assign(s)
1414 else:
1415 return s
1416
1417 def auto_quotient(self, mode='time', preserve=True):
1418 """
1419 This function allows you to build quotients automatically.
1420 It assumes the observation to have the same number of
1421 "ons" and "offs".
1422 It will support "closest off in time" in the future.
1423 Parameters:
1424 mode: the on/off detection mode; currently only 'time'
1425 (the default) is accepted. A 'suffix' mode, identifying
1426 'off' scans by the trailing '_R' (Mopra/Parkes) or
1427 '_e'/'_w' (Tid), is not available yet.
1428 preserve: you can preserve (default) the continuum or
1429 remove it. The equations used are
1430 preserve: Output = Toff * (on/off) - Toff
1431 remove: Output = Toff * (on/off) - Ton
1432 """
1433 modes = ["time"]
1434 if not mode in modes:
1435 msg = "please provide valid mode. Valid modes are %s" % (modes)
1436 raise ValueError(msg)
1437 varlist = vars()
1438 s = scantable(self._math._auto_quotient(self, mode, preserve))
1439 s._add_history("auto_quotient", varlist)
1440 print_log()
1441 return s
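
# Minimal example of auto_quotient (assumes the scantable contains matching
# on/off scans):
#
#   q = scan.auto_quotient()                 # 'time' mode, continuum preserved
#   q = scan.auto_quotient(preserve=False)   # remove the continuum instead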
1442
1443 def mx_quotient(self, mask = None, weight='median', preserve=True):
1444 """
1445 Form a quotient using "off" beams when observing in "MX" mode.
1446 Parameters:
1447 mask: an optional mask to be used when weight == 'stddev'
1448 weight: How to average the off beams. Default is 'median'.
1449 preserve: you can preserve (default) the continuum or
1450 remove it. The equations used are
1451 preserve: Output = Toff * (on/off) - Toff
1452 remove: Output = Toff * (on/off) - Ton
1453 """
1454 if mask is None: mask = ()
1455 varlist = vars()
1456 on = scantable(self._math._mx_extract(self, 'on'))
1457 preoff = scantable(self._math._mx_extract(self, 'off'))
1458 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1459 from asapmath import quotient
1460 q = quotient(on, off, preserve)
1461 q._add_history("mx_quotient", varlist)
1462 print_log()
1463 return q
1464
1465 def freq_switch(self, insitu=None):
1466 """
1467 Apply frequency switching to the data.
1468 Parameters:
1469 insitu: if False a new scantable is returned.
1470 Otherwise, the switching is done in-situ
1471 The default is taken from .asaprc (False)
1472 Example:
1473 none
1474 """
1475 if insitu is None: insitu = rcParams['insitu']
1476 self._math._setinsitu(insitu)
1477 varlist = vars()
1478 s = scantable(self._math._freqswitch(self))
1479 s._add_history("freq_switch", varlist)
1480 print_log()
1481 if insitu: self._assign(s)
1482 else: return s
1483
1484 def recalc_azel(self):
1485 """
1486 Recalculate the azimuth and elevation for each position.
1487 Parameters:
1488 none
1489 Example:
1490 """
1491 varlist = vars()
1492 self._recalcazel()
1493 self._add_history("recalc_azel", varlist)
1494 print_log()
1495 return
1496
1497 def __add__(self, other):
1498 varlist = vars()
1499 s = None
1500 if isinstance(other, scantable):
1501 print "scantable + scantable NYI"
1502 return
1503 elif isinstance(other, float):
1504 s = scantable(self._math._unaryop(self, other, "ADD", False))
1505 else:
1506 raise TypeError("Other input is not a scantable or float value")
1507 s._add_history("operator +", varlist)
1508 print_log()
1509 return s
1510
1511 def __sub__(self, other):
1512 """
1513 implicit on all axes and on Tsys
1514 """
1515 varlist = vars()
1516 s = None
1517 if isinstance(other, scantable):
1518 print "scantable - scantable NYI"
1519 return
1520 elif isinstance(other, float):
1521 s = scantable(self._math._unaryop(self, other, "SUB", False))
1522 else:
1523 raise TypeError("Other input is not a scantable or float value")
1524 s._add_history("operator -", varlist)
1525 print_log()
1526 return s
1527
1528 def __mul__(self, other):
1529 """
1530 implicit on all axes and on Tsys
1531 """
1532 varlist = vars()
1533 s = None
1534 if isinstance(other, scantable):
1535 print "scantable * scantable NYI"
1536 return
1537 elif isinstance(other, float):
1538 s = scantable(self._math._unaryop(self, other, "MUL", False))
1539 else:
1540 raise TypeError("Other input is not a scantable or float value")
1541 s._add_history("operator *", varlist)
1542 print_log()
1543 return s
1544
1545
1546 def __div__(self, other):
1547 """
1548 implicit on all axes and on Tsys
1549 """
1550 varlist = vars()
1551 s = None
1552 if isinstance(other, scantable):
1553 print "scantable / scantable NYI"
1554 return
1555 elif isinstance(other, float):
1556 if other == 0.0:
1557 raise ZeroDivisionError("Dividing by zero is not recommended")
1558 s = scantable(self._math._unaryop(self, other, "DIV", False))
1559 else:
1560 raise TypeError("Other input is not a scantable or float value")
1561 s._add_history("operator /", varlist)
1562 print_log()
1563 return s
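
# The operators above accept a float on the right-hand side; a quick
# illustrative use:
#
#   scaled = scan * 2.0     # spectra multiplied by 2 (Tsys untouched)
#   shifted = scan + 10.0   # constant offset added to all spectra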
1564
1565 def get_fit(self, row=0):
1566 """
1567 Print or return the stored fits for a row in the scantable
1568 Parameters:
1569 row: the row which the fit has been applied to.
1570 """
1571 if row > self.nrow():
1572 return
1573 from asap.asapfit import asapfit
1574 fit = asapfit(self._getfit(row))
1575 if rcParams['verbose']:
1576 print fit
1577 return
1578 else:
1579 return fit.as_dict()
1580
1581 def _add_history(self, funcname, parameters):
1582 # create date
1583 sep = "##"
1584 from datetime import datetime
1585 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1586 hist = dstr+sep
1587 hist += funcname+sep#cdate+sep
1588 if parameters.has_key('self'): del parameters['self']
1589 for k, v in parameters.iteritems():
1590 if type(v) is dict:
1591 for k2, v2 in v.iteritems():
1592 hist += k2
1593 hist += "="
1594 if isinstance(v2, scantable):
1595 hist += 'scantable'
1596 elif k2 == 'mask':
1597 if isinstance(v2, list) or isinstance(v2, tuple):
1598 hist += str(self._zip_mask(v2))
1599 else:
1600 hist += str(v2)
1601 else:
1602 hist += str(v2)
1603 else:
1604 hist += k
1605 hist += "="
1606 if isinstance(v, scantable):
1607 hist += 'scantable'
1608 elif k == 'mask':
1609 if isinstance(v, list) or isinstance(v, tuple):
1610 hist += str(self._zip_mask(v))
1611 else:
1612 hist += str(v)
1613 else:
1614 hist += str(v)
1615 hist += sep
1616 hist = hist[:-2] # remove trailing '##'
1617 self._addhistory(hist)
1618
1619
1620 def _zip_mask(self, mask):
1621 mask = list(mask)
1622 i = 0
1623 segments = []
1624 while mask[i:].count(1):
1625 i += mask[i:].index(1)
1626 if mask[i:].count(0):
1627 j = i + mask[i:].index(0)
1628 else:
1629 j = len(mask)
1630 segments.append([i, j])
1631 i = j
1632 return segments
1633
1634 def _get_ordinate_label(self):
1635 fu = "("+self.get_fluxunit()+")"
1636 import re
1637 lbl = "Intensity"
1638 if re.match(".K.", fu):
1639 lbl = "Brightness Temperature "+ fu
1640 elif re.match(".Jy.", fu):
1641 lbl = "Flux density "+ fu
1642 return lbl
1643
1644 def _check_ifs(self):
1645 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1646 nchans = filter(lambda t: t > 0, nchans)
1647 return (sum(nchans)/len(nchans) == nchans[0])
1648
1649 def _fill(self, names, unit, average):
1650 import os
1651 from asap._asap import stfiller
1652 first = True
1653 fullnames = []
1654 for name in names:
1655 name = os.path.expandvars(name)
1656 name = os.path.expanduser(name)
1657 if not os.path.exists(name):
1658 msg = "File '%s' does not exists" % (name)
1659 if rcParams['verbose']:
1660 asaplog.push(msg)
1661 print asaplog.pop().strip()
1662 return
1663 raise IOError(msg)
1664 fullnames.append(name)
1665 if average:
1666 asaplog.push('Auto averaging integrations')
1667 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1668 for name in fullnames:
1669 tbl = Scantable(stype)
1670 r = stfiller(tbl)
1671 msg = "Importing %s..." % (name)
1672 asaplog.push(msg, False)
1673 print_log()
1674 r._open(name, -1, -1)
1675 r._read()
1676 #tbl = r._getdata()
1677 if average:
1678 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1679 #tbl = tbl2
1680 if not first:
1681 tbl = self._math._merge([self, tbl])
1682 #tbl = tbl2
1683 Scantable.__init__(self, tbl)
1684 r._close()
1685 del r, tbl
1686 first = False
1687 if unit is not None:
1688 self.set_fluxunit(unit)
1689 self.set_freqframe(rcParams['scantable.freqframe'])
1690