source: branches/Release2.1/python/scantable.py@ 1473

Last change on this file since 1473 was 1254, checked in by phi196, 18 years ago

Fixed order in set_restfreq from list(dict)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 62.6 KB
Line 
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations withinb a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
80 Image FITS or MS2 format.
81 Parameters:
82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: If the file should be overwritten if it exists.
92 The default False is to return with warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Parameters:
128 none
129 Example:
130 copiedscan = scan.copy()
131 """
132 sd = scantable(Scantable._copy(self))
133 return sd
134
    def drop_scan(self, scanid=None):
        """
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.
        Parameters:
            scanid:    a (list of) scan number(s)
        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            if rcParams['verbose']:
                print "Please specify a scanno to drop from the scantable"
                return
            else:
                raise RuntimeError("No scan given")
        try:
            scanid = _to_list(scanid)
            # scans to keep = all distinct scan numbers minus the dropped ones;
            # list.remove raises ValueError for unknown scan ids, caught below
            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
            for sid in scanid: allscans.remove(sid)
            if len(allscans) == 0:
                raise ValueError("Can't remove all scans")
        except ValueError:
            if rcParams['verbose']:
                print "Couldn't find any match."
                return
            else: raise
        try:
            # apply the "keep" selection on top of the current one, copy,
            # then restore the original selection on self
            bsel = self.get_selection()
            sel = selector()
            sel.set_scans(allscans)
            self.set_selection(bsel+sel)
            scopy = self._copy()
            self.set_selection(bsel)
            return scantable(scopy)
        except RuntimeError:
            if rcParams['verbose']:
                print "Couldn't find any match."
            else:
                raise
175
176
177 def get_scan(self, scanid=None):
178 """
179 Return a specific scan (by scanno) or collection of scans (by
180 source name) in a new scantable.
181 Parameters:
182 scanid: a (list of) scanno or a source name, unix-style
183 patterns are accepted for source name matching, e.g.
184 '*_R' gets all 'ref scans
185 Example:
186 # get all scans containing the source '323p459'
187 newscan = scan.get_scan('323p459')
188 # get all 'off' scans
189 refscans = scan.get_scan('*_R')
190 # get a susbset of scans by scanno (as listed in scan.summary())
191 newscan = scan.get_scan([0, 2, 7, 10])
192 """
193 if scanid is None:
194 if rcParams['verbose']:
195 print "Please specify a scan no or name to " \
196 "retrieve from the scantable"
197 return
198 else:
199 raise RuntimeError("No scan given")
200
201 try:
202 bsel = self.get_selection()
203 sel = selector()
204 if type(scanid) is str:
205 sel.set_name(scanid)
206 self.set_selection(bsel+sel)
207 scopy = self._copy()
208 self.set_selection(bsel)
209 return scantable(scopy)
210 elif type(scanid) is int:
211 sel.set_scans([scanid])
212 self.set_selection(bsel+sel)
213 scopy = self._copy()
214 self.set_selection(bsel)
215 return scantable(scopy)
216 elif type(scanid) is list:
217 sel.set_scans(scanid)
218 self.set_selection(sel)
219 scopy = self._copy()
220 self.set_selection(bsel)
221 return scantable(scopy)
222 else:
223 msg = "Illegal scanid type, use 'int' or 'list' if ints."
224 if rcParams['verbose']:
225 print msg
226 else:
227 raise TypeError(msg)
228 except RuntimeError:
229 if rcParams['verbose']: print "Couldn't find any match."
230 else: raise
231
232 def __str__(self):
233 return Scantable._summary(self, True)
234
235 def summary(self, filename=None):
236 """
237 Print a summary of the contents of this scantable.
238 Parameters:
239 filename: the name of a file to write the putput to
240 Default - no file output
241 verbose: print extra info such as the frequency table
242 The default (False) is taken from .asaprc
243 """
244 info = Scantable._summary(self, True)
245 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
246 if filename is not None:
247 if filename is "":
248 filename = 'scantable_summary.txt'
249 from os.path import expandvars, isdir
250 filename = expandvars(filename)
251 if not isdir(filename):
252 data = open(filename, 'w')
253 data.write(info)
254 data.close()
255 else:
256 msg = "Illegal file name '%s'." % (filename)
257 if rcParams['verbose']:
258 print msg
259 else:
260 raise IOError(msg)
261 if rcParams['verbose']:
262 try:
263 from IPython.genutils import page as pager
264 except ImportError:
265 from pydoc import pager
266 pager(info)
267 else:
268 return info
269
270
271 def get_selection(self):
272 """
273 Get the selection object currently set on this scantable.
274 Parameters:
275 none
276 Example:
277 sel = scan.get_selection()
278 sel.set_ifs(0) # select IF 0
279 scan.set_selection(sel) # apply modified selection
280 """
281 return selector(self._getselection())
282
283 def set_selection(self, selection=selector()):
284 """
285 Select a subset of the data. All following operations on this scantable
286 are only applied to thi selection.
287 Parameters:
288 selection: a selector object (default unset the selection)
289 Examples:
290 sel = selector() # create a selection object
291 self.set_scans([0, 3]) # select SCANNO 0 and 3
292 scan.set_selection(sel) # set the selection
293 scan.summary() # will only print summary of scanno 0 an 3
294 scan.set_selection() # unset the selection
295 """
296 self._setselection(selection)
297
298 def stats(self, stat='stddev', mask=None):
299 """
300 Determine the specified statistic of the current beam/if/pol
301 Takes a 'mask' as an optional parameter to specify which
302 channels should be excluded.
303 Parameters:
304 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
305 'var', 'stddev', 'avdev', 'rms', 'median'
306 mask: an optional mask specifying where the statistic
307 should be determined.
308 Example:
309 scan.set_unit('channel')
310 msk = scan.create_mask([100, 200], [500, 600])
311 scan.stats(stat='mean', mask=m)
312 """
313 if mask == None:
314 mask = []
315 axes = ['Beam', 'IF', 'Pol', 'Time']
316 if not self._check_ifs():
317 raise ValueError("Cannot apply mask as the IFs have different "
318 "number of channels. Please use setselection() "
319 "to select individual IFs")
320
321 statvals = self._math._stats(self, mask, stat)
322 out = ''
323 axes = []
324 for i in range(self.nrow()):
325 axis = []
326 axis.append(self.getscan(i))
327 axis.append(self.getbeam(i))
328 axis.append(self.getif(i))
329 axis.append(self.getpol(i))
330 axis.append(self.getcycle(i))
331 axes.append(axis)
332 tm = self._gettime(i)
333 src = self._getsourcename(i)
334 out += 'Scan[%d] (%s) ' % (axis[0], src)
335 out += 'Time[%s]:\n' % (tm)
336 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
337 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
338 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
339 out += '= %3.3f\n' % (statvals[i])
340 out += "--------------------------------------------------\n"
341
342 if rcParams['verbose']:
343 print "--------------------------------------------------"
344 print " ", stat
345 print "--------------------------------------------------"
346 print out
347 return
348 else:
349 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
350 'axes' : axes,
351 'data': statvals}
352 return retval
353
354 def stddev(self, mask=None):
355 """
356 Determine the standard deviation of the current beam/if/pol
357 Takes a 'mask' as an optional parameter to specify which
358 channels should be excluded.
359 Parameters:
360 mask: an optional mask specifying where the standard
361 deviation should be determined.
362
363 Example:
364 scan.set_unit('channel')
365 msk = scan.create_mask([100, 200], [500, 600])
366 scan.stddev(mask=m)
367 """
368 return self.stats(stat='stddev', mask=mask);
369
370
371 def get_column_names(self):
372 """
373 Return a list of column names, which can be used for selection.
374 """
375 return list(Scantable.get_column_names(self))
376
377 def get_tsys(self):
378 """
379 Return the System temperatures.
380 Parameters:
381
382 Returns:
383 a list of Tsys values for the current selection
384 """
385
386 return self._row_callback(self._gettsys, "Tsys")
387
    def _row_callback(self, callback, label):
        # Generic per-row driver: invokes callback(row) for every row of the
        # table, collects the values, and in verbose mode prints a formatted
        # report headed by 'label'. Returns the list of per-row values.
        axes = []
        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
        out = ""
        outvec = []
        for i in range(self.nrow()):
            axis = []
            axis.append(self.getscan(i))
            axis.append(self.getbeam(i))
            axis.append(self.getif(i))
            axis.append(self.getpol(i))
            axis.append(self.getcycle(i))
            axes.append(axis)
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (axis[0], src)
            out += 'Time[%s]:\n' % (tm)
            # only label axes that are multi-valued in this table
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
            if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += "--------------------------------------------------\n"
        if rcParams['verbose']:
            print "--------------------------------------------------"
            print " %s" % (label)
            print "--------------------------------------------------"
            print out
        # disabled because the vector seems more useful
        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
        return outvec
419
420 def _get_column(self, callback, row=-1):
421 """
422 """
423 if row == -1:
424 return [callback(i) for i in range(self.nrow())]
425 else:
426 if 0 <= row < self.nrow():
427 return callback(row)
428
429
430 def get_time(self, row=-1):
431 """
432 Get a list of time stamps for the observations.
433 Return a string for each integration in the scantable.
434 Parameters:
435 row: row no of integration. Default -1 return all rows
436 Example:
437 none
438 """
439 from time import strptime
440 from datetime import datetime
441 times = self._get_column(self._gettime, row)
442 format = "%Y/%m/%d/%H:%M:%S"
443 if isinstance(times, list):
444 return [datetime(*strptime(i, format)[:6]) for i in times]
445 else:
446 return datetime(*strptime(times, format)[:6])
447
448 def get_sourcename(self, row=-1):
449 """
450 Get a list source names for the observations.
451 Return a string for each integration in the scantable.
452 Parameters:
453 row: row no of integration. Default -1 return all rows
454 Example:
455 none
456 """
457 return self._get_column(self._getsourcename, row)
458
459 def get_elevation(self, row=-1):
460 """
461 Get a list of elevations for the observations.
462 Return a float for each integration in the scantable.
463 Parameters:
464 row: row no of integration. Default -1 return all rows
465 Example:
466 none
467 """
468 return self._get_column(self._getelevation, row)
469
470 def get_azimuth(self, row=-1):
471 """
472 Get a list of azimuths for the observations.
473 Return a float for each integration in the scantable.
474 Parameters:
475 row: row no of integration. Default -1 return all rows
476 Example:
477 none
478 """
479 return self._get_column(self._getazimuth, row)
480
481 def get_parangle(self, row=-1):
482 """
483 Get a list of parallactic angles for the observations.
484 Return a float for each integration in the scantable.
485 Parameters:
486 row: row no of integration. Default -1 return all rows
487 Example:
488 none
489 """
490 return self._get_column(self._getparangle, row)
491
492 def get_direction(self, row=-1):
493 """
494 Get a list of Positions on the sky (direction) for the observations.
495 Return a float for each integration in the scantable.
496 Parameters:
497 row: row no of integration. Default -1 return all rows
498 Example:
499 none
500 """
501 return self._get_column(self._getdirection, row)
502
503 def set_unit(self, unit='channel'):
504 """
505 Set the unit for all following operations on this scantable
506 Parameters:
507 unit: optional unit, default is 'channel'
508 one of '*Hz', 'km/s', 'channel', ''
509 """
510 varlist = vars()
511 if unit in ['', 'pixel', 'channel']:
512 unit = ''
513 inf = list(self._getcoordinfo())
514 inf[0] = unit
515 self._setcoordinfo(inf)
516 self._add_history("set_unit", varlist)
517
518 def set_instrument(self, instr):
519 """
520 Set the instrument for subsequent processing
521 Parameters:
522 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
523 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
524 """
525 self._setInstrument(instr)
526 self._add_history("set_instument", vars())
527 print_log()
528
529 def set_feedtype(self, feedtype):
530 """
531 Overwrite the feed type, which might not be set correctly.
532 Parameters:
533 feedtype: 'linear' or 'circular'
534 """
535 self._setfeedtype(feedtype)
536 self._add_history("set_feedtype", vars())
537 print_log()
538
539 def set_doppler(self, doppler='RADIO'):
540 """
541 Set the doppler for all following operations on this scantable.
542 Parameters:
543 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
544 """
545 varlist = vars()
546 inf = list(self._getcoordinfo())
547 inf[2] = doppler
548 self._setcoordinfo(inf)
549 self._add_history("set_doppler", vars())
550 print_log()
551
552 def set_freqframe(self, frame=None):
553 """
554 Set the frame type of the Spectral Axis.
555 Parameters:
556 frame: an optional frame type, default 'LSRK'. Valid frames are:
557 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
558 'GEO', 'GALACTO', 'LGROUP', 'CMB'
559 Examples:
560 scan.set_freqframe('BARY')
561 """
562 if frame is None: frame = rcParams['scantable.freqframe']
563 varlist = vars()
564 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
565 'GEO', 'GALACTO', 'LGROUP', 'CMB']
566
567 if frame in valid:
568 inf = list(self._getcoordinfo())
569 inf[1] = frame
570 self._setcoordinfo(inf)
571 self._add_history("set_freqframe", varlist)
572 else:
573 msg = "Please specify a valid freq type. Valid types are:\n", valid
574 if rcParams['verbose']:
575 print msg
576 else:
577 raise TypeError(msg)
578 print_log()
579
580 def set_dirframe(self, frame=""):
581 """
582 Set the frame type of the Direction on the sky.
583 Parameters:
584 frame: an optional frame type, default ''. Valid frames are:
585 'J2000', 'B1950', 'GALACTIC'
586 Examples:
587 scan.set_dirframe('GALACTIC')
588 """
589 varlist = vars()
590 try:
591 Scantable.set_dirframe(self, frame)
592 except RuntimeError, msg:
593 if rcParams['verbose']:
594 print msg
595 else:
596 raise
597 self._add_history("set_dirframe", varlist)
598
599 def get_unit(self):
600 """
601 Get the default unit set in this scantable
602 Parameters:
603 Returns:
604 A unit string
605 """
606 inf = self._getcoordinfo()
607 unit = inf[0]
608 if unit == '': unit = 'channel'
609 return unit
610
611 def get_abcissa(self, rowno=0):
612 """
613 Get the abcissa in the current coordinate setup for the currently
614 selected Beam/IF/Pol
615 Parameters:
616 rowno: an optional row number in the scantable. Default is the
617 first row, i.e. rowno=0
618 Returns:
619 The abcissa values and it's format string (as a dictionary)
620 """
621 abc = self._getabcissa(rowno)
622 lbl = self._getabcissalabel(rowno)
623 print_log()
624 return abc, lbl
625
626 def flag(self, mask=None):
627 """
628 Flag the selected data using an optional channel mask.
629 Parameters:
630 mask: an optional channel mask, created with create_mask. Default
631 (no mask) is all channels.
632 """
633 varlist = vars()
634 if mask is None:
635 mask = []
636 try:
637 self._flag(mask)
638 except RuntimeError, msg:
639 if rcParams['verbose']:
640 print msg
641 return
642 else: raise
643 self._add_history("flag", varlist)
644
645 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
646 """
647 Flag the data in 'lag' space by providing a frequency to remove.
648 Flagged data in the scantable gets set to 0.0 before the fft.
649 No taper is applied.
650 Parameters:
651 frequency: the frequency (really a period within the bandwidth) to remove
652 width: the width of the frequency to remove, to remove a range
653 of frequencies aroung the centre.
654 unit: the frequency unit (default "GHz")
655 Notes:
656 It is recommended to flag edges of the band or strong signals beforehand.
657 """
658 if insitu is None: insitu = rcParams['insitu']
659 self._math._setinsitu(insitu)
660 varlist = vars()
661 base = { "GHz": 100000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
662 if not base.has_key(unit):
663 raise ValueError("%s is not a valid unit." % unit)
664 try:
665 s = scantable(self._math._lag_flag(self, frequency*base[unit],
666 width*base[unit]))
667 except RuntimeError, msg:
668 if rcParams['verbose']:
669 print msg
670 return
671 else: raise
672 s._add_history("lag_flag", varlist)
673 print_log()
674 if insitu:
675 self._assign(s)
676 else:
677 return s
678
679
    def create_mask(self, *args, **kwargs):
        """
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.
        Parameters:
            [min, max], [min2, max2], ...
                       Pairs of start/end points (inclusive) specifying the
                       regions to be masked
            invert:    optional argument. If specified as True,
                       return an inverted mask, i.e. the regions
                       specified are EXCLUDED
            row:       create the mask using the specified row for
                       unit conversions, default is row=0
                       only necessary if frequency varies over rows.
        Example:
            scan.set_unit('channel')
            a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'
            c)
            mask only channel 400
            msk = scan.create_mask([400, 400])
        """
        row = 0
        if kwargs.has_key("row"):
            row = kwargs.get("row")
        # abcissa values in the current unit, used to test window membership
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            # NOTE(review): _check_ifs() returns a truth value, which is then
            # passed to getif() as a row index (getif(False) == getif(0)) -
            # looks intentional but worth confirming
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        msk = NUM.zeros(n)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        # if the innermost element is a number, the windows were passed as
        # separate positional args; otherwise a single list of windows was given
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            if (len(window) != 2 or window[0] > window[1] ):
                raise TypeError("A window needs to be defined as [min, max]")
            # mark every channel whose abcissa value lies inside the window
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = 1
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = NUM.logical_not(msk)
        print_log()
        return msk
739
740 def get_restfreqs(self):
741 """
742 Get the restfrequency(s) stored in this scantable.
743 The return value(s) are always of unit 'Hz'
744 Parameters:
745 none
746 Returns:
747 a list of doubles
748 """
749 return list(self._getrestfreqs())
750
751
    def set_restfreqs(self, freqs=None, unit='Hz'):
        """
        Set or replace the restfrequency specified and
        If the 'freqs' argument holds a scalar,
        then that rest frequency will be applied to all the selected
        data. If the 'freqs' argument holds
        a vector, then it MUST be of equal or smaller length than
        the number of IFs (and the available restfrequencies will be
        replaced by this vector). In this case, *all* data have
        the restfrequency set per IF according
        to the corresponding value you give in the 'freqs' vector.
        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
        IF 1 gets restfreq 2e9.
        You can also specify the frequencies via a linecatalog.

        Parameters:
            freqs:    list of rest frequency values or string identifiers
            unit:     unit for rest frequency (default 'Hz')

        Example:
            # set the given restfrequency for the whole table
            scan.set_restfreqs(freqs=1.4e9)
            # If the number of IFs in the data is >= 2 the IF0 gets the first
            # value IF1 the second...
            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
            #set the given restfrequency for the whole table (by name)
            scan.set_restfreqs(freqs="OH1667")

        Note:
            To do more sophisticated restfrequency setting, e.g. on a
            source and IF basis, use scantable.set_selection() before using
            this function.
            # provided your scantable is called scan
            selection = selector()
            selection.set_name("ORION*")
            selection.set_ifs([1])
            scan.set_selection(selection)
            scan.set_restfreqs(freqs=86.6e9)

        """
        varlist = vars()
        from asap import linecatalog
        # simple value
        if isinstance(freqs, int) or isinstance(freqs, float):
            self._setrestfreqs(freqs, "",unit)
        # list of values
        elif isinstance(freqs, list) or isinstance(freqs, tuple):
            # list values are scalars
            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
                # select each IF in turn and set its restfreq, then restore
                # the original selection
                sel = selector()
                savesel = self._getselection()
                for i in xrange(len(freqs)):
                    sel.set_ifs([i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i], "",unit)
                self._setselection(savesel)
            # list values are dicts, e.g. {'value': 1.4e3, 'name': 'HI'}
            elif isinstance(freqs[-1], dict):
                sel = selector()
                savesel = self._getselection()
                for i in xrange(len(freqs)):
                    sel.set_ifs([i])
                    self._setselection(sel)
                    # NOTE(review): unit is hard-coded to "MHz" here, the
                    # 'unit' argument is ignored for dict entries - presumably
                    # named line frequencies are stored in MHz; confirm
                    self._setrestfreqs(freqs[i]["value"],
                                       freqs[i]["name"], "MHz")
                self._setselection(savesel)
        # freqs are to be taken from a linecatalog
        elif isinstance(freqs, linecatalog):
            sel = selector()
            savesel = self._getselection()
            for i in xrange(freqs.nrow()):
                sel.set_ifs([i])
                self._setselection(sel)
                # NOTE(review): "MHz" hard-coded here too - linecatalog
                # frequencies appear to be in MHz; confirm
                self._setrestfreqs(freqs.get_frequency(i),
                                   freqs.get_name(i), "MHz")
                # ensure that we are not iterating past nIF
                if i == self.nif()-1: break
            self._setselection(savesel)
        else:
            return
        self._add_history("set_restfreqs", varlist)
833
834
835
836 def history(self, filename=None):
837 """
838 Print the history. Optionally to a file.
839 """
840 hist = list(self._gethistory())
841 out = "-"*80
842 for h in hist:
843 if h.startswith("---"):
844 out += "\n"+h
845 else:
846 items = h.split("##")
847 date = items[0]
848 func = items[1]
849 items = items[2:]
850 out += "\n"+date+"\n"
851 out += "Function: %s\n Parameters:" % (func)
852 for i in items:
853 s = i.split("=")
854 out += "\n %s = %s" % (s[0], s[1])
855 out += "\n"+"-"*80
856 if filename is not None:
857 if filename is "":
858 filename = 'scantable_history.txt'
859 import os
860 filename = os.path.expandvars(os.path.expanduser(filename))
861 if not os.path.isdir(filename):
862 data = open(filename, 'w')
863 data.write(out)
864 data.close()
865 else:
866 msg = "Illegal file name '%s'." % (filename)
867 if rcParams['verbose']:
868 print msg
869 else:
870 raise IOError(msg)
871 if rcParams['verbose']:
872 try:
873 from IPython.genutils import page as pager
874 except ImportError:
875 from pydoc import pager
876 pager(out)
877 else:
878 return out
879 return
880 #
881 # Maths business
882 #
883
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """
        Return the (time) weighted average of a scan.
        Note:
            in channels only - align if necessary
        Parameters:
            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together,
            weight:   Weighting scheme.
                      'none'     (mean no weight)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   ( median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.
        Example:
            # time average the scantable without using a mask
            newscan = scan.average_time()
        """
        varlist = vars()
        if weight is None: weight = 'TINT'
        if mask is None: mask = ()
        # the underlying stmath call expects the scan-average mode as a string
        if scanav: scanav = "SCAN"
        else: scanav = "NONE"
        # _average takes a tuple of scantables
        scan = (self, )
        try:
            if align:
                # align on a copy so self stays untouched
                scan = (self.freq_align(insitu=False), )
            s = None
            if weight.upper() == 'MEDIAN':
                # median averaging has its own channel-average entry point
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("average_time", varlist)
        print_log()
        return s
932
933 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
934 """
935 Return a scan where all spectra are converted to either
936 Jansky or Kelvin depending upon the flux units of the scan table.
937 By default the function tries to look the values up internally.
938 If it can't find them (or if you want to over-ride), you must
939 specify EITHER jyperk OR eta (and D which it will try to look up
940 also if you don't set it). jyperk takes precedence if you set both.
941 Parameters:
942 jyperk: the Jy / K conversion factor
943 eta: the aperture efficiency
944 d: the geomtric diameter (metres)
945 insitu: if False a new scantable is returned.
946 Otherwise, the scaling is done in-situ
947 The default is taken from .asaprc (False)
948 """
949 if insitu is None: insitu = rcParams['insitu']
950 self._math._setinsitu(insitu)
951 varlist = vars()
952 if jyperk is None: jyperk = -1.0
953 if d is None: d = -1.0
954 if eta is None: eta = -1.0
955 s = scantable(self._math._convertflux(self, d, eta, jyperk))
956 s._add_history("convert_flux", varlist)
957 print_log()
958 if insitu: self._assign(s)
959 else: return s
960
961 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
962 """
963 Return a scan after applying a gain-elevation correction.
964 The correction can be made via either a polynomial or a
965 table-based interpolation (and extrapolation if necessary).
966 You specify polynomial coefficients, an ascii table or neither.
967 If you specify neither, then a polynomial correction will be made
968 with built in coefficients known for certain telescopes (an error
969 will occur if the instrument is not known).
970 The data and Tsys are *divided* by the scaling factors.
971 Parameters:
972 poly: Polynomial coefficients (default None) to compute a
973 gain-elevation correction as a function of
974 elevation (in degrees).
975 filename: The name of an ascii file holding correction factors.
976 The first row of the ascii file must give the column
977 names and these MUST include columns
978 "ELEVATION" (degrees) and "FACTOR" (multiply data
979 by this) somewhere.
980 The second row must give the data type of the
981 column. Use 'R' for Real and 'I' for Integer.
982 An example file would be
983 (actual factors are arbitrary) :
984
985 TIME ELEVATION FACTOR
986 R R R
987 0.1 0 0.8
988 0.2 20 0.85
989 0.3 40 0.9
990 0.4 60 0.85
991 0.5 80 0.8
992 0.6 90 0.75
993 method: Interpolation method when correcting from a table.
994 Values are "nearest", "linear" (default), "cubic"
995 and "spline"
996 insitu: if False a new scantable is returned.
997 Otherwise, the scaling is done in-situ
998 The default is taken from .asaprc (False)
999 """
1000
1001 if insitu is None: insitu = rcParams['insitu']
1002 self._math._setinsitu(insitu)
1003 varlist = vars()
1004 if poly is None:
1005 poly = ()
1006 from os.path import expandvars
1007 filename = expandvars(filename)
1008 s = scantable(self._math._gainel(self, poly, filename, method))
1009 s._add_history("gain_el", varlist)
1010 print_log()
1011 if insitu: self._assign(s)
1012 else: return s
1013
1014 def freq_align(self, reftime=None, method='cubic', insitu=None):
1015 """
1016 Return a scan where all rows have been aligned in frequency/velocity.
1017 The alignment frequency frame (e.g. LSRK) is that set by function
1018 set_freqframe.
1019 Parameters:
1020 reftime: reference time to align at. By default, the time of
1021 the first row of data is used.
1022 method: Interpolation method for regridding the spectra.
1023 Choose from "nearest", "linear", "cubic" (default)
1024 and "spline"
1025 insitu: if False a new scantable is returned.
1026 Otherwise, the scaling is done in-situ
1027 The default is taken from .asaprc (False)
1028 """
1029 if insitu is None: insitu = rcParams["insitu"]
1030 self._math._setinsitu(insitu)
1031 varlist = vars()
1032 if reftime is None: reftime = ""
1033 s = scantable(self._math._freq_align(self, reftime, method))
1034 s._add_history("freq_align", varlist)
1035 print_log()
1036 if insitu: self._assign(s)
1037 else: return s
1038
1039 def opacity(self, tau, insitu=None):
1040 """
1041 Apply an opacity correction. The data
1042 and Tsys are multiplied by the correction factor.
1043 Parameters:
1044 tau: Opacity from which the correction factor is
1045 exp(tau*ZD)
1046 where ZD is the zenith-distance
1047 insitu: if False a new scantable is returned.
1048 Otherwise, the scaling is done in-situ
1049 The default is taken from .asaprc (False)
1050 """
1051 if insitu is None: insitu = rcParams['insitu']
1052 self._math._setinsitu(insitu)
1053 varlist = vars()
1054 s = scantable(self._math._opacity(self, tau))
1055 s._add_history("opacity", varlist)
1056 print_log()
1057 if insitu: self._assign(s)
1058 else: return s
1059
1060 def bin(self, width=5, insitu=None):
1061 """
1062 Return a scan where all spectra have been binned up.
1063 width: The bin width (default=5) in pixels
1064 insitu: if False a new scantable is returned.
1065 Otherwise, the scaling is done in-situ
1066 The default is taken from .asaprc (False)
1067 """
1068 if insitu is None: insitu = rcParams['insitu']
1069 self._math._setinsitu(insitu)
1070 varlist = vars()
1071 s = scantable(self._math._bin(self, width))
1072 s._add_history("bin", varlist)
1073 print_log()
1074 if insitu: self._assign(s)
1075 else: return s
1076
1077
1078 def resample(self, width=5, method='cubic', insitu=None):
1079 """
1080 Return a scan where all spectra have been binned up
1081 width: The bin width (default=5) in pixels
1082 method: Interpolation method when correcting from a table.
1083 Values are "nearest", "linear", "cubic" (default)
1084 and "spline"
1085 insitu: if False a new scantable is returned.
1086 Otherwise, the scaling is done in-situ
1087 The default is taken from .asaprc (False)
1088 """
1089 if insitu is None: insitu = rcParams['insitu']
1090 self._math._setinsitu(insitu)
1091 varlist = vars()
1092 s = scantable(self._math._resample(self, method, width))
1093 s._add_history("resample", varlist)
1094 print_log()
1095 if insitu: self._assign(s)
1096 else: return s
1097
1098
1099 def average_pol(self, mask=None, weight='none'):
1100 """
1101 Average the Polarisations together.
1102 Parameters:
1103 mask: An optional mask defining the region, where the
1104 averaging will be applied. The output will have all
1105 specified points masked.
1106 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1107 weighted), or 'tsys' (1/Tsys**2 weighted)
1108 """
1109 varlist = vars()
1110 if mask is None:
1111 mask = ()
1112 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1113 s._add_history("average_pol", varlist)
1114 print_log()
1115 return s
1116
1117 def average_beam(self, mask=None, weight='none'):
1118 """
1119 Average the Beams together.
1120 Parameters:
1121 mask: An optional mask defining the region, where the
1122 averaging will be applied. The output will have all
1123 specified points masked.
1124 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1125 weighted), or 'tsys' (1/Tsys**2 weighted)
1126 """
1127 varlist = vars()
1128 if mask is None:
1129 mask = ()
1130 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1131 s._add_history("average_beam", varlist)
1132 print_log()
1133 return s
1134
1135 def convert_pol(self, poltype=None):
1136 """
1137 Convert the data to a different polarisation type.
1138 Parameters:
1139 poltype: The new polarisation type. Valid types are:
1140 "linear", "stokes" and "circular"
1141 """
1142 varlist = vars()
1143 try:
1144 s = scantable(self._math._convertpol(self, poltype))
1145 except RuntimeError, msg:
1146 if rcParams['verbose']:
1147 print msg
1148 return
1149 else:
1150 raise
1151 s._add_history("convert_pol", varlist)
1152 print_log()
1153 return s
1154
1155 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1156 """
1157 Smooth the spectrum by the specified kernel (conserving flux).
1158 Parameters:
1159 scan: The input scan
1160 kernel: The type of smoothing kernel. Select from
1161 'hanning' (default), 'gaussian' and 'boxcar'.
1162 The first three characters are sufficient.
1163 width: The width of the kernel in pixels. For hanning this is
1164 ignored otherwise it defauls to 5 pixels.
1165 For 'gaussian' it is the Full Width Half
1166 Maximum. For 'boxcar' it is the full width.
1167 insitu: if False a new scantable is returned.
1168 Otherwise, the scaling is done in-situ
1169 The default is taken from .asaprc (False)
1170 Example:
1171 none
1172 """
1173 if insitu is None: insitu = rcParams['insitu']
1174 self._math._setinsitu(insitu)
1175 varlist = vars()
1176 s = scantable(self._math._smooth(self, kernel.lower(), width))
1177 s._add_history("smooth", varlist)
1178 print_log()
1179 if insitu: self._assign(s)
1180 else: return s
1181
1182
1183 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1184 """
1185 Return a scan which has been baselined (all rows) by a polynomial.
1186 Parameters:
1187 scan: a scantable
1188 mask: an optional mask
1189 order: the order of the polynomial (default is 0)
1190 plot: plot the fit and the residual. In this each
1191 indivual fit has to be approved, by typing 'y'
1192 or 'n'
1193 insitu: if False a new scantable is returned.
1194 Otherwise, the scaling is done in-situ
1195 The default is taken from .asaprc (False)
1196 Example:
1197 # return a scan baselined by a third order polynomial,
1198 # not using a mask
1199 bscan = scan.poly_baseline(order=3)
1200 """
1201 if insitu is None: insitu = rcParams['insitu']
1202 varlist = vars()
1203 if mask is None:
1204 mask = list(NUM.ones(self.nchan(-1)))
1205 from asap.asapfitter import fitter
1206 f = fitter()
1207 f.set_scan(self, mask)
1208 f.set_function(poly=order)
1209 s = f.auto_fit(insitu, plot=plot)
1210 s._add_history("poly_baseline", varlist)
1211 print_log()
1212 if insitu: self._assign(s)
1213 else: return s
1214
1215 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1216 threshold=3, plot=False, insitu=None):
1217 """
1218 Return a scan which has been baselined (all rows) by a polynomial.
1219 Spectral lines are detected first using linefinder and masked out
1220 to avoid them affecting the baseline solution.
1221
1222 Parameters:
1223 mask: an optional mask retreived from scantable
1224 edge: an optional number of channel to drop at
1225 the edge of spectrum. If only one value is
1226 specified, the same number will be dropped from
1227 both sides of the spectrum. Default is to keep
1228 all channels. Nested tuples represent individual
1229 edge selection for different IFs (a number of spectral
1230 channels can be different)
1231 order: the order of the polynomial (default is 0)
1232 threshold: the threshold used by line finder. It is better to
1233 keep it large as only strong lines affect the
1234 baseline solution.
1235 plot: plot the fit and the residual. In this each
1236 indivual fit has to be approved, by typing 'y'
1237 or 'n'
1238 insitu: if False a new scantable is returned.
1239 Otherwise, the scaling is done in-situ
1240 The default is taken from .asaprc (False)
1241
1242 Example:
1243 scan2=scan.auto_poly_baseline(order=7)
1244 """
1245 if insitu is None: insitu = rcParams['insitu']
1246 varlist = vars()
1247 from asap.asapfitter import fitter
1248 from asap.asaplinefind import linefinder
1249 from asap import _is_sequence_or_number as _is_valid
1250
1251 # check whether edge is set up for each IF individually
1252 individualedge = False;
1253 if len(edge) > 1:
1254 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1255 individualedge = True;
1256
1257 if not _is_valid(edge, int) and not individualedge:
1258 raise ValueError, "Parameter 'edge' has to be an integer or a \
1259 pair of integers specified as a tuple. Nested tuples are allowed \
1260 to make individual selection for different IFs."
1261
1262 curedge = (0, 0)
1263 if individualedge:
1264 for edgepar in edge:
1265 if not _is_valid(edgepar, int):
1266 raise ValueError, "Each element of the 'edge' tuple has \
1267 to be a pair of integers or an integer."
1268 else:
1269 curedge = edge;
1270
1271 # setup fitter
1272 f = fitter()
1273 f.set_function(poly=order)
1274
1275 # setup line finder
1276 fl = linefinder()
1277 fl.set_options(threshold=threshold)
1278
1279 if not insitu:
1280 workscan = self.copy()
1281 else:
1282 workscan = self
1283
1284 fl.set_scan(workscan)
1285
1286 rows = range(workscan.nrow())
1287 asaplog.push("Processing:")
1288 for r in rows:
1289 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1290 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1291 workscan.getpol(r), workscan.getcycle(r))
1292 asaplog.push(msg, False)
1293
1294 # figure out edge parameter
1295 if individualedge:
1296 if len(edge) >= workscan.getif(r):
1297 raise RuntimeError, "Number of edge elements appear to " \
1298 "be less than the number of IFs"
1299 curedge = edge[workscan.getif(r)]
1300
1301 # setup line finder
1302 fl.find_lines(r, mask, curedge)
1303 f.set_scan(workscan, fl.get_mask())
1304 f.x = workscan._getabcissa(r)
1305 f.y = workscan._getspectrum(r)
1306 f.data = None
1307 f.fit()
1308 x = f.get_parameters()
1309 if plot:
1310 f.plot(residual=True)
1311 x = raw_input("Accept fit ( [y]/n ): ")
1312 if x.upper() == 'N':
1313 continue
1314 workscan._setspectrum(f.fitter.getresidual(), r)
1315 if plot:
1316 f._p.unmap()
1317 f._p = None
1318 workscan._add_history("auto_poly_baseline", varlist)
1319 if insitu:
1320 self._assign(workscan)
1321 else:
1322 return workscan
1323
1324 def rotate_linpolphase(self, angle):
1325 """
1326 Rotate the phase of the complex polarization O=Q+iU correlation.
1327 This is always done in situ in the raw data. So if you call this
1328 function more than once then each call rotates the phase further.
1329 Parameters:
1330 angle: The angle (degrees) to rotate (add) by.
1331 Examples:
1332 scan.rotate_linpolphase(2.3)
1333 """
1334 varlist = vars()
1335 self._math._rotate_linpolphase(self, angle)
1336 self._add_history("rotate_linpolphase", varlist)
1337 print_log()
1338 return
1339
1340
1341 def rotate_xyphase(self, angle):
1342 """
1343 Rotate the phase of the XY correlation. This is always done in situ
1344 in the data. So if you call this function more than once
1345 then each call rotates the phase further.
1346 Parameters:
1347 angle: The angle (degrees) to rotate (add) by.
1348 Examples:
1349 scan.rotate_xyphase(2.3)
1350 """
1351 varlist = vars()
1352 self._math._rotate_xyphase(self, angle)
1353 self._add_history("rotate_xyphase", varlist)
1354 print_log()
1355 return
1356
1357 def swap_linears(self):
1358 """
1359 Swap the linear polarisations XX and YY
1360 """
1361 varlist = vars()
1362 self._math._swap_linears(self)
1363 self._add_history("swap_linears", varlist)
1364 print_log()
1365 return
1366
1367 def invert_phase(self):
1368 """
1369 Invert the phase of the complex polarisation
1370 """
1371 varlist = vars()
1372 self._math._invert_phase(self)
1373 self._add_history("invert_phase", varlist)
1374 print_log()
1375 return
1376
1377 def add(self, offset, insitu=None):
1378 """
1379 Return a scan where all spectra have the offset added
1380 Parameters:
1381 offset: the offset
1382 insitu: if False a new scantable is returned.
1383 Otherwise, the scaling is done in-situ
1384 The default is taken from .asaprc (False)
1385 """
1386 if insitu is None: insitu = rcParams['insitu']
1387 self._math._setinsitu(insitu)
1388 varlist = vars()
1389 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1390 s._add_history("add", varlist)
1391 print_log()
1392 if insitu:
1393 self._assign(s)
1394 else:
1395 return s
1396
1397 def scale(self, factor, tsys=True, insitu=None, ):
1398 """
1399 Return a scan where all spectra are scaled by the give 'factor'
1400 Parameters:
1401 factor: the scaling factor
1402 insitu: if False a new scantable is returned.
1403 Otherwise, the scaling is done in-situ
1404 The default is taken from .asaprc (False)
1405 tsys: if True (default) then apply the operation to Tsys
1406 as well as the data
1407 """
1408 if insitu is None: insitu = rcParams['insitu']
1409 self._math._setinsitu(insitu)
1410 varlist = vars()
1411 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1412 s._add_history("scale", varlist)
1413 print_log()
1414 if insitu:
1415 self._assign(s)
1416 else:
1417 return s
1418
1419 def auto_quotient(self, mode='time', preserve=True):
1420 """
1421 This function allows to build quotients automatically.
1422 It assumes the observation to have the same numer of
1423 "ons" and "offs"
1424 It will support "closest off in time" in the future
1425 Parameters:
1426 mode: the on/off detection mode; 'suffix' (default)
1427 'suffix' identifies 'off' scans by the
1428 trailing '_R' (Mopra/Parkes) or
1429 '_e'/'_w' (Tid)
1430 preserve: you can preserve (default) the continuum or
1431 remove it. The equations used are
1432 preserve: Output = Toff * (on/off) - Toff
1433 remove: Output = Toff * (on/off) - Ton
1434 """
1435 modes = ["time"]
1436 if not mode in modes:
1437 msg = "please provide valid mode. Valid modes are %s" % (modes)
1438 raise ValueError(msg)
1439 varlist = vars()
1440 s = scantable(self._math._auto_quotient(self, mode, preserve))
1441 s._add_history("auto_quotient", varlist)
1442 print_log()
1443 return s
1444
1445 def mx_quotient(self, mask = None, weight='median', preserve=True):
1446 """
1447 Form a quotient using "off" beams when observing in "MX" mode.
1448 Parameters:
1449 mask: an optional mask to be used when weight == 'stddev'
1450 weight: How to average the off beams. Default is 'median'.
1451 preserve: you can preserve (default) the continuum or
1452 remove it. The equations used are
1453 preserve: Output = Toff * (on/off) - Toff
1454 remove: Output = Toff * (on/off) - Ton
1455 """
1456 if mask is None: mask = ()
1457 varlist = vars()
1458 on = scantable(self._math._mx_extract(self, 'on'))
1459 preoff = scantable(self._math._mx_extract(self, 'off'))
1460 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1461 from asapmath import quotient
1462 q = quotient(on, off, preserve)
1463 q._add_history("mx_quotient", varlist)
1464 print_log()
1465 return q
1466
1467 def freq_switch(self, insitu=None):
1468 """
1469 Apply frequency switching to the data.
1470 Parameters:
1471 insitu: if False a new scantable is returned.
1472 Otherwise, the swictching is done in-situ
1473 The default is taken from .asaprc (False)
1474 Example:
1475 none
1476 """
1477 if insitu is None: insitu = rcParams['insitu']
1478 self._math._setinsitu(insitu)
1479 varlist = vars()
1480 s = scantable(self._math._freqswitch(self))
1481 s._add_history("freq_switch", varlist)
1482 print_log()
1483 if insitu: self._assign(s)
1484 else: return s
1485
1486 def recalc_azel(self):
1487 """
1488 Recalculate the azimuth and elevation for each position.
1489 Parameters:
1490 none
1491 Example:
1492 """
1493 varlist = vars()
1494 self._recalcazel()
1495 self._add_history("recalc_azel", varlist)
1496 print_log()
1497 return
1498
1499 def __add__(self, other):
1500 varlist = vars()
1501 s = None
1502 if isinstance(other, scantable):
1503 print "scantable + scantable NYI"
1504 return
1505 elif isinstance(other, float):
1506 s = scantable(self._math._unaryop(self, other, "ADD", False))
1507 else:
1508 raise TypeError("Other input is not a scantable or float value")
1509 s._add_history("operator +", varlist)
1510 print_log()
1511 return s
1512
1513 def __sub__(self, other):
1514 """
1515 implicit on all axes and on Tsys
1516 """
1517 varlist = vars()
1518 s = None
1519 if isinstance(other, scantable):
1520 print "scantable - scantable NYI"
1521 return
1522 elif isinstance(other, float):
1523 s = scantable(self._math._unaryop(self, other, "SUB", False))
1524 else:
1525 raise TypeError("Other input is not a scantable or float value")
1526 s._add_history("operator -", varlist)
1527 print_log()
1528 return s
1529
1530 def __mul__(self, other):
1531 """
1532 implicit on all axes and on Tsys
1533 """
1534 varlist = vars()
1535 s = None
1536 if isinstance(other, scantable):
1537 print "scantable * scantable NYI"
1538 return
1539 elif isinstance(other, float):
1540 s = scantable(self._math._unaryop(self, other, "MUL", False))
1541 else:
1542 raise TypeError("Other input is not a scantable or float value")
1543 s._add_history("operator *", varlist)
1544 print_log()
1545 return s
1546
1547
1548 def __div__(self, other):
1549 """
1550 implicit on all axes and on Tsys
1551 """
1552 varlist = vars()
1553 s = None
1554 if isinstance(other, scantable):
1555 print "scantable / scantable NYI"
1556 return
1557 elif isinstance(other, float):
1558 if other == 0.0:
1559 raise ZeroDivisionError("Dividing by zero is not recommended")
1560 s = scantable(self._math._unaryop(self, other, "DIV", False))
1561 else:
1562 raise TypeError("Other input is not a scantable or float value")
1563 s._add_history("operator /", varlist)
1564 print_log()
1565 return s
1566
1567 def get_fit(self, row=0):
1568 """
1569 Print or return the stored fits for a row in the scantable
1570 Parameters:
1571 row: the row which the fit has been applied to.
1572 """
1573 if row > self.nrow():
1574 return
1575 from asap.asapfit import asapfit
1576 fit = asapfit(self._getfit(row))
1577 if rcParams['verbose']:
1578 print fit
1579 return
1580 else:
1581 return fit.as_dict()
1582
    def _add_history(self, funcname, parameters):
        """
        Append a history record to the scantable as a single '##'-separated
        string of the form 'date##funcname##param=value##...'.
        Parameters:
            funcname:    name of the function being recorded
            parameters:  dict of the call's arguments (typically vars());
                         values are stringified, scantable values are
                         logged as the literal 'scantable', and list/tuple
                         masks are compressed via _zip_mask
        """
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # drop the instance itself from the recorded arguments
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # expand one level of dict: each item becomes its own entry
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # don't serialise whole tables into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # channel masks are long 0/1 lists; store them as
                        # compact [start, end) segments instead
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    # don't serialise whole tables into the history
                    hist += 'scantable'
                elif k == 'mask':
                    # compress plain list/tuple masks (see above)
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
1620
1621
1622 def _zip_mask(self, mask):
1623 mask = list(mask)
1624 i = 0
1625 segments = []
1626 while mask[i:].count(1):
1627 i += mask[i:].index(1)
1628 if mask[i:].count(0):
1629 j = i + mask[i:].index(0)
1630 else:
1631 j = len(mask)
1632 segments.append([i, j])
1633 i = j
1634 return segments
1635
1636 def _get_ordinate_label(self):
1637 fu = "("+self.get_fluxunit()+")"
1638 import re
1639 lbl = "Intensity"
1640 if re.match(".K.", fu):
1641 lbl = "Brightness Temperature "+ fu
1642 elif re.match(".Jy.", fu):
1643 lbl = "Flux density "+ fu
1644 return lbl
1645
1646 def _check_ifs(self):
1647 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1648 nchans = filter(lambda t: t > 0, nchans)
1649 return (sum(nchans)/len(nchans) == nchans[0])
1650
    def _fill(self, names, unit, average):
        """
        Read one or more data files into this scantable, merging them
        into a single table.
        Parameters:
            names:    list of file names (env vars and '~' are expanded);
                      every file must exist or the whole fill is aborted
            unit:     flux unit to set on the result, or None to keep
                      the reader's choice
            average:  if True, average integrations within each scan
                      while reading
        """
        import os
        from asap._asap import stfiller
        first = True
        fullnames = []
        # validate and expand all names up front so nothing is read
        # unless every file exists
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    asaplog.push(msg)
                    print asaplog.pop().strip()
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type flag for the C++ table: 1 = disk, 0 = memory
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = stfiller(tbl)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r._open(name, -1, -1)
            r._read()
            #tbl = r._getdata()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
                #tbl = tbl2
            if not first:
                # merge the newly read table with what has been
                # accumulated in self so far
                tbl = self._math._merge([self, tbl])
                #tbl = tbl2
            # rebind self to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r._close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        self.set_freqframe(rcParams['scantable.freqframe'])
1692
Note: See TracBrowser for help on using the repository browser.