source: trunk/python/scantable.py@1279

Last change on this file since 1279 was 1268, checked in by vor010, 18 years ago

chan_avg_limit parameter has been added to auto_poly_baseline method. It is passed to linefinder and determines whether to search for broad weak lines to exclude them from the baseline solution. Default value is 1, which means no search for such lines.

1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
80 ASCII or MS2 format.
81 Parameters:
82 name: the name of the output file. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: if True, overwrite the file if it exists.
92 The default (False) is to return with a warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Parameters:
128 none
129 Example:
130 copiedscan = scan.copy()
131 """
132 sd = scantable(Scantable._copy(self))
133 return sd
134
135 def drop_scan(self, scanid=None):
136 """
137 Return a new scantable with the specified scan number(s)
138 dropped.
139 Parameters:
140 scanid: a (list of) scan number(s)
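             Example (an illustrative sketch; the scan numbers are assumptions):
                 # drop scans 2 and 3, keeping everything else
                 cleaned = scan.drop_scan([2, 3])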
141 """
142 from asap import _is_sequence_or_number as _is_valid
143 from asap import _to_list
144 from asap import unique
145 if not _is_valid(scanid):
146 if rcParams['verbose']:
147 print "Please specify a scanno to drop from the scantable"
148 return
149 else:
150 raise RuntimeError("No scan given")
151 try:
152 scanid = _to_list(scanid)
153 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
154 for sid in scanid: allscans.remove(sid)
155 if len(allscans) == 0:
156 raise ValueError("Can't remove all scans")
157 except ValueError:
158 if rcParams['verbose']:
159 print "Couldn't find any match."
160 return
161 else: raise
162 try:
163 bsel = self.get_selection()
164 sel = selector()
165 sel.set_scans(allscans)
166 self.set_selection(bsel+sel)
167 scopy = self._copy()
168 self.set_selection(bsel)
169 return scantable(scopy)
170 except RuntimeError:
171 if rcParams['verbose']:
172 print "Couldn't find any match."
173 else:
174 raise
175
176
177 def get_scan(self, scanid=None):
178 """
179 Return a specific scan (by scanno) or collection of scans (by
180 source name) in a new scantable.
181 Parameters:
182 scanid: a (list of) scanno or a source name, unix-style
183 patterns are accepted for source name matching, e.g.
184 '*_R' gets all 'ref' scans
185 Example:
186 # get all scans containing the source '323p459'
187 newscan = scan.get_scan('323p459')
188 # get all 'off' scans
189 refscans = scan.get_scan('*_R')
190 # get a subset of scans by scanno (as listed in scan.summary())
191 newscan = scan.get_scan([0, 2, 7, 10])
192 """
193 if scanid is None:
194 if rcParams['verbose']:
195 print "Please specify a scan no or name to " \
196 "retrieve from the scantable"
197 return
198 else:
199 raise RuntimeError("No scan given")
200
201 try:
202 bsel = self.get_selection()
203 sel = selector()
204 if type(scanid) is str:
205 sel.set_name(scanid)
206 self.set_selection(bsel+sel)
207 scopy = self._copy()
208 self.set_selection(bsel)
209 return scantable(scopy)
210 elif type(scanid) is int:
211 sel.set_scans([scanid])
212 self.set_selection(bsel+sel)
213 scopy = self._copy()
214 self.set_selection(bsel)
215 return scantable(scopy)
216 elif type(scanid) is list:
217 sel.set_scans(scanid)
218 self.set_selection(sel)
219 scopy = self._copy()
220 self.set_selection(bsel)
221 return scantable(scopy)
222 else:
223 msg = "Illegal scanid type, use 'int' or 'list' if ints."
224 if rcParams['verbose']:
225 print msg
226 else:
227 raise TypeError(msg)
228 except RuntimeError:
229 if rcParams['verbose']: print "Couldn't find any match."
230 else: raise
231
232 def __str__(self):
233 return Scantable._summary(self, True)
234
235 def summary(self, filename=None):
236 """
237 Print a summary of the contents of this scantable.
238 Parameters:
239 filename: the name of a file to write the output to.
240 Default - no file output
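             Example (illustrative):
                 scan.summary()                       # page or return the summary
                 scan.summary('scan_summary.txt')     # also write it to a file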
243 """
244 info = Scantable._summary(self, True)
245 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
246 if filename is not None:
247 if filename == "":
248 filename = 'scantable_summary.txt'
249 from os.path import expandvars, isdir
250 filename = expandvars(filename)
251 if not isdir(filename):
252 data = open(filename, 'w')
253 data.write(info)
254 data.close()
255 else:
256 msg = "Illegal file name '%s'." % (filename)
257 if rcParams['verbose']:
258 print msg
259 else:
260 raise IOError(msg)
261 if rcParams['verbose']:
262 try:
263 from IPython.genutils import page as pager
264 except ImportError:
265 from pydoc import pager
266 pager(info)
267 else:
268 return info
269
270
271 def get_selection(self):
272 """
273 Get the selection object currently set on this scantable.
274 Parameters:
275 none
276 Example:
277 sel = scan.get_selection()
278 sel.set_ifs(0) # select IF 0
279 scan.set_selection(sel) # apply modified selection
280 """
281 return selector(self._getselection())
282
283 def set_selection(self, selection=selector()):
284 """
285 Select a subset of the data. All following operations on this scantable
286 are only applied to this selection.
287 Parameters:
288 selection: a selector object (default unset the selection)
289 Examples:
290 sel = selector() # create a selection object
291 sel.set_scans([0, 3]) # select SCANNO 0 and 3
292 scan.set_selection(sel) # set the selection
293 scan.summary() # will only print summary of scanno 0 and 3
294 scan.set_selection() # unset the selection
295 """
296 self._setselection(selection)
297
298 def stats(self, stat='stddev', mask=None):
299 """
300 Determine the specified statistic of the current beam/if/pol
301 Takes a 'mask' as an optional parameter to specify which
302 channels should be excluded.
303 Parameters:
304 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
305 'var', 'stddev', 'avdev', 'rms', 'median'
306 mask: an optional mask specifying where the statistic
307 should be determined.
308 Example:
309 scan.set_unit('channel')
310 msk = scan.create_mask([100, 200], [500, 600])
311 scan.stats(stat='mean', mask=msk)
312 """
313 if mask is None:
314 mask = []
315 axes = ['Beam', 'IF', 'Pol', 'Time']
316 if not self._check_ifs():
317 raise ValueError("Cannot apply mask as the IFs have different "
318 "number of channels. Please use setselection() "
319 "to select individual IFs")
320
321 statvals = self._math._stats(self, mask, stat)
322 out = ''
323 axes = []
324 for i in range(self.nrow()):
325 axis = []
326 axis.append(self.getscan(i))
327 axis.append(self.getbeam(i))
328 axis.append(self.getif(i))
329 axis.append(self.getpol(i))
330 axis.append(self.getcycle(i))
331 axes.append(axis)
332 tm = self._gettime(i)
333 src = self._getsourcename(i)
334 out += 'Scan[%d] (%s) ' % (axis[0], src)
335 out += 'Time[%s]:\n' % (tm)
336 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
337 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
338 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
339 out += '= %3.3f\n' % (statvals[i])
340 out += "--------------------------------------------------\n"
341
342 if rcParams['verbose']:
343 print "--------------------------------------------------"
344 print " ", stat
345 print "--------------------------------------------------"
346 print out
347 return
348 else:
349 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
350 'axes' : axes,
351 'data': statvals}
352 return retval
353
354 def stddev(self, mask=None):
355 """
356 Determine the standard deviation of the current beam/if/pol
357 Takes a 'mask' as an optional parameter to specify which
358 channels should be excluded.
359 Parameters:
360 mask: an optional mask specifying where the standard
361 deviation should be determined.
362
363 Example:
364 scan.set_unit('channel')
365 msk = scan.create_mask([100, 200], [500, 600])
366 scan.stddev(mask=msk)
367 """
368 return self.stats(stat='stddev', mask=mask)
369
370
371 def get_column_names(self):
372 """
373 Return a list of column names, which can be used for selection.
374 """
375 return list(Scantable.get_column_names(self))
376
377 def get_tsys(self):
378 """
379 Return the System temperatures.
380 Parameters:
381
382 Returns:
383 a list of Tsys values for the current selection
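         Example (illustrative):
             tsys = scan.get_tsys()    # one Tsys value per row of the selection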
384 """
385
386 return self._row_callback(self._gettsys, "Tsys")
387
388 def _row_callback(self, callback, label):
389 axes = []
390 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
391 out = ""
392 outvec = []
393 for i in range(self.nrow()):
394 axis = []
395 axis.append(self.getscan(i))
396 axis.append(self.getbeam(i))
397 axis.append(self.getif(i))
398 axis.append(self.getpol(i))
399 axis.append(self.getcycle(i))
400 axes.append(axis)
401 tm = self._gettime(i)
402 src = self._getsourcename(i)
403 out += 'Scan[%d] (%s) ' % (axis[0], src)
404 out += 'Time[%s]:\n' % (tm)
405 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
406 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
407 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
408 outvec.append(callback(i))
409 out += '= %3.3f\n' % (outvec[i])
410 out += "--------------------------------------------------\n"
411 if rcParams['verbose']:
412 print "--------------------------------------------------"
413 print " %s" % (label)
414 print "--------------------------------------------------"
415 print out
416 # disabled because the vector seems more useful
417 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
418 return outvec
419
420 def _get_column(self, callback, row=-1):
421 """
422 """
423 if row == -1:
424 return [callback(i) for i in range(self.nrow())]
425 else:
426 if 0 <= row < self.nrow():
427 return callback(row)
428
429
430 def get_time(self, row=-1):
431 """
432 Get a list of time stamps for the observations.
433 Return a datetime object for each integration in the scantable.
434 Parameters:
435 row: row no of integration. Default -1 return all rows
436 Example:
437 none
438 """
439 from time import strptime
440 from datetime import datetime
441 times = self._get_column(self._gettime, row)
442 format = "%Y/%m/%d/%H:%M:%S"
443 if isinstance(times, list):
444 return [datetime(*strptime(i, format)[:6]) for i in times]
445 else:
446 return datetime(*strptime(times, format)[:6])
447
448 def get_sourcename(self, row=-1):
449 """
450 Get a list of source names for the observations.
451 Return a string for each integration in the scantable.
452 Parameters:
453 row: row no of integration. Default -1 return all rows
454 Example:
455 none
456 """
457 return self._get_column(self._getsourcename, row)
458
459 def get_elevation(self, row=-1):
460 """
461 Get a list of elevations for the observations.
462 Return a float for each integration in the scantable.
463 Parameters:
464 row: row no of integration. Default -1 return all rows
465 Example:
466 none
467 """
468 return self._get_column(self._getelevation, row)
469
470 def get_azimuth(self, row=-1):
471 """
472 Get a list of azimuths for the observations.
473 Return a float for each integration in the scantable.
474 Parameters:
475 row: row no of integration. Default -1 return all rows
476 Example:
477 none
478 """
479 return self._get_column(self._getazimuth, row)
480
481 def get_parangle(self, row=-1):
482 """
483 Get a list of parallactic angles for the observations.
484 Return a float for each integration in the scantable.
485 Parameters:
486 row: row no of integration. Default -1 return all rows
487 Example:
488 none
489 """
490 return self._get_column(self._getparangle, row)
491
492 def get_direction(self, row=-1):
493 """
494 Get a list of positions on the sky (direction) for the observations.
495 Return a direction for each integration in the scantable.
496 Parameters:
497 row: row no of integration. Default -1 return all rows
498 Example:
499 none
500 """
501 return self._get_column(self._getdirection, row)
502
503 def set_unit(self, unit='channel'):
504 """
505 Set the unit for all following operations on this scantable
506 Parameters:
507 unit: optional unit, default is 'channel'
508 one of '*Hz', 'km/s', 'channel', ''
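         Example (illustrative):
             scan.set_unit('km/s')    # subsequent operations work in velocity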
509 """
510 varlist = vars()
511 if unit in ['', 'pixel', 'channel']:
512 unit = ''
513 inf = list(self._getcoordinfo())
514 inf[0] = unit
515 self._setcoordinfo(inf)
516 self._add_history("set_unit", varlist)
517
518 def set_instrument(self, instr):
519 """
520 Set the instrument for subsequent processing
521 Parameters:
522 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
523 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
524 """
525 self._setInstrument(instr)
526 self._add_history("set_instument", vars())
527 print_log()
528
529 def set_feedtype(self, feedtype):
530 """
531 Overwrite the feed type, which might not be set correctly.
532 Parameters:
533 feedtype: 'linear' or 'circular'
534 """
535 self._setfeedtype(feedtype)
536 self._add_history("set_feedtype", vars())
537 print_log()
538
539 def set_doppler(self, doppler='RADIO'):
540 """
541 Set the doppler for all following operations on this scantable.
542 Parameters:
543 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
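         Example (illustrative):
             scan.set_doppler('OPTICAL')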
544 """
545 varlist = vars()
546 inf = list(self._getcoordinfo())
547 inf[2] = doppler
548 self._setcoordinfo(inf)
549 self._add_history("set_doppler", vars())
550 print_log()
551
552 def set_freqframe(self, frame=None):
553 """
554 Set the frame type of the Spectral Axis.
555 Parameters:
556 frame: an optional frame type, default 'LSRK'. Valid frames are:
557 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
558 'GEO', 'GALACTO', 'LGROUP', 'CMB'
559 Examples:
560 scan.set_freqframe('BARY')
561 """
562 if frame is None: frame = rcParams['scantable.freqframe']
563 varlist = vars()
564 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
565 'GEO', 'GALACTO', 'LGROUP', 'CMB']
566
567 if frame in valid:
568 inf = list(self._getcoordinfo())
569 inf[1] = frame
570 self._setcoordinfo(inf)
571 self._add_history("set_freqframe", varlist)
572 else:
573 msg = "Please specify a valid freq type. Valid types are:\n", valid
574 if rcParams['verbose']:
575 print msg
576 else:
577 raise TypeError(msg)
578 print_log()
579
580 def set_dirframe(self, frame=""):
581 """
582 Set the frame type of the Direction on the sky.
583 Parameters:
584 frame: an optional frame type, default ''. Valid frames are:
585 'J2000', 'B1950', 'GALACTIC'
586 Examples:
587 scan.set_dirframe('GALACTIC')
588 """
589 varlist = vars()
590 try:
591 Scantable.set_dirframe(self, frame)
592 except RuntimeError, msg:
593 if rcParams['verbose']:
594 print msg
595 else:
596 raise
597 self._add_history("set_dirframe", varlist)
598
599 def get_unit(self):
600 """
601 Get the default unit set in this scantable
602 Parameters:
603 Returns:
604 A unit string
605 """
606 inf = self._getcoordinfo()
607 unit = inf[0]
608 if unit == '': unit = 'channel'
609 return unit
610
611 def get_abcissa(self, rowno=0):
612 """
613 Get the abcissa in the current coordinate setup for the currently
614 selected Beam/IF/Pol
615 Parameters:
616 rowno: an optional row number in the scantable. Default is the
617 first row, i.e. rowno=0
618 Returns:
619 The abcissa values and the format string (as a tuple)
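         Example (illustrative):
             abc, lbl = scan.get_abcissa(rowno=0)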
620 """
621 abc = self._getabcissa(rowno)
622 lbl = self._getabcissalabel(rowno)
623 print_log()
624 return abc, lbl
625
626 def flag(self, mask=None):
627 """
628 Flag the selected data using an optional channel mask.
629 Parameters:
630 mask: an optional channel mask, created with create_mask. Default
631 (no mask) is all channels.
632 """
633 varlist = vars()
634 if mask is None:
635 mask = []
636 try:
637 self._flag(mask)
638 except RuntimeError, msg:
639 if rcParams['verbose']:
640 print msg
641 return
642 else: raise
643 self._add_history("flag", varlist)
644
645 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
646 """
647 Flag the data in 'lag' space by providing a frequency to remove.
648 Flagged data in the scantable gets set to 0.0 before the fft.
649 No taper is applied.
650 Parameters:
651 frequency: the frequency (really a period within the bandwidth) to remove
652 width: the width of the frequency to remove, in order to remove
653 a range of frequencies around the centre.
654 unit: the frequency unit (default "GHz")
655 Notes:
656 It is recommended to flag edges of the band or strong signals beforehand.
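         Example (an illustrative sketch; the numbers are assumptions):
             # remove a ripple with a period of ~0.01 GHz, +/- 0.002 GHz
             cleaned = scan.lag_flag(0.01, width=0.002, unit="GHz", insitu=False)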
657 """
658 if insitu is None: insitu = rcParams['insitu']
659 self._math._setinsitu(insitu)
660 varlist = vars()
661 base = { "GHz": 100000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
662 if not base.has_key(unit):
663 raise ValueError("%s is not a valid unit." % unit)
664 try:
665 s = scantable(self._math._lag_flag(self, frequency*base[unit],
666 width*base[unit]))
667 except RuntimeError, msg:
668 if rcParams['verbose']:
669 print msg
670 return
671 else: raise
672 s._add_history("lag_flag", varlist)
673 print_log()
674 if insitu:
675 self._assign(s)
676 else:
677 return s
678
679
680 def create_mask(self, *args, **kwargs):
681 """
682 Compute and return a mask based on [min, max] windows.
683 The specified windows are to be INCLUDED, when the mask is
684 applied.
685 Parameters:
686 [min, max], [min2, max2], ...
687 Pairs of start/end points (inclusive) specifying the regions
688 to be masked
689 invert: optional argument. If specified as True,
690 return an inverted mask, i.e. the regions
691 specified are EXCLUDED
692 row: create the mask using the specified row for
693 unit conversions, default is row=0
694 only necessary if frequency varies over rows.
695 Example:
696 scan.set_unit('channel')
697 a)
698 msk = scan.create_mask([400, 500], [800, 900])
699 # masks everything outside 400 and 500
700 # and 800 and 900 in the unit 'channel'
701
702 b)
703 msk = scan.create_mask([400, 500], [800, 900], invert=True)
704 # masks the regions between 400 and 500
705 # and 800 and 900 in the unit 'channel'
706 c)
707 mask only channel 400
708 msk = scan.create_mask([400, 400])
709 """
710 row = 0
711 if kwargs.has_key("row"):
712 row = kwargs.get("row")
713 data = self._getabcissa(row)
714 u = self._getcoordinfo()[0]
715 if rcParams['verbose']:
716 if u == "": u = "channel"
717 msg = "The current mask window unit is %s" % u
718 i = self._check_ifs()
719 if not i:
720 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
721 asaplog.push(msg)
722 n = self.nchan()
723 msk = NUM.zeros(n)
724 # test if args is a 'list' or a 'normal' *args - UGLY!!!
725
726 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
727 and args or args[0]
728 for window in ws:
729 if (len(window) != 2 or window[0] > window[1] ):
730 raise TypeError("A window needs to be defined as [min, max]")
731 for i in range(n):
732 if data[i] >= window[0] and data[i] <= window[1]:
733 msk[i] = 1
734 if kwargs.has_key('invert'):
735 if kwargs.get('invert'):
736 msk = NUM.logical_not(msk)
737 print_log()
738 return msk
739
740 def get_restfreqs(self):
741 """
742 Get the restfrequency(s) stored in this scantable.
743 The return value(s) are always of unit 'Hz'
744 Parameters:
745 none
746 Returns:
747 a list of doubles
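         Example (illustrative):
             rfs = scan.get_restfreqs()    # values in Hz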
748 """
749 return list(self._getrestfreqs())
750
751
752 def set_restfreqs(self, freqs=None, unit='Hz'):
753 """
754 Set or replace the rest frequency.
755 If the 'freqs' argument holds a scalar,
756 then that rest frequency will be applied to all the selected
757 data. If the 'freqs' argument holds
758 a vector, then it MUST be of equal or smaller length than
759 the number of IFs (and the available restfrequencies will be
760 replaced by this vector). In this case, *all* data have
761 the restfrequency set per IF according
762 to the corresponding value you give in the 'freqs' vector.
763 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
764 IF 1 gets restfreq 2e9.
765 You can also specify the frequencies via a linecatalog.
766
767 Parameters:
768 freqs: list of rest frequency values or string identifiers
769 unit: unit for rest frequency (default 'Hz')
770
771 Example:
772 # set the given restfrequency for the whole table
773 scan.set_restfreqs(freqs=1.4e9)
774 # If the number of IFs in the data is >= 2 then IF0 gets the first
775 # value, IF1 the second, etc.
776 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
777 #set the given restfrequency for the whole table (by name)
778 scan.set_restfreqs(freqs="OH1667")
779
780 Note:
781 To do more sophisticated rest frequency setting, e.g. on a
782 source and IF basis, use scantable.set_selection() before using
783 this function.
784 # provided your scantable is called scan
785 selection = selector()
786 selection.set_name("ORION*")
787 selection.set_ifs([1])
788 scan.set_selection(selection)
789 scan.set_restfreqs(freqs=86.6e9)
790
791 """
792 varlist = vars()
793 from asap import linecatalog
794 # simple value
795 if isinstance(freqs, int) or isinstance(freqs, float):
796 self._setrestfreqs(freqs, "",unit)
797 # list of values
798 elif isinstance(freqs, list) or isinstance(freqs, tuple):
799 # list values are scalars
800 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
801 sel = selector()
802 savesel = self._getselection()
803 for i in xrange(len(freqs)):
804 sel.set_ifs([i])
805 self._setselection(sel)
806 self._setrestfreqs(freqs[i], "",unit)
807 self._setselection(savesel)
808 # list values are dicts, {'name': ..., 'value': ...}
809 elif isinstance(freqs[-1], dict):
810 sel = selector()
811 savesel = self._getselection()
812 for i in xrange(len(freqs)):
813 sel.set_ifs([i])
814 self._setselection(sel)
815 self._setrestfreqs(freqs[i]["value"],
816 freqs[i]["name"], "MHz")
817 self._setselection(savesel)
818 # freqs are to be taken from a linecatalog
819 elif isinstance(freqs, linecatalog):
820 sel = selector()
821 savesel = self._getselection()
822 for i in xrange(freqs.nrow()):
823 sel.set_ifs([i])
824 self._setselection(sel)
825 self._setrestfreqs(freqs.get_frequency(i),
826 freqs.get_name(i), "MHz")
827 # ensure that we are not iterating past nIF
828 if i == self.nif()-1: break
829 self._setselection(savesel)
830 else:
831 return
832 self._add_history("set_restfreqs", varlist)
833
834
835
836 def history(self, filename=None):
837 """
838 Print the history. Optionally to a file.
839 """
840 hist = list(self._gethistory())
841 out = "-"*80
842 for h in hist:
843 if h.startswith("---"):
844 out += "\n"+h
845 else:
846 items = h.split("##")
847 date = items[0]
848 func = items[1]
849 items = items[2:]
850 out += "\n"+date+"\n"
851 out += "Function: %s\n Parameters:" % (func)
852 for i in items:
853 s = i.split("=")
854 out += "\n %s = %s" % (s[0], s[1])
855 out += "\n"+"-"*80
856 if filename is not None:
857 if filename == "":
858 filename = 'scantable_history.txt'
859 import os
860 filename = os.path.expandvars(os.path.expanduser(filename))
861 if not os.path.isdir(filename):
862 data = open(filename, 'w')
863 data.write(out)
864 data.close()
865 else:
866 msg = "Illegal file name '%s'." % (filename)
867 if rcParams['verbose']:
868 print msg
869 else:
870 raise IOError(msg)
871 if rcParams['verbose']:
872 try:
873 from IPython.genutils import page as pager
874 except ImportError:
875 from pydoc import pager
876 pager(out)
877 else:
878 return out
879 return
880 #
881 # Maths business
882 #
883
884 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
885 """
886 Return the (time) weighted average of a scan.
887 Note:
888 in channels only - align if necessary
889 Parameters:
890 mask: an optional mask (only used for 'var' and 'tsys'
891 weighting)
892 scanav: True averages each scan separately
893 False (default) averages all scans together,
894 weight: Weighting scheme.
895 'none' (mean, no weighting)
896 'var' (1/var(spec) weighted)
897 'tsys' (1/Tsys**2 weighted)
898 'tint' (integration time weighted)
899 'tintsys' (Tint/Tsys**2)
900 'median' (median averaging)
901 The default is 'tint'
902 align: align the spectra in velocity before averaging. It takes
903 the time of the first spectrum as reference time.
904 Example:
905 # time average the scantable without using a mask
906 newscan = scan.average_time()
907 """
908 varlist = vars()
909 if weight is None: weight = 'TINT'
910 if mask is None: mask = ()
911 if scanav: scanav = "SCAN"
912 else: scanav = "NONE"
913 scan = (self, )
914 try:
915 if align:
916 scan = (self.freq_align(insitu=False), )
917 s = None
918 if weight.upper() == 'MEDIAN':
919 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
920 scanav))
921 else:
922 s = scantable(self._math._average(scan, mask, weight.upper(),
923 scanav))
924 except RuntimeError, msg:
925 if rcParams['verbose']:
926 print msg
927 return
928 else: raise
929 s._add_history("average_time", varlist)
930 print_log()
931 return s
932
933 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
934 """
935 Return a scan where all spectra are converted to either
936 Jansky or Kelvin depending upon the flux units of the scan table.
937 By default the function tries to look the values up internally.
938 If it can't find them (or if you want to over-ride), you must
939 specify EITHER jyperk OR eta (and D which it will try to look up
940 also if you don't set it). jyperk takes precedence if you set both.
941 Parameters:
942 jyperk: the Jy / K conversion factor
943 eta: the aperture efficiency
944 d: the geometric diameter (metres)
945 insitu: if False a new scantable is returned.
946 Otherwise, the scaling is done in-situ
947 The default is taken from .asaprc (False)
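         Example (an illustrative sketch; eta and d are assumed values):
             jyscan = scan.convert_flux(eta=0.58, d=64.0, insitu=False)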
948 """
949 if insitu is None: insitu = rcParams['insitu']
950 self._math._setinsitu(insitu)
951 varlist = vars()
952 if jyperk is None: jyperk = -1.0
953 if d is None: d = -1.0
954 if eta is None: eta = -1.0
955 s = scantable(self._math._convertflux(self, d, eta, jyperk))
956 s._add_history("convert_flux", varlist)
957 print_log()
958 if insitu: self._assign(s)
959 else: return s
960
961 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
962 """
963 Return a scan after applying a gain-elevation correction.
964 The correction can be made via either a polynomial or a
965 table-based interpolation (and extrapolation if necessary).
966 You specify polynomial coefficients, an ascii table or neither.
967 If you specify neither, then a polynomial correction will be made
968 with built in coefficients known for certain telescopes (an error
969 will occur if the instrument is not known).
970 The data and Tsys are *divided* by the scaling factors.
971 Parameters:
972 poly: Polynomial coefficients (default None) to compute a
973 gain-elevation correction as a function of
974 elevation (in degrees).
975 filename: The name of an ascii file holding correction factors.
976 The first row of the ascii file must give the column
977 names and these MUST include columns
978 "ELEVATION" (degrees) and "FACTOR" (multiply data
979 by this) somewhere.
980 The second row must give the data type of the
981 column. Use 'R' for Real and 'I' for Integer.
982 An example file would be
983 (actual factors are arbitrary) :
984
985 TIME ELEVATION FACTOR
986 R R R
987 0.1 0 0.8
988 0.2 20 0.85
989 0.3 40 0.9
990 0.4 60 0.85
991 0.5 80 0.8
992 0.6 90 0.75
993 method: Interpolation method when correcting from a table.
994 Values are "nearest", "linear" (default), "cubic"
995 and "spline"
996 insitu: if False a new scantable is returned.
997 Otherwise, the scaling is done in-situ
998 The default is taken from .asaprc (False)
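             Example (an illustrative sketch; the coefficients are assumptions):
                 # polynomial correction as a function of elevation (degrees)
                 corrected = scan.gain_el(poly=[0.7, 0.008, -5.0e-05], insitu=False)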
999 """
1000
1001 if insitu is None: insitu = rcParams['insitu']
1002 self._math._setinsitu(insitu)
1003 varlist = vars()
1004 if poly is None:
1005 poly = ()
1006 from os.path import expandvars
1007 filename = expandvars(filename)
1008 s = scantable(self._math._gainel(self, poly, filename, method))
1009 s._add_history("gain_el", varlist)
1010 print_log()
1011 if insitu: self._assign(s)
1012 else: return s
1013
1014 def freq_align(self, reftime=None, method='cubic', insitu=None):
1015 """
1016 Return a scan where all rows have been aligned in frequency/velocity.
1017 The alignment frequency frame (e.g. LSRK) is that set by function
1018 set_freqframe.
1019 Parameters:
1020 reftime: reference time to align at. By default, the time of
1021 the first row of data is used.
1022 method: Interpolation method for regridding the spectra.
1023 Choose from "nearest", "linear", "cubic" (default)
1024 and "spline"
1025 insitu: if False a new scantable is returned.
1026 Otherwise, the scaling is done in-situ
1027 The default is taken from .asaprc (False)
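             Example (illustrative):
                 aligned = scan.freq_align(insitu=False)   # align at the first row's time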
1028 """
1029 if insitu is None: insitu = rcParams["insitu"]
1030 self._math._setinsitu(insitu)
1031 varlist = vars()
1032 if reftime is None: reftime = ""
1033 s = scantable(self._math._freq_align(self, reftime, method))
1034 s._add_history("freq_align", varlist)
1035 print_log()
1036 if insitu: self._assign(s)
1037 else: return s
1038
1039 def opacity(self, tau, insitu=None):
1040 """
1041 Apply an opacity correction. The data
1042 and Tsys are multiplied by the correction factor.
1043 Parameters:
1044 tau: Opacity from which the correction factor is
1045 exp(tau*ZD)
1046 where ZD is the zenith-distance
1047 insitu: if False a new scantable is returned.
1048 Otherwise, the scaling is done in-situ
1049 The default is taken from .asaprc (False)
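             Example (an illustrative sketch; the opacity value is an assumption):
                 corrected = scan.opacity(0.09, insitu=False)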
1050 """
1051 if insitu is None: insitu = rcParams['insitu']
1052 self._math._setinsitu(insitu)
1053 varlist = vars()
1054 s = scantable(self._math._opacity(self, tau))
1055 s._add_history("opacity", varlist)
1056 print_log()
1057 if insitu: self._assign(s)
1058 else: return s
1059
1060 def bin(self, width=5, insitu=None):
1061 """
1062 Return a scan where all spectra have been binned up.
1063 width: The bin width (default=5) in pixels
1064 insitu: if False a new scantable is returned.
1065 Otherwise, the scaling is done in-situ
1066 The default is taken from .asaprc (False)
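             Example (illustrative):
                 binned = scan.bin(width=10, insitu=False)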
1067 """
1068 if insitu is None: insitu = rcParams['insitu']
1069 self._math._setinsitu(insitu)
1070 varlist = vars()
1071 s = scantable(self._math._bin(self, width))
1072 s._add_history("bin", varlist)
1073 print_log()
1074 if insitu: self._assign(s)
1075 else: return s
1076
1077
1078 def resample(self, width=5, method='cubic', insitu=None):
1079 """
1080 Return a scan where all spectra have been resampled.
1081 width: The bin width (default=5) in pixels
1082 method: Interpolation method to use when resampling.
1083 Values are "nearest", "linear", "cubic" (default)
1084 and "spline"
1085 insitu: if False a new scantable is returned.
1086 Otherwise, the scaling is done in-situ
1087 The default is taken from .asaprc (False)
1088 """
1089 if insitu is None: insitu = rcParams['insitu']
1090 self._math._setinsitu(insitu)
1091 varlist = vars()
1092 s = scantable(self._math._resample(self, method, width))
1093 s._add_history("resample", varlist)
1094 print_log()
1095 if insitu: self._assign(s)
1096 else: return s
1097
1098
1099 def average_pol(self, mask=None, weight='none'):
1100 """
1101 Average the Polarisations together.
1102 Parameters:
1103 mask: An optional mask defining the region, where the
1104 averaging will be applied. The output will have all
1105 specified points masked.
1106 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1107 weighted), or 'tsys' (1/Tsys**2 weighted)
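         Example (illustrative):
             polavg = scan.average_pol(weight='tsys')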
1108 """
1109 varlist = vars()
1110 if mask is None:
1111 mask = ()
1112 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1113 s._add_history("average_pol", varlist)
1114 print_log()
1115 return s
1116
1117 def average_beam(self, mask=None, weight='none'):
1118 """
1119 Average the Beams together.
1120 Parameters:
1121 mask: An optional mask defining the region, where the
1122 averaging will be applied. The output will have all
1123 specified points masked.
1124 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1125 weighted), or 'tsys' (1/Tsys**2 weighted)
1126 """
1127 varlist = vars()
1128 if mask is None:
1129 mask = ()
1130 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1131 s._add_history("average_beam", varlist)
1132 print_log()
1133 return s
1134
1135 def convert_pol(self, poltype=None):
1136 """
1137 Convert the data to a different polarisation type.
1138 Parameters:
1139 poltype: The new polarisation type. Valid types are:
1140 "linear", "stokes" and "circular"
1141 """
1142 varlist = vars()
1143 try:
1144 s = scantable(self._math._convertpol(self, poltype))
1145 except RuntimeError, msg:
1146 if rcParams['verbose']:
1147 print msg
1148 return
1149 else:
1150 raise
1151 s._add_history("convert_pol", varlist)
1152 print_log()
1153 return s
1154
1155 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1156 """
1157 Smooth the spectrum by the specified kernel (conserving flux).
1158 Parameters:
1159 scan: The input scan
1160 kernel: The type of smoothing kernel. Select from
1161 'hanning' (default), 'gaussian' and 'boxcar'.
1162 The first three characters are sufficient.
1163 width: The width of the kernel in pixels. For hanning this is
1164 ignored, otherwise it defaults to 5 pixels.
1165 For 'gaussian' it is the Full Width Half
1166 Maximum. For 'boxcar' it is the full width.
1167 insitu: if False a new scantable is returned.
1168 Otherwise, the scaling is done in-situ
1169 The default is taken from .asaprc (False)
1170 Example:
1171 none
1172 """
1173 if insitu is None: insitu = rcParams['insitu']
1174 self._math._setinsitu(insitu)
1175 varlist = vars()
1176 s = scantable(self._math._smooth(self, kernel.lower(), width))
1177 s._add_history("smooth", varlist)
1178 print_log()
1179 if insitu: self._assign(s)
1180 else: return s
1181
1182
1183 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1184 """
1185 Return a scan which has been baselined (all rows) by a polynomial.
1186 Parameters:
1187 scan: a scantable
1188 mask: an optional mask
1189 order: the order of the polynomial (default is 0)
1190 plot: plot the fit and the residual. In this case each
1191 individual fit has to be approved by typing 'y'
1192 or 'n'
1193 insitu: if False a new scantable is returned.
1194 Otherwise, the scaling is done in-situ
1195 The default is taken from .asaprc (False)
1196 Example:
1197 # return a scan baselined by a third order polynomial,
1198 # not using a mask
1199 bscan = scan.poly_baseline(order=3)
1200 """
1201 if insitu is None: insitu = rcParams['insitu']
1202 varlist = vars()
1203 if mask is None:
1204 mask = list(NUM.ones(self.nchan(-1)))
1205 from asap.asapfitter import fitter
1206 try:
1207 f = fitter()
1208 f.set_scan(self, mask)
1209 f.set_function(poly=order)
1210 s = f.auto_fit(insitu, plot=plot)
1211 s._add_history("poly_baseline", varlist)
1212 print_log()
1213 if insitu: self._assign(s)
1214 else: return s
1215 except RuntimeError:
1216 msg = "The fit failed, possibly because it didn't converge."
1217 if rcParams['verbose']:
1218 print msg
1219 return
1220 else:
1221 raise RuntimeError(msg)
1222
1223
1224 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1225 threshold=3, chan_avg_limit=1, plot=False,
1226 insitu=None):
1227 """
1228 Return a scan which has been baselined (all rows) by a polynomial.
1229 Spectral lines are detected first using linefinder and masked out
1230 to avoid them affecting the baseline solution.
1231
1232 Parameters:
1233 mask: an optional mask retrieved from scantable
1234 edge: an optional number of channels to drop at
1235 the edge of spectrum. If only one value is
1236 specified, the same number will be dropped from
1237 both sides of the spectrum. Default is to keep
1238 all channels. Nested tuples represent individual
1239 edge selection for different IFs (the number of spectral
1240 channels can differ)
1241 order: the order of the polynomial (default is 0)
1242 threshold: the threshold used by line finder. It is better to
1243 keep it large as only strong lines affect the
1244 baseline solution.
1245 chan_avg_limit:
1246 a maximum number of consecutive spectral channels to
1247 average during the search of weak and broad lines.
1248 The default is no averaging (and no search for weak
1249 lines). If such lines can affect the fitted baseline
1250 (e.g. a high order polynomial is fitted), increase this
1251 parameter (usually values up to 8 are reasonable). Most
1252 users of this method should find the default value
1253 sufficient.
1254 plot: plot the fit and the residual. In this case each
1255 individual fit has to be approved by typing 'y'
1256 or 'n'
1257 insitu: if False a new scantable is returned.
1258 Otherwise, the scaling is done in-situ
1259 The default is taken from .asaprc (False)
1260
1261 Example:
1262 scan2=scan.auto_poly_baseline(order=7)
1263 """
1264 if insitu is None: insitu = rcParams['insitu']
1265 varlist = vars()
1266 from asap.asapfitter import fitter
1267 from asap.asaplinefind import linefinder
1268 from asap import _is_sequence_or_number as _is_valid
1269
1270 # check whether edge is set up for each IF individually
1271 individualedge = False
1272 if len(edge) > 1:
1273 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1274 individualedge = True
1275
1276 if not _is_valid(edge, int) and not individualedge:
1277 raise ValueError, "Parameter 'edge' has to be an integer or a \
1278 pair of integers specified as a tuple. Nested tuples are allowed \
1279 to make individual selection for different IFs."
1280
1281 curedge = (0, 0)
1282 if individualedge:
1283 for edgepar in edge:
1284 if not _is_valid(edgepar, int):
1285 raise ValueError, "Each element of the 'edge' tuple has \
1286 to be a pair of integers or an integer."
1287 else:
1288 curedge = edge
1289
1290 # setup fitter
1291 f = fitter()
1292 f.set_function(poly=order)
1293
1294 # setup line finder
1295 fl = linefinder()
1296 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1297
1298 if not insitu:
1299 workscan = self.copy()
1300 else:
1301 workscan = self
1302
1303 fl.set_scan(workscan)
1304
1305 rows = range(workscan.nrow())
1306 asaplog.push("Processing:")
1307 for r in rows:
1308 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1309 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1310 workscan.getpol(r), workscan.getcycle(r))
1311 asaplog.push(msg, False)
1312
1313 # figure out edge parameter
1314 if individualedge:
1315 if len(edge) <= workscan.getif(r):
1316 raise RuntimeError, "Number of edge elements appear to " \
1317 "be less than the number of IFs"
1318 curedge = edge[workscan.getif(r)]
1319
1320 # run the line finder and fit the baseline
1321 fl.find_lines(r, mask, curedge)
1322 f.set_scan(workscan, fl.get_mask())
1323 f.x = workscan._getabcissa(r)
1324 f.y = workscan._getspectrum(r)
1325 f.data = None
1326 f.fit()
1327 x = f.get_parameters()
1328 if plot:
1329 f.plot(residual=True)
1330 x = raw_input("Accept fit ( [y]/n ): ")
1331 if x.upper() == 'N':
1332 continue
1333 workscan._setspectrum(f.fitter.getresidual(), r)
1334 if plot:
1335 f._p.unmap()
1336 f._p = None
1337 workscan._add_history("auto_poly_baseline", varlist)
1338 if insitu:
1339 self._assign(workscan)
1340 else:
1341 return workscan
1342
1343 def rotate_linpolphase(self, angle):
1344 """
1345 Rotate the phase of the complex polarization O=Q+iU correlation.
1346 This is always done in situ in the raw data. So if you call this
1347 function more than once then each call rotates the phase further.
1348 Parameters:
1349 angle: The angle (degrees) to rotate (add) by.
1350 Examples:
1351 scan.rotate_linpolphase(2.3)
1352 """
1353 varlist = vars()
1354 self._math._rotate_linpolphase(self, angle)
1355 self._add_history("rotate_linpolphase", varlist)
1356 print_log()
1357 return
1358
1359
1360 def rotate_xyphase(self, angle):
1361 """
1362 Rotate the phase of the XY correlation. This is always done in situ
1363 in the data. So if you call this function more than once
1364 then each call rotates the phase further.
1365 Parameters:
1366 angle: The angle (degrees) to rotate (add) by.
1367 Examples:
1368 scan.rotate_xyphase(2.3)
1369 """
1370 varlist = vars()
1371 self._math._rotate_xyphase(self, angle)
1372 self._add_history("rotate_xyphase", varlist)
1373 print_log()
1374 return
1375
1376 def swap_linears(self):
1377 """
1378 Swap the linear polarisations XX and YY
1379 """
1380 varlist = vars()
1381 self._math._swap_linears(self)
1382 self._add_history("swap_linears", varlist)
1383 print_log()
1384 return
1385
1386 def invert_phase(self):
1387 """
1388 Invert the phase of the complex polarisation
1389 """
1390 varlist = vars()
1391 self._math._invert_phase(self)
1392 self._add_history("invert_phase", varlist)
1393 print_log()
1394 return
1395
1396 def add(self, offset, insitu=None):
1397 """
1398 Return a scan where all spectra have the offset added
1399 Parameters:
1400 offset: the offset
1401 insitu: if False a new scantable is returned.
1402 Otherwise, the scaling is done in-situ
1403 The default is taken from .asaprc (False)
1404 """
1405 if insitu is None: insitu = rcParams['insitu']
1406 self._math._setinsitu(insitu)
1407 varlist = vars()
1408 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1409 s._add_history("add", varlist)
1410 print_log()
1411 if insitu:
1412 self._assign(s)
1413 else:
1414 return s
1415
1416 def scale(self, factor, tsys=True, insitu=None, ):
1417 """
1418 Return a scan where all spectra are scaled by the given 'factor'
1419 Parameters:
1420 factor: the scaling factor
1421 insitu: if False a new scantable is returned.
1422 Otherwise, the scaling is done in-situ
1423 The default is taken from .asaprc (False)
1424 tsys: if True (default) then apply the operation to Tsys
1425 as well as the data
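         Example (an illustrative sketch; the factor is an assumption):
             calibrated = scan.scale(1.05, tsys=True, insitu=False)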
1426 """
1427 if insitu is None: insitu = rcParams['insitu']
1428 self._math._setinsitu(insitu)
1429 varlist = vars()
1430 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1431 s._add_history("scale", varlist)
1432 print_log()
1433 if insitu:
1434 self._assign(s)
1435 else:
1436 return s
1437
1438 def auto_quotient(self, mode='time', preserve=True):
1439 """
1440 This function allows you to build quotients automatically.
1441 It assumes the observation has the same number of
1442 "ons" and "offs"
1443 It will support "closest off in time" in the future
1444 Parameters:
1445 mode: the on/off detection mode; 'time' (default).
1446 'off' scans are identified by the
1447 trailing '_R' (Mopra/Parkes) or
1448 '_e'/'_w' (Tid)
1449 preserve: you can preserve (default) the continuum or
1450 remove it. The equations used are
1451 preserve: Output = Toff * (on/off) - Toff
1452 remove: Output = Toff * (on/off) - Ton
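         Example (illustrative):
             q = scan.auto_quotient()    # preserve the continuum (default)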
1453 """
1454 modes = ["time"]
1455 if not mode in modes:
1456 msg = "please provide valid mode. Valid modes are %s" % (modes)
1457 raise ValueError(msg)
1458 varlist = vars()
1459 s = scantable(self._math._auto_quotient(self, mode, preserve))
1460 s._add_history("auto_quotient", varlist)
1461 print_log()
1462 return s
1463
1464 def mx_quotient(self, mask = None, weight='median', preserve=True):
1465 """
1466 Form a quotient using "off" beams when observing in "MX" mode.
1467 Parameters:
1468 mask: an optional mask to be used when weight == 'stddev'
1469 weight: How to average the off beams. Default is 'median'.
1470 preserve: you can preserve (default) the continuum or
1471 remove it. The equations used are
1472 preserve: Output = Toff * (on/off) - Toff
1473 remove: Output = Toff * (on/off) - Ton
1474 """
1475 if mask is None: mask = ()
1476 varlist = vars()
1477 on = scantable(self._math._mx_extract(self, 'on'))
1478 preoff = scantable(self._math._mx_extract(self, 'off'))
1479 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1480 from asapmath import quotient
1481 q = quotient(on, off, preserve)
1482 q._add_history("mx_quotient", varlist)
1483 print_log()
1484 return q
1485
1486 def freq_switch(self, insitu=None):
1487 """
1488 Apply frequency switching to the data.
1489 Parameters:
1490 insitu: if False a new scantable is returned.
1491 Otherwise, the switching is done in-situ
1492 The default is taken from .asaprc (False)
1493 Example:
1494 none
1495 """
1496 if insitu is None: insitu = rcParams['insitu']
1497 self._math._setinsitu(insitu)
1498 varlist = vars()
1499 s = scantable(self._math._freqswitch(self))
1500 s._add_history("freq_switch", varlist)
1501 print_log()
1502 if insitu: self._assign(s)
1503 else: return s
1504
1505 def recalc_azel(self):
1506 """
1507 Recalculate the azimuth and elevation for each position.
1508 Parameters:
1509 none
1510 Example:
1511 """
1512 varlist = vars()
1513 self._recalcazel()
1514 self._add_history("recalc_azel", varlist)
1515 print_log()
1516 return
1517
1518 def __add__(self, other):
1519 varlist = vars()
1520 s = None
1521 if isinstance(other, scantable):
1522 print "scantable + scantable NYI"
1523 return
1524 elif isinstance(other, float):
1525 s = scantable(self._math._unaryop(self, other, "ADD", False))
1526 else:
1527 raise TypeError("Other input is not a scantable or float value")
1528 s._add_history("operator +", varlist)
1529 print_log()
1530 return s
1531
1532 def __sub__(self, other):
1533 """
1534 implicit on all axes and on Tsys
1535 """
1536 varlist = vars()
1537 s = None
1538 if isinstance(other, scantable):
1539 print "scantable - scantable NYI"
1540 return
1541 elif isinstance(other, float):
1542 s = scantable(self._math._unaryop(self, other, "SUB", False))
1543 else:
1544 raise TypeError("Other input is not a scantable or float value")
1545 s._add_history("operator -", varlist)
1546 print_log()
1547 return s
1548
1549 def __mul__(self, other):
1550 """
1551 implicit on all axes and on Tsys
1552 """
1553 varlist = vars()
1554 s = None
1555 if isinstance(other, scantable):
1556 print "scantable * scantable NYI"
1557 return
1558 elif isinstance(other, float):
1559 s = scantable(self._math._unaryop(self, other, "MUL", False))
1560 else:
1561 raise TypeError("Other input is not a scantable or float value")
1562 s._add_history("operator *", varlist)
1563 print_log()
1564 return s
1565
1566
1567 def __div__(self, other):
1568 """
1569 implicit on all axes and on Tsys
1570 """
1571 varlist = vars()
1572 s = None
1573 if isinstance(other, scantable):
1574 print "scantable / scantable NYI"
1575 return
1576 elif isinstance(other, float):
1577 if other == 0.0:
1578 raise ZeroDivisionError("Dividing by zero is not recommended")
1579 s = scantable(self._math._unaryop(self, other, "DIV", False))
1580 else:
1581 raise TypeError("Other input is not a scantable or float value")
1582 s._add_history("operator /", varlist)
1583 print_log()
1584 return s
1585
1586 def get_fit(self, row=0):
1587 """
1588 Print or return the stored fits for a row in the scantable
1589 Parameters:
1590 row: the row which the fit has been applied to.
1591 """
1592 if row >= self.nrow():
1593 return
1594 from asap.asapfit import asapfit
1595 fit = asapfit(self._getfit(row))
1596 if rcParams['verbose']:
1597 print fit
1598 return
1599 else:
1600 return fit.as_dict()
1601
1602 def _add_history(self, funcname, parameters):
1603 # create date
1604 sep = "##"
1605 from datetime import datetime
1606 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1607 hist = dstr+sep
1608 hist += funcname+sep#cdate+sep
1609 if parameters.has_key('self'): del parameters['self']
1610 for k, v in parameters.iteritems():
1611 if type(v) is dict:
1612 for k2, v2 in v.iteritems():
1613 hist += k2
1614 hist += "="
1615 if isinstance(v2, scantable):
1616 hist += 'scantable'
1617 elif k2 == 'mask':
1618 if isinstance(v2, list) or isinstance(v2, tuple):
1619 hist += str(self._zip_mask(v2))
1620 else:
1621 hist += str(v2)
1622 else:
1623 hist += str(v2)
1624 else:
1625 hist += k
1626 hist += "="
1627 if isinstance(v, scantable):
1628 hist += 'scantable'
1629 elif k == 'mask':
1630 if isinstance(v, list) or isinstance(v, tuple):
1631 hist += str(self._zip_mask(v))
1632 else:
1633 hist += str(v)
1634 else:
1635 hist += str(v)
1636 hist += sep
1637 hist = hist[:-2] # remove trailing '##'
1638 self._addhistory(hist)
1639
1640
1641 def _zip_mask(self, mask):
1642 mask = list(mask)
1643 i = 0
1644 segments = []
1645 while mask[i:].count(1):
1646 i += mask[i:].index(1)
1647 if mask[i:].count(0):
1648 j = i + mask[i:].index(0)
1649 else:
1650 j = len(mask)
1651 segments.append([i, j])
1652 i = j
1653 return segments
1654
1655 def _get_ordinate_label(self):
1656 fu = "("+self.get_fluxunit()+")"
1657 import re
1658 lbl = "Intensity"
1659 if re.match(".K.", fu):
1660 lbl = "Brightness Temperature "+ fu
1661 elif re.match(".Jy.", fu):
1662 lbl = "Flux density "+ fu
1663 return lbl
1664
1665 def _check_ifs(self):
1666 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1667 nchans = filter(lambda t: t > 0, nchans)
1668 return (sum(nchans)/len(nchans) == nchans[0])
1669
1670 def _fill(self, names, unit, average):
1671 import os
1672 from asap._asap import stfiller
1673 first = True
1674 fullnames = []
1675 for name in names:
1676 name = os.path.expandvars(name)
1677 name = os.path.expanduser(name)
1678 if not os.path.exists(name):
1679 msg = "File '%s' does not exists" % (name)
1680 if rcParams['verbose']:
1681 asaplog.push(msg)
1682 print asaplog.pop().strip()
1683 return
1684 raise IOError(msg)
1685 fullnames.append(name)
1686 if average:
1687 asaplog.push('Auto averaging integrations')
1688 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1689 for name in fullnames:
1690 tbl = Scantable(stype)
1691 r = stfiller(tbl)
1692 msg = "Importing %s..." % (name)
1693 asaplog.push(msg, False)
1694 print_log()
1695 r._open(name, -1, -1)
1696 r._read()
1697 #tbl = r._getdata()
1698 if average:
1699 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1700 #tbl = tbl2
1701 if not first:
1702 tbl = self._math._merge([self, tbl])
1703 #tbl = tbl2
1704 Scantable.__init__(self, tbl)
1705 r._close()
1706 del r, tbl
1707 first = False
1708 if unit is not None:
1709 self.set_fluxunit(unit)
1710 self.set_freqframe(rcParams['scantable.freqframe'])
1711