source: branches/Release2.1.1/python/scantable.py@ 1391

Last change on this file since 1391 was 1290, checked in by mar637, 18 years ago

More on ticket #81. scantable.stats now returns value as a list of length scantable.nrow

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 63.5 KB
Line 
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations withinb a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
81 Parameters:
82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: If the file should be overwritten if it exists.
92 The default False is to return with warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Parameters:
128 none
129 Example:
130 copiedscan = scan.copy()
131 """
132 sd = scantable(Scantable._copy(self))
133 return sd
134
135 def drop_scan(self, scanid=None):
136 """
137 Return a new scantable where the specified scan number(s) has(have)
138 been dropped.
139 Parameters:
140 scanid: a (list of) scan number(s)
141 """
142 from asap import _is_sequence_or_number as _is_valid
143 from asap import _to_list
144 from asap import unique
145 if not _is_valid(scanid):
146 if rcParams['verbose']:
147 print "Please specify a scanno to drop from the scantable"
148 return
149 else:
150 raise RuntimeError("No scan given")
151 try:
152 scanid = _to_list(scanid)
153 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
154 for sid in scanid: allscans.remove(sid)
155 if len(allscans) == 0:
156 raise ValueError("Can't remove all scans")
157 except ValueError:
158 if rcParams['verbose']:
159 print "Couldn't find any match."
160 return
161 else: raise
162 try:
163 bsel = self.get_selection()
164 sel = selector()
165 sel.set_scans(allscans)
166 self.set_selection(bsel+sel)
167 scopy = self._copy()
168 self.set_selection(bsel)
169 return scantable(scopy)
170 except RuntimeError:
171 if rcParams['verbose']:
172 print "Couldn't find any match."
173 else:
174 raise
175
176
177 def get_scan(self, scanid=None):
178 """
179 Return a specific scan (by scanno) or collection of scans (by
180 source name) in a new scantable.
181 Parameters:
182 scanid: a (list of) scanno or a source name, unix-style
183 patterns are accepted for source name matching, e.g.
184 '*_R' gets all 'ref scans
185 Example:
186 # get all scans containing the source '323p459'
187 newscan = scan.get_scan('323p459')
188 # get all 'off' scans
189 refscans = scan.get_scan('*_R')
190 # get a susbset of scans by scanno (as listed in scan.summary())
191 newscan = scan.get_scan([0, 2, 7, 10])
192 """
193 if scanid is None:
194 if rcParams['verbose']:
195 print "Please specify a scan no or name to " \
196 "retrieve from the scantable"
197 return
198 else:
199 raise RuntimeError("No scan given")
200
201 try:
202 bsel = self.get_selection()
203 sel = selector()
204 if type(scanid) is str:
205 sel.set_name(scanid)
206 self.set_selection(bsel+sel)
207 scopy = self._copy()
208 self.set_selection(bsel)
209 return scantable(scopy)
210 elif type(scanid) is int:
211 sel.set_scans([scanid])
212 self.set_selection(bsel+sel)
213 scopy = self._copy()
214 self.set_selection(bsel)
215 return scantable(scopy)
216 elif type(scanid) is list:
217 sel.set_scans(scanid)
218 self.set_selection(sel)
219 scopy = self._copy()
220 self.set_selection(bsel)
221 return scantable(scopy)
222 else:
223 msg = "Illegal scanid type, use 'int' or 'list' if ints."
224 if rcParams['verbose']:
225 print msg
226 else:
227 raise TypeError(msg)
228 except RuntimeError:
229 if rcParams['verbose']: print "Couldn't find any match."
230 else: raise
231
232 def __str__(self):
233 return Scantable._summary(self, True)
234
235 def summary(self, filename=None):
236 """
237 Print a summary of the contents of this scantable.
238 Parameters:
239 filename: the name of a file to write the putput to
240 Default - no file output
241 verbose: print extra info such as the frequency table
242 The default (False) is taken from .asaprc
243 """
244 info = Scantable._summary(self, True)
245 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
246 if filename is not None:
247 if filename is "":
248 filename = 'scantable_summary.txt'
249 from os.path import expandvars, isdir
250 filename = expandvars(filename)
251 if not isdir(filename):
252 data = open(filename, 'w')
253 data.write(info)
254 data.close()
255 else:
256 msg = "Illegal file name '%s'." % (filename)
257 if rcParams['verbose']:
258 print msg
259 else:
260 raise IOError(msg)
261 if rcParams['verbose']:
262 try:
263 from IPython.genutils import page as pager
264 except ImportError:
265 from pydoc import pager
266 pager(info)
267 else:
268 return info
269
270
271 def get_selection(self):
272 """
273 Get the selection object currently set on this scantable.
274 Parameters:
275 none
276 Example:
277 sel = scan.get_selection()
278 sel.set_ifs(0) # select IF 0
279 scan.set_selection(sel) # apply modified selection
280 """
281 return selector(self._getselection())
282
283 def set_selection(self, selection=selector()):
284 """
285 Select a subset of the data. All following operations on this scantable
286 are only applied to thi selection.
287 Parameters:
288 selection: a selector object (default unset the selection)
289 Examples:
290 sel = selector() # create a selection object
291 self.set_scans([0, 3]) # select SCANNO 0 and 3
292 scan.set_selection(sel) # set the selection
293 scan.summary() # will only print summary of scanno 0 an 3
294 scan.set_selection() # unset the selection
295 """
296 self._setselection(selection)
297
298 def stats(self, stat='stddev', mask=None):
299 """
300 Determine the specified statistic of the current beam/if/pol
301 Takes a 'mask' as an optional parameter to specify which
302 channels should be excluded.
303 Parameters:
304 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
305 'var', 'stddev', 'avdev', 'rms', 'median'
306 mask: an optional mask specifying where the statistic
307 should be determined.
308 Example:
309 scan.set_unit('channel')
310 msk = scan.create_mask([100, 200], [500, 600])
311 scan.stats(stat='mean', mask=m)
312 """
313 if mask == None:
314 mask = []
315 axes = ['Beam', 'IF', 'Pol', 'Time']
316 if not self._check_ifs():
317 raise ValueError("Cannot apply mask as the IFs have different "
318 "number of channels. Please use setselection() "
319 "to select individual IFs")
320
321 statvals = self._math._stats(self, mask, stat)
322 out = ''
323 axes = []
324 for i in range(self.nrow()):
325 axis = []
326 axis.append(self.getscan(i))
327 axis.append(self.getbeam(i))
328 axis.append(self.getif(i))
329 axis.append(self.getpol(i))
330 axis.append(self.getcycle(i))
331 axes.append(axis)
332 tm = self._gettime(i)
333 src = self._getsourcename(i)
334 out += 'Scan[%d] (%s) ' % (axis[0], src)
335 out += 'Time[%s]:\n' % (tm)
336 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
337 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
338 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
339 out += '= %3.3f\n' % (statvals[i])
340 out += "--------------------------------------------------\n"
341
342 if rcParams['verbose']:
343 print "--------------------------------------------------"
344 print " ", stat
345 print "--------------------------------------------------"
346 print out
347 #else:
348 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
349 # 'axes' : axes,
350 # 'data': statvals}
351 return statvals
352
353 def stddev(self, mask=None):
354 """
355 Determine the standard deviation of the current beam/if/pol
356 Takes a 'mask' as an optional parameter to specify which
357 channels should be excluded.
358 Parameters:
359 mask: an optional mask specifying where the standard
360 deviation should be determined.
361
362 Example:
363 scan.set_unit('channel')
364 msk = scan.create_mask([100, 200], [500, 600])
365 scan.stddev(mask=m)
366 """
367 return self.stats(stat='stddev', mask=mask);
368
369
370 def get_column_names(self):
371 """
372 Return a list of column names, which can be used for selection.
373 """
374 return list(Scantable.get_column_names(self))
375
376 def get_tsys(self):
377 """
378 Return the System temperatures.
379 Parameters:
380
381 Returns:
382 a list of Tsys values for the current selection
383 """
384
385 return self._row_callback(self._gettsys, "Tsys")
386
387 def _row_callback(self, callback, label):
388 axes = []
389 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
390 out = ""
391 outvec = []
392 for i in range(self.nrow()):
393 axis = []
394 axis.append(self.getscan(i))
395 axis.append(self.getbeam(i))
396 axis.append(self.getif(i))
397 axis.append(self.getpol(i))
398 axis.append(self.getcycle(i))
399 axes.append(axis)
400 tm = self._gettime(i)
401 src = self._getsourcename(i)
402 out += 'Scan[%d] (%s) ' % (axis[0], src)
403 out += 'Time[%s]:\n' % (tm)
404 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
405 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
406 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
407 outvec.append(callback(i))
408 out += '= %3.3f\n' % (outvec[i])
409 out += "--------------------------------------------------\n"
410 if rcParams['verbose']:
411 print "--------------------------------------------------"
412 print " %s" % (label)
413 print "--------------------------------------------------"
414 print out
415 # disabled because the vector seems more useful
416 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
417 return outvec
418
419 def _get_column(self, callback, row=-1):
420 """
421 """
422 if row == -1:
423 return [callback(i) for i in range(self.nrow())]
424 else:
425 if 0 <= row < self.nrow():
426 return callback(row)
427
428
429 def get_time(self, row=-1):
430 """
431 Get a list of time stamps for the observations.
432 Return a string for each integration in the scantable.
433 Parameters:
434 row: row no of integration. Default -1 return all rows
435 Example:
436 none
437 """
438 from time import strptime
439 from datetime import datetime
440 times = self._get_column(self._gettime, row)
441 format = "%Y/%m/%d/%H:%M:%S"
442 if isinstance(times, list):
443 return [datetime(*strptime(i, format)[:6]) for i in times]
444 else:
445 return datetime(*strptime(times, format)[:6])
446
447 def get_sourcename(self, row=-1):
448 """
449 Get a list source names for the observations.
450 Return a string for each integration in the scantable.
451 Parameters:
452 row: row no of integration. Default -1 return all rows
453 Example:
454 none
455 """
456 return self._get_column(self._getsourcename, row)
457
458 def get_elevation(self, row=-1):
459 """
460 Get a list of elevations for the observations.
461 Return a float for each integration in the scantable.
462 Parameters:
463 row: row no of integration. Default -1 return all rows
464 Example:
465 none
466 """
467 return self._get_column(self._getelevation, row)
468
469 def get_azimuth(self, row=-1):
470 """
471 Get a list of azimuths for the observations.
472 Return a float for each integration in the scantable.
473 Parameters:
474 row: row no of integration. Default -1 return all rows
475 Example:
476 none
477 """
478 return self._get_column(self._getazimuth, row)
479
480 def get_parangle(self, row=-1):
481 """
482 Get a list of parallactic angles for the observations.
483 Return a float for each integration in the scantable.
484 Parameters:
485 row: row no of integration. Default -1 return all rows
486 Example:
487 none
488 """
489 return self._get_column(self._getparangle, row)
490
491 def get_direction(self, row=-1):
492 """
493 Get a list of Positions on the sky (direction) for the observations.
494 Return a float for each integration in the scantable.
495 Parameters:
496 row: row no of integration. Default -1 return all rows
497 Example:
498 none
499 """
500 return self._get_column(self._getdirection, row)
501
502 def set_unit(self, unit='channel'):
503 """
504 Set the unit for all following operations on this scantable
505 Parameters:
506 unit: optional unit, default is 'channel'
507 one of '*Hz', 'km/s', 'channel', ''
508 """
509 varlist = vars()
510 if unit in ['', 'pixel', 'channel']:
511 unit = ''
512 inf = list(self._getcoordinfo())
513 inf[0] = unit
514 self._setcoordinfo(inf)
515 self._add_history("set_unit", varlist)
516
517 def set_instrument(self, instr):
518 """
519 Set the instrument for subsequent processing
520 Parameters:
521 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
522 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
523 """
524 self._setInstrument(instr)
525 self._add_history("set_instument", vars())
526 print_log()
527
528 def set_feedtype(self, feedtype):
529 """
530 Overwrite the feed type, which might not be set correctly.
531 Parameters:
532 feedtype: 'linear' or 'circular'
533 """
534 self._setfeedtype(feedtype)
535 self._add_history("set_feedtype", vars())
536 print_log()
537
538 def set_doppler(self, doppler='RADIO'):
539 """
540 Set the doppler for all following operations on this scantable.
541 Parameters:
542 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
543 """
544 varlist = vars()
545 inf = list(self._getcoordinfo())
546 inf[2] = doppler
547 self._setcoordinfo(inf)
548 self._add_history("set_doppler", vars())
549 print_log()
550
551 def set_freqframe(self, frame=None):
552 """
553 Set the frame type of the Spectral Axis.
554 Parameters:
555 frame: an optional frame type, default 'LSRK'. Valid frames are:
556 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
557 'GEO', 'GALACTO', 'LGROUP', 'CMB'
558 Examples:
559 scan.set_freqframe('BARY')
560 """
561 if frame is None: frame = rcParams['scantable.freqframe']
562 varlist = vars()
563 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
564 'GEO', 'GALACTO', 'LGROUP', 'CMB']
565
566 if frame in valid:
567 inf = list(self._getcoordinfo())
568 inf[1] = frame
569 self._setcoordinfo(inf)
570 self._add_history("set_freqframe", varlist)
571 else:
572 msg = "Please specify a valid freq type. Valid types are:\n", valid
573 if rcParams['verbose']:
574 print msg
575 else:
576 raise TypeError(msg)
577 print_log()
578
579 def set_dirframe(self, frame=""):
580 """
581 Set the frame type of the Direction on the sky.
582 Parameters:
583 frame: an optional frame type, default ''. Valid frames are:
584 'J2000', 'B1950', 'GALACTIC'
585 Examples:
586 scan.set_dirframe('GALACTIC')
587 """
588 varlist = vars()
589 try:
590 Scantable.set_dirframe(self, frame)
591 except RuntimeError, msg:
592 if rcParams['verbose']:
593 print msg
594 else:
595 raise
596 self._add_history("set_dirframe", varlist)
597
598 def get_unit(self):
599 """
600 Get the default unit set in this scantable
601 Parameters:
602 Returns:
603 A unit string
604 """
605 inf = self._getcoordinfo()
606 unit = inf[0]
607 if unit == '': unit = 'channel'
608 return unit
609
610 def get_abcissa(self, rowno=0):
611 """
612 Get the abcissa in the current coordinate setup for the currently
613 selected Beam/IF/Pol
614 Parameters:
615 rowno: an optional row number in the scantable. Default is the
616 first row, i.e. rowno=0
617 Returns:
618 The abcissa values and it's format string (as a dictionary)
619 """
620 abc = self._getabcissa(rowno)
621 lbl = self._getabcissalabel(rowno)
622 print_log()
623 return abc, lbl
624
625 def flag(self, mask=None):
626 """
627 Flag the selected data using an optional channel mask.
628 Parameters:
629 mask: an optional channel mask, created with create_mask. Default
630 (no mask) is all channels.
631 """
632 varlist = vars()
633 if mask is None:
634 mask = []
635 try:
636 self._flag(mask)
637 except RuntimeError, msg:
638 if rcParams['verbose']:
639 print msg
640 return
641 else: raise
642 self._add_history("flag", varlist)
643
644 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
645 """
646 Flag the data in 'lag' space by providing a frequency to remove.
647 Flagged data in the scantable gets set to 0.0 before the fft.
648 No taper is applied.
649 Parameters:
650 frequency: the frequency (really a period within the bandwidth) to remove
651 width: the width of the frequency to remove, to remove a range
652 of frequencies aroung the centre.
653 unit: the frequency unit (default "GHz")
654 Notes:
655 It is recommended to flag edges of the band or strong signals beforehand.
656 """
657 if insitu is None: insitu = rcParams['insitu']
658 self._math._setinsitu(insitu)
659 varlist = vars()
660 base = { "GHz": 100000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
661 if not base.has_key(unit):
662 raise ValueError("%s is not a valid unit." % unit)
663 try:
664 s = scantable(self._math._lag_flag(self, frequency*base[unit],
665 width*base[unit]))
666 except RuntimeError, msg:
667 if rcParams['verbose']:
668 print msg
669 return
670 else: raise
671 s._add_history("lag_flag", varlist)
672 print_log()
673 if insitu:
674 self._assign(s)
675 else:
676 return s
677
678
679 def create_mask(self, *args, **kwargs):
680 """
681 Compute and return a mask based on [min, max] windows.
682 The specified windows are to be INCLUDED, when the mask is
683 applied.
684 Parameters:
685 [min, max], [min2, max2], ...
686 Pairs of start/end points (inclusive)specifying the regions
687 to be masked
688 invert: optional argument. If specified as True,
689 return an inverted mask, i.e. the regions
690 specified are EXCLUDED
691 row: create the mask using the specified row for
692 unit conversions, default is row=0
693 only necessary if frequency varies over rows.
694 Example:
695 scan.set_unit('channel')
696 a)
697 msk = scan.create_mask([400, 500], [800, 900])
698 # masks everything outside 400 and 500
699 # and 800 and 900 in the unit 'channel'
700
701 b)
702 msk = scan.create_mask([400, 500], [800, 900], invert=True)
703 # masks the regions between 400 and 500
704 # and 800 and 900 in the unit 'channel'
705 c)
706 mask only channel 400
707 msk = scan.create_mask([400, 400])
708 """
709 row = 0
710 if kwargs.has_key("row"):
711 row = kwargs.get("row")
712 data = self._getabcissa(row)
713 u = self._getcoordinfo()[0]
714 if rcParams['verbose']:
715 if u == "": u = "channel"
716 msg = "The current mask window unit is %s" % u
717 i = self._check_ifs()
718 if not i:
719 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
720 asaplog.push(msg)
721 n = self.nchan()
722 msk = _n_bools(n, False)
723 # test if args is a 'list' or a 'normal *args - UGLY!!!
724
725 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
726 and args or args[0]
727 for window in ws:
728 if (len(window) != 2 or window[0] > window[1] ):
729 raise TypeError("A window needs to be defined as [min, max]")
730 for i in range(n):
731 if data[i] >= window[0] and data[i] <= window[1]:
732 msk[i] = True
733 if kwargs.has_key('invert'):
734 if kwargs.get('invert'):
735 msk = mask_not(msk)
736 print_log()
737 return msk
738
739 def get_restfreqs(self):
740 """
741 Get the restfrequency(s) stored in this scantable.
742 The return value(s) are always of unit 'Hz'
743 Parameters:
744 none
745 Returns:
746 a list of doubles
747 """
748 return list(self._getrestfreqs())
749
750
751 def set_restfreqs(self, freqs=None, unit='Hz'):
752 """
753 Set or replace the restfrequency specified and
754 If the 'freqs' argument holds a scalar,
755 then that rest frequency will be applied to all the selected
756 data. If the 'freqs' argument holds
757 a vector, then it MUST be of equal or smaller length than
758 the number of IFs (and the available restfrequencies will be
759 replaced by this vector). In this case, *all* data have
760 the restfrequency set per IF according
761 to the corresponding value you give in the 'freqs' vector.
762 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
763 IF 1 gets restfreq 2e9.
764 You can also specify the frequencies via a linecatalog/
765
766 Parameters:
767 freqs: list of rest frequency values or string idenitfiers
768 unit: unit for rest frequency (default 'Hz')
769
770 Example:
771 # set the given restfrequency for the whole table
772 scan.set_restfreqs(freqs=1.4e9)
773 # If thee number of IFs in the data is >= 2 the IF0 gets the first
774 # value IF1 the second...
775 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
776 #set the given restfrequency for the whole table (by name)
777 scan.set_restfreqs(freqs="OH1667")
778
779 Note:
780 To do more sophisticate Restfrequency setting, e.g. on a
781 source and IF basis, use scantable.set_selection() before using
782 this function.
783 # provide your scantable is call scan
784 selection = selector()
785 selection.set_name("ORION*")
786 selection.set_ifs([1])
787 scan.set_selection(selection)
788 scan.set_restfreqs(freqs=86.6e9)
789
790 """
791 varlist = vars()
792 from asap import linecatalog
793 # simple value
794 if isinstance(freqs, int) or isinstance(freqs, float):
795 self._setrestfreqs(freqs, "",unit)
796 # list of values
797 elif isinstance(freqs, list) or isinstance(freqs, tuple):
798 # list values are scalars
799 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
800 sel = selector()
801 savesel = self._getselection()
802 for i in xrange(len(freqs)):
803 sel.set_ifs([i])
804 self._setselection(sel)
805 self._setrestfreqs(freqs[i], "",unit)
806 self._setselection(savesel)
807 # list values are tuples, (value, name)
808 elif isinstance(freqs[-1], dict):
809 sel = selector()
810 savesel = self._getselection()
811 for i in xrange(len(freqs)):
812 sel.set_ifs([i])
813 self._setselection(sel)
814 self._setrestfreqs(freqs[i]["value"],
815 freqs[i]["name"], "MHz")
816 self._setselection(savesel)
817 # freqs are to be taken from a linecatalog
818 elif isinstance(freqs, linecatalog):
819 sel = selector()
820 savesel = self._getselection()
821 for i in xrange(freqs.nrow()):
822 sel.set_ifs([i])
823 self._setselection(sel)
824 self._setrestfreqs(freqs.get_frequency(i),
825 freqs.get_name(i), "MHz")
826 # ensure that we are not iterating past nIF
827 if i == self.nif()-1: break
828 self._setselection(savesel)
829 else:
830 return
831 self._add_history("set_restfreqs", varlist)
832
833
834
835 def history(self, filename=None):
836 """
837 Print the history. Optionally to a file.
838 """
839 hist = list(self._gethistory())
840 out = "-"*80
841 for h in hist:
842 if h.startswith("---"):
843 out += "\n"+h
844 else:
845 items = h.split("##")
846 date = items[0]
847 func = items[1]
848 items = items[2:]
849 out += "\n"+date+"\n"
850 out += "Function: %s\n Parameters:" % (func)
851 for i in items:
852 s = i.split("=")
853 out += "\n %s = %s" % (s[0], s[1])
854 out += "\n"+"-"*80
855 if filename is not None:
856 if filename is "":
857 filename = 'scantable_history.txt'
858 import os
859 filename = os.path.expandvars(os.path.expanduser(filename))
860 if not os.path.isdir(filename):
861 data = open(filename, 'w')
862 data.write(out)
863 data.close()
864 else:
865 msg = "Illegal file name '%s'." % (filename)
866 if rcParams['verbose']:
867 print msg
868 else:
869 raise IOError(msg)
870 if rcParams['verbose']:
871 try:
872 from IPython.genutils import page as pager
873 except ImportError:
874 from pydoc import pager
875 pager(out)
876 else:
877 return out
878 return
879 #
880 # Maths business
881 #
882
883 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
884 """
885 Return the (time) weighted average of a scan.
886 Note:
887 in channels only - align if necessary
888 Parameters:
889 mask: an optional mask (only used for 'var' and 'tsys'
890 weighting)
891 scanav: True averages each scan separately
892 False (default) averages all scans together,
893 weight: Weighting scheme.
894 'none' (mean no weight)
895 'var' (1/var(spec) weighted)
896 'tsys' (1/Tsys**2 weighted)
897 'tint' (integration time weighted)
898 'tintsys' (Tint/Tsys**2)
899 'median' ( median averaging)
900 The default is 'tint'
901 align: align the spectra in velocity before averaging. It takes
902 the time of the first spectrum as reference time.
903 Example:
904 # time average the scantable without using a mask
905 newscan = scan.average_time()
906 """
907 varlist = vars()
908 if weight is None: weight = 'TINT'
909 if mask is None: mask = ()
910 if scanav: scanav = "SCAN"
911 else: scanav = "NONE"
912 scan = (self, )
913 try:
914 if align:
915 scan = (self.freq_align(insitu=False), )
916 s = None
917 if weight.upper() == 'MEDIAN':
918 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
919 scanav))
920 else:
921 s = scantable(self._math._average(scan, mask, weight.upper(),
922 scanav))
923 except RuntimeError, msg:
924 if rcParams['verbose']:
925 print msg
926 return
927 else: raise
928 s._add_history("average_time", varlist)
929 print_log()
930 return s
931
932 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
933 """
934 Return a scan where all spectra are converted to either
935 Jansky or Kelvin depending upon the flux units of the scan table.
936 By default the function tries to look the values up internally.
937 If it can't find them (or if you want to over-ride), you must
938 specify EITHER jyperk OR eta (and D which it will try to look up
939 also if you don't set it). jyperk takes precedence if you set both.
940 Parameters:
941 jyperk: the Jy / K conversion factor
942 eta: the aperture efficiency
943 d: the geomtric diameter (metres)
944 insitu: if False a new scantable is returned.
945 Otherwise, the scaling is done in-situ
946 The default is taken from .asaprc (False)
947 """
948 if insitu is None: insitu = rcParams['insitu']
949 self._math._setinsitu(insitu)
950 varlist = vars()
951 if jyperk is None: jyperk = -1.0
952 if d is None: d = -1.0
953 if eta is None: eta = -1.0
954 s = scantable(self._math._convertflux(self, d, eta, jyperk))
955 s._add_history("convert_flux", varlist)
956 print_log()
957 if insitu: self._assign(s)
958 else: return s
959
960 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
961 """
962 Return a scan after applying a gain-elevation correction.
963 The correction can be made via either a polynomial or a
964 table-based interpolation (and extrapolation if necessary).
965 You specify polynomial coefficients, an ascii table or neither.
966 If you specify neither, then a polynomial correction will be made
967 with built in coefficients known for certain telescopes (an error
968 will occur if the instrument is not known).
969 The data and Tsys are *divided* by the scaling factors.
970 Parameters:
971 poly: Polynomial coefficients (default None) to compute a
972 gain-elevation correction as a function of
973 elevation (in degrees).
974 filename: The name of an ascii file holding correction factors.
975 The first row of the ascii file must give the column
976 names and these MUST include columns
977 "ELEVATION" (degrees) and "FACTOR" (multiply data
978 by this) somewhere.
979 The second row must give the data type of the
980 column. Use 'R' for Real and 'I' for Integer.
981 An example file would be
982 (actual factors are arbitrary) :
983
984 TIME ELEVATION FACTOR
985 R R R
986 0.1 0 0.8
987 0.2 20 0.85
988 0.3 40 0.9
989 0.4 60 0.85
990 0.5 80 0.8
991 0.6 90 0.75
992 method: Interpolation method when correcting from a table.
993 Values are "nearest", "linear" (default), "cubic"
994 and "spline"
995 insitu: if False a new scantable is returned.
996 Otherwise, the scaling is done in-situ
997 The default is taken from .asaprc (False)
998 """
999
1000 if insitu is None: insitu = rcParams['insitu']
1001 self._math._setinsitu(insitu)
1002 varlist = vars()
1003 if poly is None:
1004 poly = ()
1005 from os.path import expandvars
1006 filename = expandvars(filename)
1007 s = scantable(self._math._gainel(self, poly, filename, method))
1008 s._add_history("gain_el", varlist)
1009 print_log()
1010 if insitu: self._assign(s)
1011 else: return s
1012
1013 def freq_align(self, reftime=None, method='cubic', insitu=None):
1014 """
1015 Return a scan where all rows have been aligned in frequency/velocity.
1016 The alignment frequency frame (e.g. LSRK) is that set by function
1017 set_freqframe.
1018 Parameters:
1019 reftime: reference time to align at. By default, the time of
1020 the first row of data is used.
1021 method: Interpolation method for regridding the spectra.
1022 Choose from "nearest", "linear", "cubic" (default)
1023 and "spline"
1024 insitu: if False a new scantable is returned.
1025 Otherwise, the scaling is done in-situ
1026 The default is taken from .asaprc (False)
1027 """
1028 if insitu is None: insitu = rcParams["insitu"]
1029 self._math._setinsitu(insitu)
1030 varlist = vars()
1031 if reftime is None: reftime = ""
1032 s = scantable(self._math._freq_align(self, reftime, method))
1033 s._add_history("freq_align", varlist)
1034 print_log()
1035 if insitu: self._assign(s)
1036 else: return s
1037
1038 def opacity(self, tau, insitu=None):
1039 """
1040 Apply an opacity correction. The data
1041 and Tsys are multiplied by the correction factor.
1042 Parameters:
1043 tau: Opacity from which the correction factor is
1044 exp(tau*ZD)
1045 where ZD is the zenith-distance
1046 insitu: if False a new scantable is returned.
1047 Otherwise, the scaling is done in-situ
1048 The default is taken from .asaprc (False)
1049 """
1050 if insitu is None: insitu = rcParams['insitu']
1051 self._math._setinsitu(insitu)
1052 varlist = vars()
1053 s = scantable(self._math._opacity(self, tau))
1054 s._add_history("opacity", varlist)
1055 print_log()
1056 if insitu: self._assign(s)
1057 else: return s
1058
1059 def bin(self, width=5, insitu=None):
1060 """
1061 Return a scan where all spectra have been binned up.
1062 width: The bin width (default=5) in pixels
1063 insitu: if False a new scantable is returned.
1064 Otherwise, the scaling is done in-situ
1065 The default is taken from .asaprc (False)
1066 """
1067 if insitu is None: insitu = rcParams['insitu']
1068 self._math._setinsitu(insitu)
1069 varlist = vars()
1070 s = scantable(self._math._bin(self, width))
1071 s._add_history("bin", varlist)
1072 print_log()
1073 if insitu: self._assign(s)
1074 else: return s
1075
1076
1077 def resample(self, width=5, method='cubic', insitu=None):
1078 """
1079 Return a scan where all spectra have been binned up
1080 width: The bin width (default=5) in pixels
1081 method: Interpolation method when correcting from a table.
1082 Values are "nearest", "linear", "cubic" (default)
1083 and "spline"
1084 insitu: if False a new scantable is returned.
1085 Otherwise, the scaling is done in-situ
1086 The default is taken from .asaprc (False)
1087 """
1088 if insitu is None: insitu = rcParams['insitu']
1089 self._math._setinsitu(insitu)
1090 varlist = vars()
1091 s = scantable(self._math._resample(self, method, width))
1092 s._add_history("resample", varlist)
1093 print_log()
1094 if insitu: self._assign(s)
1095 else: return s
1096
1097
1098 def average_pol(self, mask=None, weight='none'):
1099 """
1100 Average the Polarisations together.
1101 Parameters:
1102 mask: An optional mask defining the region, where the
1103 averaging will be applied. The output will have all
1104 specified points masked.
1105 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1106 weighted), or 'tsys' (1/Tsys**2 weighted)
1107 """
1108 varlist = vars()
1109 if mask is None:
1110 mask = ()
1111 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1112 s._add_history("average_pol", varlist)
1113 print_log()
1114 return s
1115
1116 def average_beam(self, mask=None, weight='none'):
1117 """
1118 Average the Beams together.
1119 Parameters:
1120 mask: An optional mask defining the region, where the
1121 averaging will be applied. The output will have all
1122 specified points masked.
1123 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1124 weighted), or 'tsys' (1/Tsys**2 weighted)
1125 """
1126 varlist = vars()
1127 if mask is None:
1128 mask = ()
1129 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1130 s._add_history("average_beam", varlist)
1131 print_log()
1132 return s
1133
    def convert_pol(self, poltype=None):
        """
        Convert the data to a different polarisation type.
        Parameters:
            poltype:    The new polarisation type. Valid types are:
                        "linear", "stokes" and "circular"
        Returns:
            a new scantable holding the converted data, or None in
            verbose mode when the conversion fails.
        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            # verbose mode: report the failure and bail out quietly;
            # otherwise re-raise for the caller to handle
            if rcParams['verbose']:
                print msg
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        print_log()
        return s
1153
1154 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1155 """
1156 Smooth the spectrum by the specified kernel (conserving flux).
1157 Parameters:
1158 scan: The input scan
1159 kernel: The type of smoothing kernel. Select from
1160 'hanning' (default), 'gaussian' and 'boxcar'.
1161 The first three characters are sufficient.
1162 width: The width of the kernel in pixels. For hanning this is
1163 ignored otherwise it defauls to 5 pixels.
1164 For 'gaussian' it is the Full Width Half
1165 Maximum. For 'boxcar' it is the full width.
1166 insitu: if False a new scantable is returned.
1167 Otherwise, the scaling is done in-situ
1168 The default is taken from .asaprc (False)
1169 Example:
1170 none
1171 """
1172 if insitu is None: insitu = rcParams['insitu']
1173 self._math._setinsitu(insitu)
1174 varlist = vars()
1175 s = scantable(self._math._smooth(self, kernel.lower(), width))
1176 s._add_history("smooth", varlist)
1177 print_log()
1178 if insitu: self._assign(s)
1179 else: return s
1180
1181
    def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
        """
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:    an optional mask restricting the channels used in
                     the fit
            order:   the order of the polynomial (default is 0)
            plot:    plot the fit and the residual. In this each
                     individual fit has to be approved, by typing 'y'
                     or 'n'
            insitu:  if False a new scantable is returned.
                     Otherwise, the fitting is done in-situ
                     The default is taken from .asaprc (False)
        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()
        if mask is None:
            # default: fit over every channel
            mask = [True for i in xrange(self.nchan(-1))]
        from asap.asapfitter import fitter
        try:
            f = fitter()
            f.set_scan(self, mask)
            f.set_function(poly=order)
            s = f.auto_fit(insitu, plot=plot)
            s._add_history("poly_baseline", varlist)
            print_log()
            if insitu: self._assign(s)
            else: return s
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            # verbose mode: report instead of raising
            if rcParams['verbose']:
                print msg
                return
            else:
                raise RuntimeError(msg)
1221
1222
1223 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1224 threshold=3, chan_avg_limit=1, plot=False,
1225 insitu=None):
1226 """
1227 Return a scan which has been baselined (all rows) by a polynomial.
1228 Spectral lines are detected first using linefinder and masked out
1229 to avoid them affecting the baseline solution.
1230
1231 Parameters:
1232 mask: an optional mask retreived from scantable
1233 edge: an optional number of channel to drop at
1234 the edge of spectrum. If only one value is
1235 specified, the same number will be dropped from
1236 both sides of the spectrum. Default is to keep
1237 all channels. Nested tuples represent individual
1238 edge selection for different IFs (a number of spectral
1239 channels can be different)
1240 order: the order of the polynomial (default is 0)
1241 threshold: the threshold used by line finder. It is better to
1242 keep it large as only strong lines affect the
1243 baseline solution.
1244 chan_avg_limit:
1245 a maximum number of consequtive spectral channels to
1246 average during the search of weak and broad lines.
1247 The default is no averaging (and no search for weak
1248 lines). If such lines can affect the fitted baseline
1249 (e.g. a high order polynomial is fitted), increase this
1250 parameter (usually values up to 8 are reasonable). Most
1251 users of this method should find the default value
1252 sufficient.
1253 plot: plot the fit and the residual. In this each
1254 indivual fit has to be approved, by typing 'y'
1255 or 'n'
1256 insitu: if False a new scantable is returned.
1257 Otherwise, the scaling is done in-situ
1258 The default is taken from .asaprc (False)
1259
1260 Example:
1261 scan2=scan.auto_poly_baseline(order=7)
1262 """
1263 if insitu is None: insitu = rcParams['insitu']
1264 varlist = vars()
1265 from asap.asapfitter import fitter
1266 from asap.asaplinefind import linefinder
1267 from asap import _is_sequence_or_number as _is_valid
1268
1269 # check whether edge is set up for each IF individually
1270 individualedge = False;
1271 if len(edge) > 1:
1272 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1273 individualedge = True;
1274
1275 if not _is_valid(edge, int) and not individualedge:
1276 raise ValueError, "Parameter 'edge' has to be an integer or a \
1277 pair of integers specified as a tuple. Nested tuples are allowed \
1278 to make individual selection for different IFs."
1279
1280 curedge = (0, 0)
1281 if individualedge:
1282 for edgepar in edge:
1283 if not _is_valid(edgepar, int):
1284 raise ValueError, "Each element of the 'edge' tuple has \
1285 to be a pair of integers or an integer."
1286 else:
1287 curedge = edge;
1288
1289 # setup fitter
1290 f = fitter()
1291 f.set_function(poly=order)
1292
1293 # setup line finder
1294 fl = linefinder()
1295 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1296
1297 if not insitu:
1298 workscan = self.copy()
1299 else:
1300 workscan = self
1301
1302 fl.set_scan(workscan)
1303
1304 rows = range(workscan.nrow())
1305 asaplog.push("Processing:")
1306 for r in rows:
1307 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1308 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1309 workscan.getpol(r), workscan.getcycle(r))
1310 asaplog.push(msg, False)
1311
1312 # figure out edge parameter
1313 if individualedge:
1314 if len(edge) >= workscan.getif(r):
1315 raise RuntimeError, "Number of edge elements appear to " \
1316 "be less than the number of IFs"
1317 curedge = edge[workscan.getif(r)]
1318
1319 # setup line finder
1320 fl.find_lines(r, mask, curedge)
1321 f.set_scan(workscan, fl.get_mask())
1322 f.x = workscan._getabcissa(r)
1323 f.y = workscan._getspectrum(r)
1324 f.data = None
1325 f.fit()
1326 x = f.get_parameters()
1327 if plot:
1328 f.plot(residual=True)
1329 x = raw_input("Accept fit ( [y]/n ): ")
1330 if x.upper() == 'N':
1331 continue
1332 workscan._setspectrum(f.fitter.getresidual(), r)
1333 if plot:
1334 f._p.unmap()
1335 f._p = None
1336 workscan._add_history("auto_poly_baseline", varlist)
1337 if insitu:
1338 self._assign(workscan)
1339 else:
1340 return workscan
1341
1342 def rotate_linpolphase(self, angle):
1343 """
1344 Rotate the phase of the complex polarization O=Q+iU correlation.
1345 This is always done in situ in the raw data. So if you call this
1346 function more than once then each call rotates the phase further.
1347 Parameters:
1348 angle: The angle (degrees) to rotate (add) by.
1349 Examples:
1350 scan.rotate_linpolphase(2.3)
1351 """
1352 varlist = vars()
1353 self._math._rotate_linpolphase(self, angle)
1354 self._add_history("rotate_linpolphase", varlist)
1355 print_log()
1356 return
1357
1358
1359 def rotate_xyphase(self, angle):
1360 """
1361 Rotate the phase of the XY correlation. This is always done in situ
1362 in the data. So if you call this function more than once
1363 then each call rotates the phase further.
1364 Parameters:
1365 angle: The angle (degrees) to rotate (add) by.
1366 Examples:
1367 scan.rotate_xyphase(2.3)
1368 """
1369 varlist = vars()
1370 self._math._rotate_xyphase(self, angle)
1371 self._add_history("rotate_xyphase", varlist)
1372 print_log()
1373 return
1374
1375 def swap_linears(self):
1376 """
1377 Swap the linear polarisations XX and YY
1378 """
1379 varlist = vars()
1380 self._math._swap_linears(self)
1381 self._add_history("swap_linears", varlist)
1382 print_log()
1383 return
1384
1385 def invert_phase(self):
1386 """
1387 Invert the phase of the complex polarisation
1388 """
1389 varlist = vars()
1390 self._math._invert_phase(self)
1391 self._add_history("invert_phase", varlist)
1392 print_log()
1393 return
1394
1395 def add(self, offset, insitu=None):
1396 """
1397 Return a scan where all spectra have the offset added
1398 Parameters:
1399 offset: the offset
1400 insitu: if False a new scantable is returned.
1401 Otherwise, the scaling is done in-situ
1402 The default is taken from .asaprc (False)
1403 """
1404 if insitu is None: insitu = rcParams['insitu']
1405 self._math._setinsitu(insitu)
1406 varlist = vars()
1407 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1408 s._add_history("add", varlist)
1409 print_log()
1410 if insitu:
1411 self._assign(s)
1412 else:
1413 return s
1414
1415 def scale(self, factor, tsys=True, insitu=None, ):
1416 """
1417 Return a scan where all spectra are scaled by the give 'factor'
1418 Parameters:
1419 factor: the scaling factor
1420 insitu: if False a new scantable is returned.
1421 Otherwise, the scaling is done in-situ
1422 The default is taken from .asaprc (False)
1423 tsys: if True (default) then apply the operation to Tsys
1424 as well as the data
1425 """
1426 if insitu is None: insitu = rcParams['insitu']
1427 self._math._setinsitu(insitu)
1428 varlist = vars()
1429 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1430 s._add_history("scale", varlist)
1431 print_log()
1432 if insitu:
1433 self._assign(s)
1434 else:
1435 return s
1436
    def auto_quotient(self, mode='time', preserve=True):
        """
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs".
        It will support "closest off in time" in the future.
        Parameters:
            mode:      the on/off detection mode; only 'time' (the
                       default) is currently accepted — any other
                       value raises ValueError.
            preserve:  you can preserve (default) the continuum or
                       remove it. The equations used are
                       preserve: Output = Toff * (on/off) - Toff
                       remove:   Output = Toff * (on/off) - Ton
        """
        modes = ["time"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        varlist = vars()
        s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        print_log()
        return s
1462
1463 def mx_quotient(self, mask = None, weight='median', preserve=True):
1464 """
1465 Form a quotient using "off" beams when observing in "MX" mode.
1466 Parameters:
1467 mask: an optional mask to be used when weight == 'stddev'
1468 weight: How to average the off beams. Default is 'median'.
1469 preserve: you can preserve (default) the continuum or
1470 remove it. The equations used are
1471 preserve: Output = Toff * (on/off) - Toff
1472 remove: Output = Toff * (on/off) - Ton
1473 """
1474 if mask is None: mask = ()
1475 varlist = vars()
1476 on = scantable(self._math._mx_extract(self, 'on'))
1477 preoff = scantable(self._math._mx_extract(self, 'off'))
1478 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1479 from asapmath import quotient
1480 q = quotient(on, off, preserve)
1481 q._add_history("mx_quotient", varlist)
1482 print_log()
1483 return q
1484
1485 def freq_switch(self, insitu=None):
1486 """
1487 Apply frequency switching to the data.
1488 Parameters:
1489 insitu: if False a new scantable is returned.
1490 Otherwise, the swictching is done in-situ
1491 The default is taken from .asaprc (False)
1492 Example:
1493 none
1494 """
1495 if insitu is None: insitu = rcParams['insitu']
1496 self._math._setinsitu(insitu)
1497 varlist = vars()
1498 s = scantable(self._math._freqswitch(self))
1499 s._add_history("freq_switch", varlist)
1500 print_log()
1501 if insitu: self._assign(s)
1502 else: return s
1503
1504 def recalc_azel(self):
1505 """
1506 Recalculate the azimuth and elevation for each position.
1507 Parameters:
1508 none
1509 Example:
1510 """
1511 varlist = vars()
1512 self._recalcazel()
1513 self._add_history("recalc_azel", varlist)
1514 print_log()
1515 return
1516
1517 def __add__(self, other):
1518 varlist = vars()
1519 s = None
1520 if isinstance(other, scantable):
1521 print "scantable + scantable NYI"
1522 return
1523 elif isinstance(other, float):
1524 s = scantable(self._math._unaryop(self, other, "ADD", False))
1525 else:
1526 raise TypeError("Other input is not a scantable or float value")
1527 s._add_history("operator +", varlist)
1528 print_log()
1529 return s
1530
1531 def __sub__(self, other):
1532 """
1533 implicit on all axes and on Tsys
1534 """
1535 varlist = vars()
1536 s = None
1537 if isinstance(other, scantable):
1538 print "scantable - scantable NYI"
1539 return
1540 elif isinstance(other, float):
1541 s = scantable(self._math._unaryop(self, other, "SUB", False))
1542 else:
1543 raise TypeError("Other input is not a scantable or float value")
1544 s._add_history("operator -", varlist)
1545 print_log()
1546 return s
1547
1548 def __mul__(self, other):
1549 """
1550 implicit on all axes and on Tsys
1551 """
1552 varlist = vars()
1553 s = None
1554 if isinstance(other, scantable):
1555 print "scantable * scantable NYI"
1556 return
1557 elif isinstance(other, float):
1558 s = scantable(self._math._unaryop(self, other, "MUL", False))
1559 else:
1560 raise TypeError("Other input is not a scantable or float value")
1561 s._add_history("operator *", varlist)
1562 print_log()
1563 return s
1564
1565
1566 def __div__(self, other):
1567 """
1568 implicit on all axes and on Tsys
1569 """
1570 varlist = vars()
1571 s = None
1572 if isinstance(other, scantable):
1573 print "scantable / scantable NYI"
1574 return
1575 elif isinstance(other, float):
1576 if other == 0.0:
1577 raise ZeroDivisionError("Dividing by zero is not recommended")
1578 s = scantable(self._math._unaryop(self, other, "DIV", False))
1579 else:
1580 raise TypeError("Other input is not a scantable or float value")
1581 s._add_history("operator /", varlist)
1582 print_log()
1583 return s
1584
1585 def get_fit(self, row=0):
1586 """
1587 Print or return the stored fits for a row in the scantable
1588 Parameters:
1589 row: the row which the fit has been applied to.
1590 """
1591 if row > self.nrow():
1592 return
1593 from asap.asapfit import asapfit
1594 fit = asapfit(self._getfit(row))
1595 if rcParams['verbose']:
1596 print fit
1597 return
1598 else:
1599 return fit.as_dict()
1600
    def _add_history(self, funcname, parameters):
        """
        Record a processing step in the scantable history.
        Parameters:
            funcname:    name of the function being recorded
            parameters:  dict of the call's locals (usually vars());
                         the 'self' entry is removed, scantable values
                         are stored as the literal string 'scantable',
                         and mask sequences are compressed via
                         _zip_mask.
        The entry is stored as '##'-separated fields:
        date##funcname##key=value##...
        """
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # never serialise the scantable itself
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nested dicts (e.g. captured kwargs)
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # store masks compactly as [start, stop] segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # store masks compactly as [start, stop] segments
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
1638
1639
1640 def _zip_mask(self, mask):
1641 mask = list(mask)
1642 i = 0
1643 segments = []
1644 while mask[i:].count(1):
1645 i += mask[i:].index(1)
1646 if mask[i:].count(0):
1647 j = i + mask[i:].index(0)
1648 else:
1649 j = len(mask)
1650 segments.append([i, j])
1651 i = j
1652 return segments
1653
1654 def _get_ordinate_label(self):
1655 fu = "("+self.get_fluxunit()+")"
1656 import re
1657 lbl = "Intensity"
1658 if re.match(".K.", fu):
1659 lbl = "Brightness Temperature "+ fu
1660 elif re.match(".Jy.", fu):
1661 lbl = "Flux density "+ fu
1662 return lbl
1663
1664 def _check_ifs(self):
1665 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1666 nchans = filter(lambda t: t > 0, nchans)
1667 return (sum(nchans)/len(nchans) == nchans[0])
1668
    def _fill(self, names, unit, average):
        """
        Import one or more data files into this scantable (in place).
        Parameters:
            names:    list of file names; environment variables and
                      '~' are expanded, and missing files either abort
                      quietly (verbose mode) or raise IOError.
            unit:     flux unit to set after filling, or None to keep
                      the reader's choice
            average:  if True, average integrations within scans after
                      each file is read
        """
        import os
        from asap._asap import stfiller
        first = True
        fullnames = []
        # validate and expand all paths before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    # verbose mode: report and abort without raising
                    asaplog.push(msg)
                    print asaplog.pop().strip()
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type flag: 1 for disk-based tables, 0 for memory
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = stfiller(tbl)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r._open(name, -1, -1)
            r._read()
            #tbl = r._getdata()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
                #tbl = tbl2
            if not first:
                # merge subsequent files into the already-filled table
                tbl = self._math._merge([self, tbl])
                #tbl = tbl2
            # rebind this scantable to the newly filled/merged table
            Scantable.__init__(self, tbl)
            r._close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        self.set_freqframe(rcParams['scantable.freqframe'])
1710
Note: See TracBrowser for help on using the repository browser.