source: trunk/python/scantable.py@1438

Last change on this file since 1438 was 1435, checked in by Malte Marquarding, 16 years ago

optionally suppress history

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 66.9 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Overrides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):# or \
42# (isinstance(filename, list) or isinstance(filename, tuple)) \
43# and isinstance(filename[-1], str):
44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
49 if rcParams['verbose']:
50 asaplog.push(s)
51 print asaplog.pop().strip()
52 return
53 raise IOError(s)
54 if os.path.isdir(filename) \
55 and not os.path.exists(filename+'/table.f1'):
56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
60 if unit is not None:
61 self.set_fluxunit(unit)
62 self.set_freqframe(rcParams['scantable.freqframe'])
63 else:
64 msg = "The given file '%s'is not a valid " \
65 "asap table." % (filename)
66 if rcParams['verbose']:
67 print msg
68 return
69 else:
70 raise IOError(msg)
71 else:
72 self._fill([filename], unit, average)
73 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
74 and isinstance(filename[-1], str):
75 self._fill(filename, unit, average)
76 self._add_history("scantable", varlist)
77 print_log()
78
79 def save(self, name=None, format=None, overwrite=False):
80 """
81 Store the scantable on disk. This can be an asap (aips++) Table,
82 SDFITS or MS2 format.
83 Parameters:
84 name: the name of the output file. For format "ASCII"
85 this is the root file name (data in 'name'.txt
86 and header in 'name'_header.txt)
87 format: an optional file format. Default is ASAP.
88 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
89 'SDFITS' (save as SDFITS file)
90 'ASCII' (saves as ascii text file)
91 'MS2' (saves as an aips++
92 MeasurementSet V2)
93 overwrite: whether to overwrite an existing file. The default
94 (False) is to return with a warning without
95 writing the output. USE WITH CARE.
96 Example:
97 scan.save('myscan.asap')
98 scan.save('myscan.sdfits', 'SDFITS')
99 """
100 from os import path
101 if format is None: format = rcParams['scantable.save']
102 suffix = '.'+format.lower()
103 if name is None or name == "":
104 name = 'scantable'+suffix
105 msg = "No filename given. Using default name %s..." % name
106 asaplog.push(msg)
107 name = path.expandvars(name)
108 if path.isfile(name) or path.isdir(name):
109 if not overwrite:
110 msg = "File %s exists." % name
111 if rcParams['verbose']:
112 print msg
113 return
114 else:
115 raise IOError(msg)
116 format2 = format.upper()
117 if format2 == 'ASAP':
118 self._save(name)
119 else:
120 from asap._asap import stwriter as stw
121 writer = stw(format2)
122 writer.write(self, name)
123 print_log()
124 return
125
126 def copy(self):
127 """
128 Return a copy of this scantable.
129 Note:
130 This makes a full (deep) copy. scan2 = scan1 makes a reference.
131 Parameters:
132 none
133 Example:
134 copiedscan = scan.copy()
135 """
136 sd = scantable(Scantable._copy(self))
137 return sd
138
139 def drop_scan(self, scanid=None):
140 """
141 Return a new scantable where the specified scan number(s) have
142 been dropped.
143 Parameters:
144 scanid: a (list of) scan number(s)
145 """
146 from asap import _is_sequence_or_number as _is_valid
147 from asap import _to_list
148 from asap import unique
149 if not _is_valid(scanid):
150 if rcParams['verbose']:
151 print "Please specify a scanno to drop from the scantable"
152 return
153 else:
154 raise RuntimeError("No scan given")
155 try:
156 scanid = _to_list(scanid)
157 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
158 for sid in scanid: allscans.remove(sid)
159 if len(allscans) == 0:
160 raise ValueError("Can't remove all scans")
161 except ValueError:
162 if rcParams['verbose']:
163 print "Couldn't find any match."
164 return
165 else: raise
166 try:
167 bsel = self.get_selection()
168 sel = selector()
169 sel.set_scans(allscans)
170 self.set_selection(bsel+sel)
171 scopy = self._copy()
172 self.set_selection(bsel)
173 return scantable(scopy)
174 except RuntimeError:
175 if rcParams['verbose']:
176 print "Couldn't find any match."
177 else:
178 raise
179
180
181 def get_scan(self, scanid=None):
182 """
183 Return a specific scan (by scanno) or collection of scans (by
184 source name) in a new scantable.
185 Note:
186 See scantable.drop_scan() for the inverse operation.
187 Parameters:
188 scanid: a (list of) scanno or a source name, unix-style
189 patterns are accepted for source name matching, e.g.
190 '*_R' gets all 'ref' scans
191 Example:
192 # get all scans containing the source '323p459'
193 newscan = scan.get_scan('323p459')
194 # get all 'off' scans
195 refscans = scan.get_scan('*_R')
196 # get a subset of scans by scanno (as listed in scan.summary())
197 newscan = scan.get_scan([0, 2, 7, 10])
198 """
199 if scanid is None:
200 if rcParams['verbose']:
201 print "Please specify a scan no or name to " \
202 "retrieve from the scantable"
203 return
204 else:
205 raise RuntimeError("No scan given")
206
207 try:
208 bsel = self.get_selection()
209 sel = selector()
210 if type(scanid) is str:
211 sel.set_name(scanid)
212 self.set_selection(bsel+sel)
213 scopy = self._copy()
214 self.set_selection(bsel)
215 return scantable(scopy)
216 elif type(scanid) is int:
217 sel.set_scans([scanid])
218 self.set_selection(bsel+sel)
219 scopy = self._copy()
220 self.set_selection(bsel)
221 return scantable(scopy)
222 elif type(scanid) is list:
223 sel.set_scans(scanid)
224 self.set_selection(sel)
225 scopy = self._copy()
226 self.set_selection(bsel)
227 return scantable(scopy)
228 else:
229 msg = "Illegal scanid type, use 'int' or 'list' if ints."
230 if rcParams['verbose']:
231 print msg
232 else:
233 raise TypeError(msg)
234 except RuntimeError:
235 if rcParams['verbose']: print "Couldn't find any match."
236 else: raise
237
238 def __str__(self):
239 return Scantable._summary(self, True)
240
241 def summary(self, filename=None):
242 """
243 Print a summary of the contents of this scantable.
244 Parameters:
245 filename: the name of a file to write the output to
246 Default - no file output
247 verbose: print extra info such as the frequency table
248 The default (False) is taken from .asaprc
249 """
250 info = Scantable._summary(self, True)
251 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
252 if filename is not None:
253 if filename == "":
254 filename = 'scantable_summary.txt'
255 from os.path import expandvars, isdir
256 filename = expandvars(filename)
257 if not isdir(filename):
258 data = open(filename, 'w')
259 data.write(info)
260 data.close()
261 else:
262 msg = "Illegal file name '%s'." % (filename)
263 if rcParams['verbose']:
264 print msg
265 else:
266 raise IOError(msg)
267 if rcParams['verbose']:
268 try:
269 from IPython.genutils import page as pager
270 except ImportError:
271 from pydoc import pager
272 pager(info)
273 else:
274 return info
275
276
277 def get_selection(self):
278 """
279 Get the selection object currently set on this scantable.
280 Parameters:
281 none
282 Example:
283 sel = scan.get_selection()
284 sel.set_ifs(0) # select IF 0
285 scan.set_selection(sel) # apply modified selection
286 """
287 return selector(self._getselection())
288
289 def set_selection(self, selection=selector()):
290 """
291 Select a subset of the data. All following operations on this scantable
292 are only applied to this selection.
293 Parameters:
294 selection: a selector object (default unset the selection)
295 Examples:
296 sel = selector() # create a selection object
297 sel.set_scans([0, 3]) # select SCANNO 0 and 3
298 scan.set_selection(sel) # set the selection
299 scan.summary() # will only print summary of scanno 0 and 3
300 scan.set_selection() # unset the selection
301 """
302 self._setselection(selection)
303
304 def stats(self, stat='stddev', mask=None):
305 """
306 Determine the specified statistic of the current beam/if/pol
307 Takes a 'mask' as an optional parameter to specify which
308 channels should be excluded.
309 Parameters:
310 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
311 'var', 'stddev', 'avdev', 'rms', 'median'
312 mask: an optional mask specifying where the statistic
313 should be determined.
314 Example:
315 scan.set_unit('channel')
316 msk = scan.create_mask([100, 200], [500, 600])
317 scan.stats(stat='mean', mask=msk)
318 """
319 if mask is None:
320 mask = []
321 axes = ['Beam', 'IF', 'Pol', 'Time']
322 if not self._check_ifs():
323 raise ValueError("Cannot apply mask as the IFs have different "
324 "number of channels. Please use setselection() "
325 "to select individual IFs")
326
327 statvals = self._math._stats(self, mask, stat)
328 out = ''
329 axes = []
330 for i in range(self.nrow()):
331 axis = []
332 axis.append(self.getscan(i))
333 axis.append(self.getbeam(i))
334 axis.append(self.getif(i))
335 axis.append(self.getpol(i))
336 axis.append(self.getcycle(i))
337 axes.append(axis)
338 tm = self._gettime(i)
339 src = self._getsourcename(i)
340 out += 'Scan[%d] (%s) ' % (axis[0], src)
341 out += 'Time[%s]:\n' % (tm)
342 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
343 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
344 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
345 out += '= %3.3f\n' % (statvals[i])
346 out += "--------------------------------------------------\n"
347
348 if rcParams['verbose']:
349 print "--------------------------------------------------"
350 print " ", stat
351 print "--------------------------------------------------"
352 print out
353 #else:
354 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
355 # 'axes' : axes,
356 # 'data': statvals}
357 return statvals
358
359 def stddev(self, mask=None):
360 """
361 Determine the standard deviation of the current beam/if/pol
362 Takes a 'mask' as an optional parameter to specify which
363 channels should be excluded.
364 Parameters:
365 mask: an optional mask specifying where the standard
366 deviation should be determined.
367
368 Example:
369 scan.set_unit('channel')
370 msk = scan.create_mask([100, 200], [500, 600])
371 scan.stddev(mask=msk)
372 """
373 return self.stats(stat='stddev', mask=mask)
374
375
376 def get_column_names(self):
377 """
378 Return a list of column names, which can be used for selection.
379 """
380 return list(Scantable.get_column_names(self))
381
382 def get_tsys(self):
383 """
384 Return the System temperatures.
385 Returns:
386 a list of Tsys values for the current selection
387 """
388
389 return self._row_callback(self._gettsys, "Tsys")
390
391 def _row_callback(self, callback, label):
392 axes = []
393 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
394 out = ""
395 outvec = []
396 for i in range(self.nrow()):
397 axis = []
398 axis.append(self.getscan(i))
399 axis.append(self.getbeam(i))
400 axis.append(self.getif(i))
401 axis.append(self.getpol(i))
402 axis.append(self.getcycle(i))
403 axes.append(axis)
404 tm = self._gettime(i)
405 src = self._getsourcename(i)
406 out += 'Scan[%d] (%s) ' % (axis[0], src)
407 out += 'Time[%s]:\n' % (tm)
408 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
409 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
410 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
411 outvec.append(callback(i))
412 out += '= %3.3f\n' % (outvec[i])
413 out += "--------------------------------------------------\n"
414 if rcParams['verbose']:
415 print "--------------------------------------------------"
416 print " %s" % (label)
417 print "--------------------------------------------------"
418 print out
419 # disabled because the vector seems more useful
420 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
421 return outvec
422
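# Added annotation: _row_callback above loops over all rows of the current
# selection, applies 'callback' row by row and returns the resulting list;
# when rcParams['verbose'] is set it also prints a per-row summary labelled
# with 'label'. get_tsys() is a thin wrapper around it.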
423 def _get_column(self, callback, row=-1):
424 """
425 """
426 if row == -1:
427 return [callback(i) for i in range(self.nrow())]
428 else:
429 if 0 <= row < self.nrow():
430 return callback(row)
431
432
433 def get_time(self, row=-1, asdatetime=False):
434 """
435 Get a list of time stamps for the observations.
436 Return a datetime object for each integration time stamp in the scantable.
437 Parameters:
438 row: row no of integration. Default -1 returns all rows
439 asdatetime: return values as datetime objects rather than strings
440 Example:
441 none
442 """
443 from time import strptime
444 from datetime import datetime
445 times = self._get_column(self._gettime, row)
446 if not asdatetime:
447 return times
448 format = "%Y/%m/%d/%H:%M:%S"
449 if isinstance(times, list):
450 return [datetime(*strptime(i, format)[:6]) for i in times]
451 else:
452 return datetime(*strptime(times, format)[:6])
453
454
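# Added annotation: illustrative use of get_time (assumes a loaded scantable
# 'scan'; values are hypothetical):
#   t0 = scan.get_time(0)                    # timestamp string for row 0
#   dts = scan.get_time(asdatetime=True)     # list of datetime objects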
455 def get_inttime(self, row=-1):
456 """
457 Get a list of integration times for the observations.
458 Return a time in seconds for each integration in the scantable.
459 Parameters:
460 row: row no of integration. Default -1 returns all rows.
461 Example:
462 none
463 """
464 return self._get_column(self._getinttime, row)
465
466
467 def get_sourcename(self, row=-1):
468 """
469 Get a list of source names for the observations.
470 Return a string for each integration in the scantable.
471 Parameters:
472 row: row no of integration. Default -1 returns all rows.
473 Example:
474 none
475 """
476 return self._get_column(self._getsourcename, row)
477
478 def get_elevation(self, row=-1):
479 """
480 Get a list of elevations for the observations.
481 Return a float for each integration in the scantable.
482 Parameters:
483 row: row no of integration. Default -1 returns all rows.
484 Example:
485 none
486 """
487 return self._get_column(self._getelevation, row)
488
489 def get_azimuth(self, row=-1):
490 """
491 Get a list of azimuths for the observations.
492 Return a float for each integration in the scantable.
493 Parameters:
494 row: row no of integration. Default -1 returns all rows.
495 Example:
496 none
497 """
498 return self._get_column(self._getazimuth, row)
499
500 def get_parangle(self, row=-1):
501 """
502 Get a list of parallactic angles for the observations.
503 Return a float for each integration in the scantable.
504 Parameters:
505 row: row no of integration. Default -1 returns all rows.
506 Example:
507 none
508 """
509 return self._get_column(self._getparangle, row)
510
511 def get_direction(self, row=-1):
512 """
513 Get a list of Positions on the sky (direction) for the observations.
514 Return a float for each integration in the scantable.
515 Parameters:
516 row: row no of integration. Default -1 returns all rows
517 Example:
518 none
519 """
520 return self._get_column(self._getdirection, row)
521
522 def get_directionval(self, row=-1):
523 """
524 Get a list of Positions on the sky (direction) for the observations.
525 Return a float for each integration in the scantable.
526 Parameters:
527 row: row no of integration. Default -1 returns all rows
528 Example:
529 none
530 """
531 return self._get_column(self._getdirectionvec, row)
532
533
534 def set_unit(self, unit='channel'):
535 """
536 Set the unit for all following operations on this scantable
537 Parameters:
538 unit: optional unit, default is 'channel'
539 one of '*Hz', 'km/s', 'channel', ''
540 """
541 varlist = vars()
542 if unit in ['', 'pixel', 'channel']:
543 unit = ''
544 inf = list(self._getcoordinfo())
545 inf[0] = unit
546 self._setcoordinfo(inf)
547 self._add_history("set_unit", varlist)
548
549 def set_instrument(self, instr):
550 """
551 Set the instrument for subsequent processing.
552 Parameters:
553 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
554 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
555 """
556 self._setInstrument(instr)
557 self._add_history("set_instrument", vars())
558 print_log()
559
560 def set_feedtype(self, feedtype):
561 """
562 Overwrite the feed type, which might not be set correctly.
563 Parameters:
564 feedtype: 'linear' or 'circular'
565 """
566 self._setfeedtype(feedtype)
567 self._add_history("set_feedtype", vars())
568 print_log()
569
570 def set_doppler(self, doppler='RADIO'):
571 """
572 Set the doppler for all following operations on this scantable.
573 Parameters:
574 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
575 """
576 varlist = vars()
577 inf = list(self._getcoordinfo())
578 inf[2] = doppler
579 self._setcoordinfo(inf)
580 self._add_history("set_doppler", vars())
581 print_log()
582
583 def set_freqframe(self, frame=None):
584 """
585 Set the frame type of the Spectral Axis.
586 Parameters:
587 frame: an optional frame type, default 'LSRK'. Valid frames are:
588 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
589 'GEO', 'GALACTO', 'LGROUP', 'CMB'
590 Examples:
591 scan.set_freqframe('BARY')
592 """
593 if frame is None: frame = rcParams['scantable.freqframe']
594 varlist = vars()
595 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
596 'GEO', 'GALACTO', 'LGROUP', 'CMB']
597
598 if frame in valid:
599 inf = list(self._getcoordinfo())
600 inf[1] = frame
601 self._setcoordinfo(inf)
602 self._add_history("set_freqframe", varlist)
603 else:
604 msg = "Please specify a valid freq type. Valid types are:\n", valid
605 if rcParams['verbose']:
606 print msg
607 else:
608 raise TypeError(msg)
609 print_log()
610
611 def set_dirframe(self, frame=""):
612 """
613 Set the frame type of the Direction on the sky.
614 Parameters:
615 frame: an optional frame type, default ''. Valid frames are:
616 'J2000', 'B1950', 'GALACTIC'
617 Examples:
618 scan.set_dirframe('GALACTIC')
619 """
620 varlist = vars()
621 try:
622 Scantable.set_dirframe(self, frame)
623 except RuntimeError, msg:
624 if rcParams['verbose']:
625 print msg
626 else:
627 raise
628 self._add_history("set_dirframe", varlist)
629
630 def get_unit(self):
631 """
632 Get the default unit set in this scantable
633 Returns:
634 A unit string
635 """
636 inf = self._getcoordinfo()
637 unit = inf[0]
638 if unit == '': unit = 'channel'
639 return unit
640
641 def get_abcissa(self, rowno=0):
642 """
643 Get the abcissa in the current coordinate setup for the currently
644 selected Beam/IF/Pol
645 Parameters:
646 rowno: an optional row number in the scantable. Default is the
647 first row, i.e. rowno=0
648 Returns:
649 The abcissa values and the format string (as a dictionary)
650 """
651 abc = self._getabcissa(rowno)
652 lbl = self._getabcissalabel(rowno)
653 print_log()
654 return abc, lbl
655
656 def flag(self, mask=None):
657 """
658 Flag the selected data using an optional channel mask.
659 Parameters:
660 mask: an optional channel mask, created with create_mask. Default
661 (no mask) is all channels.
662 """
663 varlist = vars()
664 if mask is None:
665 mask = []
666 try:
667 self._flag(mask)
668 except RuntimeError, msg:
669 if rcParams['verbose']:
670 print msg
671 return
672 else: raise
673 self._add_history("flag", varlist)
674
675 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
676 """
677 Flag the data in 'lag' space by providing a frequency to remove.
678 Flagged data in the scantable gets set to 0.0 before the fft.
679 No taper is applied.
680 Parameters:
681 frequency: the frequency (really a period within the bandwidth)
682 to remove
683 width: the width of the frequency to remove, to remove a
684 range of frequencies around the centre.
685 unit: the frequency unit (default "GHz")
686 Notes:
687 It is recommended to flag edges of the band or strong
688 signals beforehand.
689 """
690 if insitu is None: insitu = rcParams['insitu']
691 self._math._setinsitu(insitu)
692 varlist = vars()
693 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
694 if not base.has_key(unit):
695 raise ValueError("%s is not a valid unit." % unit)
696 try:
697 s = scantable(self._math._lag_flag(self, frequency*base[unit],
698 width*base[unit]))
699 except RuntimeError, msg:
700 if rcParams['verbose']:
701 print msg
702 return
703 else: raise
704 s._add_history("lag_flag", varlist)
705 print_log()
706 if insitu:
707 self._assign(s)
708 else:
709 return s
710
711
712 def create_mask(self, *args, **kwargs):
713 """
714 Compute and return a mask based on [min, max] windows.
715 The specified windows are to be INCLUDED, when the mask is
716 applied.
717 Parameters:
718 [min, max], [min2, max2], ...
719 Pairs of start/end points (inclusive) specifying the regions
720 to be masked
721 invert: optional argument. If specified as True,
722 return an inverted mask, i.e. the regions
723 specified are EXCLUDED
724 row: create the mask using the specified row for
725 unit conversions, default is row=0
726 only necessary if frequency varies over rows.
727 Example:
728 scan.set_unit('channel')
729 a)
730 msk = scan.create_mask([400, 500], [800, 900])
731 # masks everything outside 400 and 500
732 # and 800 and 900 in the unit 'channel'
733
734 b)
735 msk = scan.create_mask([400, 500], [800, 900], invert=True)
736 # masks the regions between 400 and 500
737 # and 800 and 900 in the unit 'channel'
738 c)
739 mask only channel 400
740 msk = scan.create_mask([400, 400])
741 """
742 row = 0
743 if kwargs.has_key("row"):
744 row = kwargs.get("row")
745 data = self._getabcissa(row)
746 u = self._getcoordinfo()[0]
747 if rcParams['verbose']:
748 if u == "": u = "channel"
749 msg = "The current mask window unit is %s" % u
750 i = self._check_ifs()
751 if not i:
752 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
753 asaplog.push(msg)
754 n = self.nchan()
755 msk = _n_bools(n, False)
756 # test if args is a 'list' or a 'normal *args - UGLY!!!
757
758 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
759 and args or args[0]
760 for window in ws:
761 if (len(window) != 2 or window[0] > window[1] ):
762 raise TypeError("A window needs to be defined as [min, max]")
763 for i in range(n):
764 if data[i] >= window[0] and data[i] <= window[1]:
765 msk[i] = True
766 if kwargs.has_key('invert'):
767 if kwargs.get('invert'):
768 msk = mask_not(msk)
769 print_log()
770 return msk
771
772 def get_restfreqs(self):
773 """
774 Get the restfrequency(s) stored in this scantable.
775 The return value(s) are always of unit 'Hz'
776 Parameters:
777 none
778 Returns:
779 a list of doubles
780 """
781 return list(self._getrestfreqs())
782
783
784 def set_restfreqs(self, freqs=None, unit='Hz'):
785 """
786 Set or replace the restfrequency specified.
787 If the 'freqs' argument holds a scalar,
788 then that rest frequency will be applied to all the selected
789 data. If the 'freqs' argument holds
790 a vector, then it MUST be of equal or smaller length than
791 the number of IFs (and the available restfrequencies will be
792 replaced by this vector). In this case, *all* data have
793 the restfrequency set per IF according
794 to the corresponding value you give in the 'freqs' vector.
795 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
796 IF 1 gets restfreq 2e9.
797 You can also specify the frequencies via a linecatalog.
798
799 Parameters:
800 freqs: list of rest frequency values or string identifiers
801 unit: unit for rest frequency (default 'Hz')
802
803 Example:
804 # set the given restfrequency for the whole table
805 scan.set_restfreqs(freqs=1.4e9)
806 # If the number of IFs in the data is >= 2, IF0 gets the first
807 # value IF1 the second...
808 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
809 #set the given restfrequency for the whole table (by name)
810 scan.set_restfreqs(freqs="OH1667")
811
812 Note:
813 To do more sophisticated rest frequency setting, e.g. on a
814 source and IF basis, use scantable.set_selection() before using
815 this function.
816 # provided your scantable is called scan
817 selection = selector()
818 selection.set_name("ORION*")
819 selection.set_ifs([1])
820 scan.set_selection(selection)
821 scan.set_restfreqs(freqs=86.6e9)
822
823 """
824 varlist = vars()
825 from asap import linecatalog
826 # simple value
827 if isinstance(freqs, int) or isinstance(freqs, float):
828 self._setrestfreqs(freqs, "",unit)
829 # list of values
830 elif isinstance(freqs, list) or isinstance(freqs, tuple):
831 # list values are scalars
832 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
833 sel = selector()
834 savesel = self._getselection()
835 iflist = self.getifnos()
836 for i in xrange(len(freqs)):
837 sel.set_ifs(iflist[i])
838 self._setselection(sel)
839 self._setrestfreqs(freqs[i], "",unit)
840 self._setselection(savesel)
841 # list values are dicts, {'name': ..., 'value': ...}
842 elif isinstance(freqs[-1], dict):
843 sel = selector()
844 savesel = self._getselection()
845 iflist = self.getifnos()
846 for i in xrange(len(freqs)):
847 sel.set_ifs(iflist[i])
848 self._setselection(sel)
849 self._setrestfreqs(freqs[i]["value"],
850 freqs[i]["name"], "MHz")
851 self._setselection(savesel)
852 # freqs are to be taken from a linecatalog
853 elif isinstance(freqs, linecatalog):
854 sel = selector()
855 savesel = self._getselection()
856 iflist = self.getifnos()
857 for i in xrange(freqs.nrow()):
858 sel.set_ifs(iflist[i])
859 self._setselection(sel)
860 self._setrestfreqs(freqs.get_frequency(i),
861 freqs.get_name(i), "MHz")
862 # ensure that we are not iterating past nIF
863 if i == self.nif()-1: break
864 self._setselection(savesel)
865 else:
866 return
867 self._add_history("set_restfreqs", varlist)
868
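# Added annotation: a hedged example of the dict form handled above; the
# line names and values are illustrative. Each dict is applied to the
# corresponding IF and the value is interpreted in MHz:
#   scan.set_restfreqs(freqs=[{'name': 'OH1665', 'value': 1665.4018},
#                             {'name': 'OH1667', 'value': 1667.3590}])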
869 def shift_refpix(self, delta):
870 """
871 Shift the reference pixel of the Spectral Coordinate by an
872 integer amount.
873 Parameters:
874 delta: the amount to shift by
875 Note:
876 Be careful using this with broadband data.
877 """
878 Scantable.shift(self, delta)
879
880 def history(self, filename=None):
881 """
882 Print the history. Optionally to a file.
883 Parameters:
884 filename: The name of the file to save the history to.
885 """
886 hist = list(self._gethistory())
887 out = "-"*80
888 for h in hist:
889 if h.startswith("---"):
890 out += "\n"+h
891 else:
892 items = h.split("##")
893 date = items[0]
894 func = items[1]
895 items = items[2:]
896 out += "\n"+date+"\n"
897 out += "Function: %s\n Parameters:" % (func)
898 for i in items:
899 s = i.split("=")
900 out += "\n %s = %s" % (s[0], s[1])
901 out += "\n"+"-"*80
902 if filename is not None:
903 if filename == "":
904 filename = 'scantable_history.txt'
905 import os
906 filename = os.path.expandvars(os.path.expanduser(filename))
907 if not os.path.isdir(filename):
908 data = open(filename, 'w')
909 data.write(out)
910 data.close()
911 else:
912 msg = "Illegal file name '%s'." % (filename)
913 if rcParams['verbose']:
914 print msg
915 else:
916 raise IOError(msg)
917 if rcParams['verbose']:
918 try:
919 from IPython.genutils import page as pager
920 except ImportError:
921 from pydoc import pager
922 pager(out)
923 else:
924 return out
925 return
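# Added annotation: each history record written by _add_history() (further
# down) is a '##'-separated string, e.g. (hypothetical values)
#   "2009/01/30 12:00:00##set_unit##unit=km/s"
# which history() above splits back into date, function name and parameters.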
926 #
927 # Maths business
928 #
929
930 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
931 """
932 Return the (time) weighted average of a scan.
933 Note:
934 in channels only - align if necessary
935 Parameters:
936 mask: an optional mask (only used for 'var' and 'tsys'
937 weighting)
938 scanav: True averages each scan separately
939 False (default) averages all scans together,
940 weight: Weighting scheme.
941 'none' (mean no weight)
942 'var' (1/var(spec) weighted)
943 'tsys' (1/Tsys**2 weighted)
944 'tint' (integration time weighted)
945 'tintsys' (Tint/Tsys**2)
946 'median' (median averaging)
947 The default is 'tint'
948 align: align the spectra in velocity before averaging. It takes
949 the time of the first spectrum as reference time.
950 Example:
951 # time average the scantable without using a mask
952 newscan = scan.average_time()
953 """
954 varlist = vars()
955 if weight is None: weight = 'TINT'
956 if mask is None: mask = ()
957 if scanav: scanav = "SCAN"
958 else: scanav = "NONE"
959 scan = (self, )
960 try:
961 if align:
962 scan = (self.freq_align(insitu=False), )
963 s = None
964 if weight.upper() == 'MEDIAN':
965 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
966 scanav))
967 else:
968 s = scantable(self._math._average(scan, mask, weight.upper(),
969 scanav))
970 except RuntimeError, msg:
971 if rcParams['verbose']:
972 print msg
973 return
974 else: raise
975 s._add_history("average_time", varlist)
976 print_log()
977 return s
978
979 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
980 """
981 Return a scan where all spectra are converted to either
982 Jansky or Kelvin depending upon the flux units of the scan table.
983 By default the function tries to look the values up internally.
984 If it can't find them (or if you want to override), you must
985 specify EITHER jyperk OR eta (and D which it will try to look up
986 also if you don't set it). jyperk takes precedence if you set both.
987 Parameters:
988 jyperk: the Jy / K conversion factor
989 eta: the aperture efficiency
990 d: the geometric diameter (metres)
991 insitu: if False a new scantable is returned.
992 Otherwise, the scaling is done in-situ
993 The default is taken from .asaprc (False)
994 """
995 if insitu is None: insitu = rcParams['insitu']
996 self._math._setinsitu(insitu)
997 varlist = vars()
998 if jyperk is None: jyperk = -1.0
999 if d is None: d = -1.0
1000 if eta is None: eta = -1.0
1001 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1002 s._add_history("convert_flux", varlist)
1003 print_log()
1004 if insitu: self._assign(s)
1005 else: return s
1006
1007 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1008 """
1009 Return a scan after applying a gain-elevation correction.
1010 The correction can be made via either a polynomial or a
1011 table-based interpolation (and extrapolation if necessary).
1012 You specify polynomial coefficients, an ascii table or neither.
1013 If you specify neither, then a polynomial correction will be made
1014 with built in coefficients known for certain telescopes (an error
1015 will occur if the instrument is not known).
1016 The data and Tsys are *divided* by the scaling factors.
1017 Parameters:
1018 poly: Polynomial coefficients (default None) to compute a
1019 gain-elevation correction as a function of
1020 elevation (in degrees).
1021 filename: The name of an ascii file holding correction factors.
1022 The first row of the ascii file must give the column
1023 names and these MUST include columns
1024 "ELEVATION" (degrees) and "FACTOR" (multiply data
1025 by this) somewhere.
1026 The second row must give the data type of the
1027 column. Use 'R' for Real and 'I' for Integer.
1028 An example file would be
1029 (actual factors are arbitrary) :
1030
1031 TIME ELEVATION FACTOR
1032 R R R
1033 0.1 0 0.8
1034 0.2 20 0.85
1035 0.3 40 0.9
1036 0.4 60 0.85
1037 0.5 80 0.8
1038 0.6 90 0.75
1039 method: Interpolation method when correcting from a table.
1040 Values are "nearest", "linear" (default), "cubic"
1041 and "spline"
1042 insitu: if False a new scantable is returned.
1043 Otherwise, the scaling is done in-situ
1044 The default is taken from .asaprc (False)
1045 """
1046
1047 if insitu is None: insitu = rcParams['insitu']
1048 self._math._setinsitu(insitu)
1049 varlist = vars()
1050 if poly is None:
1051 poly = ()
1052 from os.path import expandvars
1053 filename = expandvars(filename)
1054 s = scantable(self._math._gainel(self, poly, filename, method))
1055 s._add_history("gain_el", varlist)
1056 print_log()
1057 if insitu: self._assign(s)
1058 else: return s
1059
1060 def freq_align(self, reftime=None, method='cubic', insitu=None):
1061 """
1062 Return a scan where all rows have been aligned in frequency/velocity.
1063 The alignment frequency frame (e.g. LSRK) is that set by function
1064 set_freqframe.
1065 Parameters:
1066 reftime: reference time to align at. By default, the time of
1067 the first row of data is used.
1068 method: Interpolation method for regridding the spectra.
1069 Choose from "nearest", "linear", "cubic" (default)
1070 and "spline"
1071 insitu: if False a new scantable is returned.
1072 Otherwise, the scaling is done in-situ
1073 The default is taken from .asaprc (False)
1074 """
1075 if insitu is None: insitu = rcParams["insitu"]
1076 self._math._setinsitu(insitu)
1077 varlist = vars()
1078 if reftime is None: reftime = ""
1079 s = scantable(self._math._freq_align(self, reftime, method))
1080 s._add_history("freq_align", varlist)
1081 print_log()
1082 if insitu: self._assign(s)
1083 else: return s
1084
1085 def opacity(self, tau, insitu=None):
1086 """
1087 Apply an opacity correction. The data
1088 and Tsys are multiplied by the correction factor.
1089 Parameters:
1090 tau: Opacity from which the correction factor is
1091 exp(tau*ZD)
1092 where ZD is the zenith-distance
1093 insitu: if False a new scantable is returned.
1094 Otherwise, the scaling is done in-situ
1095 The default is taken from .asaprc (False)
1096 """
1097 if insitu is None: insitu = rcParams['insitu']
1098 self._math._setinsitu(insitu)
1099 varlist = vars()
1100 s = scantable(self._math._opacity(self, tau))
1101 s._add_history("opacity", varlist)
1102 print_log()
1103 if insitu: self._assign(s)
1104 else: return s
1105
1106 def bin(self, width=5, insitu=None):
1107 """
1108 Return a scan where all spectra have been binned up.
1109 Parameters:
1110 width: The bin width (default=5) in pixels
1111 insitu: if False a new scantable is returned.
1112 Otherwise, the scaling is done in-situ
1113 The default is taken from .asaprc (False)
1114 """
1115 if insitu is None: insitu = rcParams['insitu']
1116 self._math._setinsitu(insitu)
1117 varlist = vars()
1118 s = scantable(self._math._bin(self, width))
1119 s._add_history("bin", varlist)
1120 print_log()
1121 if insitu: self._assign(s)
1122 else: return s
1123
1124
1125 def resample(self, width=5, method='cubic', insitu=None):
1126 """
1127 Return a scan where all spectra have been resampled (rebinned with interpolation).
1128
1129 Parameters:
1130 width: The bin width (default=5) in pixels
1131 method: Interpolation method for regridding the spectra.
1132 Values are "nearest", "linear", "cubic" (default)
1133 and "spline"
1134 insitu: if False a new scantable is returned.
1135 Otherwise, the scaling is done in-situ
1136 The default is taken from .asaprc (False)
1137 """
1138 if insitu is None: insitu = rcParams['insitu']
1139 self._math._setinsitu(insitu)
1140 varlist = vars()
1141 s = scantable(self._math._resample(self, method, width))
1142 s._add_history("resample", varlist)
1143 print_log()
1144 if insitu: self._assign(s)
1145 else: return s
1146
1147
1148 def average_pol(self, mask=None, weight='none'):
1149 """
1150 Average the Polarisations together.
1151 Parameters:
1152 mask: An optional mask defining the region, where the
1153 averaging will be applied. The output will have all
1154 specified points masked.
1155 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1156 weighted), or 'tsys' (1/Tsys**2 weighted)
1157 """
1158 varlist = vars()
1159 if mask is None:
1160 mask = ()
1161 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1162 s._add_history("average_pol", varlist)
1163 print_log()
1164 return s
1165
1166 def average_beam(self, mask=None, weight='none'):
1167 """
1168 Average the Beams together.
1169 Parameters:
1170 mask: An optional mask defining the region, where the
1171 averaging will be applied. The output will have all
1172 specified points masked.
1173 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1174 weighted), or 'tsys' (1/Tsys**2 weighted)
1175 """
1176 varlist = vars()
1177 if mask is None:
1178 mask = ()
1179 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1180 s._add_history("average_beam", varlist)
1181 print_log()
1182 return s
1183
1184 def convert_pol(self, poltype=None):
1185 """
1186 Convert the data to a different polarisation type.
1187 Parameters:
1188 poltype: The new polarisation type. Valid types are:
1189 "linear", "stokes" and "circular"
1190 """
1191 varlist = vars()
1192 try:
1193 s = scantable(self._math._convertpol(self, poltype))
1194 except RuntimeError, msg:
1195 if rcParams['verbose']:
1196 print msg
1197 return
1198 else:
1199 raise
1200 s._add_history("convert_pol", varlist)
1201 print_log()
1202 return s
1203
1204 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1205 """
1206 Smooth the spectrum by the specified kernel (conserving flux).
1207 Parameters:
1208 kernel: The type of smoothing kernel. Select from
1209 'hanning' (default), 'gaussian', 'boxcar' and
1210 'rmedian'
1211 width: The width of the kernel in pixels. For hanning this is
1212 ignored, otherwise it defaults to 5 pixels.
1213 For 'gaussian' it is the Full Width Half
1214 Maximum. For 'boxcar' it is the full width.
1215 For 'rmedian' it is the half width.
1216 insitu: if False a new scantable is returned.
1217 Otherwise, the scaling is done in-situ
1218 The default is taken from .asaprc (False)
1219 Example:
1220 none
1221 """
1222 if insitu is None: insitu = rcParams['insitu']
1223 self._math._setinsitu(insitu)
1224 varlist = vars()
1225 s = scantable(self._math._smooth(self, kernel.lower(), width))
1226 s._add_history("smooth", varlist)
1227 print_log()
1228 if insitu: self._assign(s)
1229 else: return s
1230
1231
1232 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1233 """
1234 Return a scan which has been baselined (all rows) by a polynomial.
1235 Parameters:
1236 mask: an optional mask
1237 order: the order of the polynomial (default is 0)
1238 plot: plot the fit and the residual. In this mode each
1239 individual fit has to be approved by typing 'y'
1240 or 'n'
1241 uselin: use linear polynomial fit
1242 insitu: if False a new scantable is returned.
1243 Otherwise, the scaling is done in-situ
1244 The default is taken from .asaprc (False)
1245 Example:
1246 # return a scan baselined by a third order polynomial,
1247 # not using a mask
1248 bscan = scan.poly_baseline(order=3)
1249 """
1250 if insitu is None: insitu = rcParams['insitu']
1251 varlist = vars()
1252 if mask is None:
1253 mask = [True for i in xrange(self.nchan(-1))]
1254 from asap.asapfitter import fitter
1255 try:
1256 f = fitter()
1257 f.set_scan(self, mask)
1258 #f.set_function(poly=order)
1259 if uselin:
1260 f.set_function(lpoly=order)
1261 else:
1262 f.set_function(poly=order)
1263 s = f.auto_fit(insitu, plot=plot)
1264 s._add_history("poly_baseline", varlist)
1265 print_log()
1266 if insitu: self._assign(s)
1267 else: return s
1268 except RuntimeError:
1269 msg = "The fit failed, possibly because it didn't converge."
1270 if rcParams['verbose']:
1271 print msg
1272 return
1273 else:
1274 raise RuntimeError(msg)
1275
1276
1277 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1278 threshold=3, chan_avg_limit=1, plot=False,
1279 insitu=None):
1280 """
1281 Return a scan which has been baselined (all rows) by a polynomial.
1282 Spectral lines are detected first using linefinder and masked out
1283 to avoid them affecting the baseline solution.
1284
1285 Parameters:
1286 mask: an optional mask retrieved from scantable
1287 edge: an optional number of channels to drop at
1288 the edge of spectrum. If only one value is
1289 specified, the same number will be dropped from
1290 both sides of the spectrum. Default is to keep
1291 all channels. Nested tuples represent individual
1292 edge selection for different IFs (the number of spectral
1293 channels can differ between IFs)
1294 order: the order of the polynomial (default is 0)
1295 threshold: the threshold used by line finder. It is better to
1296 keep it large as only strong lines affect the
1297 baseline solution.
1298 chan_avg_limit:
1299 a maximum number of consecutive spectral channels to
1300 average during the search of weak and broad lines.
1301 The default is no averaging (and no search for weak
1302 lines). If such lines can affect the fitted baseline
1303 (e.g. a high order polynomial is fitted), increase this
1304 parameter (usually values up to 8 are reasonable). Most
1305 users of this method should find the default value
1306 sufficient.
1307 plot: plot the fit and the residual. In this mode each
1308 individual fit has to be approved by typing 'y'
1309 or 'n'
1310 insitu: if False a new scantable is returned.
1311 Otherwise, the scaling is done in-situ
1312 The default is taken from .asaprc (False)
1313
1314 Example:
1315 scan2=scan.auto_poly_baseline(order=7)
1316 """
1317 if insitu is None: insitu = rcParams['insitu']
1318 varlist = vars()
1319 from asap.asapfitter import fitter
1320 from asap.asaplinefind import linefinder
1321 from asap import _is_sequence_or_number as _is_valid
1322
1323 # check whether edge is set up for each IF individually
1324 individualedge = False
1325 if len(edge) > 1:
1326 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1327 individualedge = True
1328
1329 if not _is_valid(edge, int) and not individualedge:
1330 raise ValueError, "Parameter 'edge' has to be an integer or a \
1331 pair of integers specified as a tuple. Nested tuples are allowed \
1332 to make individual selection for different IFs."
1333
1334 curedge = (0, 0)
1335 if individualedge:
1336 for edgepar in edge:
1337 if not _is_valid(edgepar, int):
1338 raise ValueError, "Each element of the 'edge' tuple has \
1339 to be a pair of integers or an integer."
1340 else:
1341 curedge = edge
1342
1343 # setup fitter
1344 f = fitter()
1345 f.set_function(poly=order)
1346
1347 # setup line finder
1348 fl = linefinder()
1349 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1350
1351 if not insitu:
1352 workscan = self.copy()
1353 else:
1354 workscan = self
1355
1356 fl.set_scan(workscan)
1357
1358 rows = range(workscan.nrow())
1359 asaplog.push("Processing:")
1360 for r in rows:
1361 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1362 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1363 workscan.getpol(r), workscan.getcycle(r))
1364 asaplog.push(msg, False)
1365
1366 # figure out edge parameter
1367 if individualedge:
1368 if len(edge) <= workscan.getif(r):
1369 raise RuntimeError, "Number of edge elements appears to " \
1370 "be less than the number of IFs"
1371 curedge = edge[workscan.getif(r)]
1372
1373 # run the line finder for this row
1374 fl.find_lines(r, mask, curedge)
1375 f.set_scan(workscan, fl.get_mask())
1376 f.x = workscan._getabcissa(r)
1377 f.y = workscan._getspectrum(r)
1378 f.data = None
1379 f.fit()
1380 x = f.get_parameters()
1381 if plot:
1382 f.plot(residual=True)
1383 x = raw_input("Accept fit ( [y]/n ): ")
1384 if x.upper() == 'N':
1385 continue
1386 workscan._setspectrum(f.fitter.getresidual(), r)
1387 if plot:
1388 f._p.unmap()
1389 f._p = None
1390 workscan._add_history("auto_poly_baseline", varlist)
1391 if insitu:
1392 self._assign(workscan)
1393 else:
1394 return workscan
1395
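# Added annotation: hedged usage sketch for auto_poly_baseline, assuming a
# loaded scantable 'scan' with two IFs; the numbers are illustrative:
#   bl = scan.auto_poly_baseline(order=3, edge=((40, 40), (100, 100)),
#                                threshold=5, insitu=False)
# The nested 'edge' tuple gives a separate (left, right) channel cut per IF.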
1396 def rotate_linpolphase(self, angle):
1397 """
1398 Rotate the phase of the complex polarization O=Q+iU correlation.
1399 This is always done in situ in the raw data. So if you call this
1400 function more than once then each call rotates the phase further.
1401 Parameters:
1402 angle: The angle (degrees) to rotate (add) by.
1403 Examples:
1404 scan.rotate_linpolphase(2.3)
1405 """
1406 varlist = vars()
1407 self._math._rotate_linpolphase(self, angle)
1408 self._add_history("rotate_linpolphase", varlist)
1409 print_log()
1410 return
1411
1412
1413 def rotate_xyphase(self, angle):
1414 """
1415 Rotate the phase of the XY correlation. This is always done in situ
1416 in the data. So if you call this function more than once
1417 then each call rotates the phase further.
1418 Parameters:
1419 angle: The angle (degrees) to rotate (add) by.
1420 Examples:
1421 scan.rotate_xyphase(2.3)
1422 """
1423 varlist = vars()
1424 self._math._rotate_xyphase(self, angle)
1425 self._add_history("rotate_xyphase", varlist)
1426 print_log()
1427 return
1428
1429 def swap_linears(self):
1430 """
1431 Swap the linear polarisations XX and YY, or more precisely the
1432 first two polarisations, as this also works for circulars.
1433 """
1434 varlist = vars()
1435 self._math._swap_linears(self)
1436 self._add_history("swap_linears", varlist)
1437 print_log()
1438 return
1439
1440 def invert_phase(self):
1441 """
1442 Invert the phase of the complex polarisation
1443 """
1444 varlist = vars()
1445 self._math._invert_phase(self)
1446 self._add_history("invert_phase", varlist)
1447 print_log()
1448 return
1449
1450 def add(self, offset, insitu=None):
1451 """
1452 Return a scan where all spectra have the offset added
1453 Parameters:
1454 offset: the offset
1455 insitu: if False a new scantable is returned.
1456 Otherwise, the scaling is done in-situ
1457 The default is taken from .asaprc (False)
1458 """
1459 if insitu is None: insitu = rcParams['insitu']
1460 self._math._setinsitu(insitu)
1461 varlist = vars()
1462 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1463 s._add_history("add", varlist)
1464 print_log()
1465 if insitu:
1466 self._assign(s)
1467 else:
1468 return s
1469
1470 def scale(self, factor, tsys=True, insitu=None):
1471 """
1472 Return a scan where all spectra are scaled by the given 'factor'
1473 Parameters:
1474 factor: the scaling factor
1475 insitu: if False a new scantable is returned.
1476 Otherwise, the scaling is done in-situ
1477 The default is taken from .asaprc (False)
1478 tsys: if True (default) then apply the operation to Tsys
1479 as well as the data
1480 """
1481 if insitu is None: insitu = rcParams['insitu']
1482 self._math._setinsitu(insitu)
1483 varlist = vars()
1484 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1485 s._add_history("scale", varlist)
1486 print_log()
1487 if insitu:
1488 self._assign(s)
1489 else:
1490 return s
1491
1492 def auto_quotient(self, preserve=True, mode='paired'):
1493 """
1494 This function builds quotients automatically.
1495 It assumes the observation has the same number of
1496 "ons" and "offs".
1497 Parameters:
1498 preserve: you can preserve (default) the continuum or
1499 remove it. The equations used are
1500 preserve: Output = Toff * (on/off) - Toff
1501 remove: Output = Toff * (on/off) - Ton
1502 mode: the on/off detection mode
1503 'paired' (default)
1504 identifies 'off' scans by the
1505 trailing '_R' (Mopra/Parkes) or
1506 '_e'/'_w' (Tid) and matches
1507 on/off pairs from the observing pattern
1508 'time'
1509 finds the closest off in time
1510
1511 """
1512 modes = ["time", "paired"]
1513 if mode not in modes:
1514 msg = "please provide valid mode. Valid modes are %s" % (modes)
1515 raise ValueError(msg)
1516 varlist = vars()
1517 s = None
1518 if mode.lower() == "paired":
1519 basesel = self.get_selection()
1520 sel = selector()+basesel
1521 sel.set_query("SRCTYPE==1")
1522 self.set_selection(sel)
1523 offs = self.copy()
1524 sel.set_query("SRCTYPE==0")
1525 self.set_selection(sel)
1526 ons = self.copy()
1527 s = scantable(self._math._quotient(ons, offs, preserve))
1528 self.set_selection(basesel)
1529 elif mode.lower() == "time":
1530 s = scantable(self._math._auto_quotient(self, mode, preserve))
1531 s._add_history("auto_quotient", varlist)
1532 print_log()
1533 return s
1534
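# Added annotation: hedged usage of auto_quotient above, assuming
# position-switched data in 'scan':
#   q = scan.auto_quotient()                               # paired on/off scans
#   q2 = scan.auto_quotient(preserve=False, mode='time')   # nearest 'off' in time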
1535 def mx_quotient(self, mask=None, weight='median', preserve=True):
1536 """
1537 Form a quotient using "off" beams when observing in "MX" mode.
1538 Parameters:
1539 mask: an optional mask to be used when weight == 'stddev'
1540 weight: How to average the off beams. Default is 'median'.
1541 preserve: you can preserve (default) the continuum or
1542 remove it. The equations used are
1543 preserve: Output = Toff * (on/off) - Toff
1544 remove: Output = Toff * (on/off) - Ton
1545 """
1546 if mask is None: mask = ()
1547 varlist = vars()
1548 on = scantable(self._math._mx_extract(self, 'on'))
1549 preoff = scantable(self._math._mx_extract(self, 'off'))
1550 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1551 from asapmath import quotient
1552 q = quotient(on, off, preserve)
1553 q._add_history("mx_quotient", varlist)
1554 print_log()
1555 return q
1556
1557 def freq_switch(self, insitu=None):
1558 """
1559 Apply frequency switching to the data.
1560 Parameters:
1561 insitu: if False a new scantable is returned.
1562 Otherwise, the switching is done in-situ
1563 The default is taken from .asaprc (False)
1564 Example:
1565 none
1566 """
1567 if insitu is None: insitu = rcParams['insitu']
1568 self._math._setinsitu(insitu)
1569 varlist = vars()
1570 s = scantable(self._math._freqswitch(self))
1571 s._add_history("freq_switch", varlist)
1572 print_log()
1573 if insitu: self._assign(s)
1574 else: return s
1575
1576 def recalc_azel(self):
1577 """
1578 Recalculate the azimuth and elevation for each position.
1579 Parameters:
1580 none
1581 Example:
1582 """
1583 varlist = vars()
1584 self._recalcazel()
1585 self._add_history("recalc_azel", varlist)
1586 print_log()
1587 return
1588
1589 def __add__(self, other):
1590 varlist = vars()
1591 s = None
1592 if isinstance(other, scantable):
1593 s = scantable(self._math._binaryop(self, other, "ADD"))
1594 elif isinstance(other, float):
1595 s = scantable(self._math._unaryop(self, other, "ADD", False))
1596 else:
1597 raise TypeError("Other input is not a scantable or float value")
1598 s._add_history("operator +", varlist)
1599 print_log()
1600 return s
1601
1602 def __sub__(self, other):
1603 """
1604 implicit on all axes and on Tsys
1605 """
1606 varlist = vars()
1607 s = None
1608 if isinstance(other, scantable):
1609 s = scantable(self._math._binaryop(self, other, "SUB"))
1610 elif isinstance(other, float):
1611 s = scantable(self._math._unaryop(self, other, "SUB", False))
1612 else:
1613 raise TypeError("Other input is not a scantable or float value")
1614 s._add_history("operator -", varlist)
1615 print_log()
1616 return s
1617
1618 def __mul__(self, other):
1619 """
1620 implicit on all axes and on Tsys
1621 """
1622 varlist = vars()
1623 s = None
1624 if isinstance(other, scantable):
1625 s = scantable(self._math._binaryop(self, other, "MUL"))
1626 elif isinstance(other, float):
1627 s = scantable(self._math._unaryop(self, other, "MUL", False))
1628 else:
1629 raise TypeError("Other input is not a scantable or float value")
1630 s._add_history("operator *", varlist)
1631 print_log()
1632 return s
1633
1634
1635 def __div__(self, other):
1636 """
1637 implicit on all axes and on Tsys
1638 """
1639 varlist = vars()
1640 s = None
1641 if isinstance(other, scantable):
1642 s = scantable(self._math._binaryop(self, other, "DIV"))
1643 elif isinstance(other, float):
1644 if other == 0.0:
1645 raise ZeroDivisionError("Dividing by zero is not recommended")
1646 s = scantable(self._math._unaryop(self, other, "DIV", False))
1647 else:
1648 raise TypeError("Other input is not a scantable or float value")
1649 s._add_history("operator /", varlist)
1650 print_log()
1651 return s
1652
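# Added annotation: illustrative use of the arithmetic operators defined
# above ('scan' and 'off' are assumed scantables of matching shape):
#   scaled = scan * 2.0     # scale all spectra by a float
#   diff = scan - off       # element-wise difference of two scantables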
1653 def get_fit(self, row=0):
1654 """
1655 Print or return the stored fits for a row in the scantable
1656 Parameters:
1657 row: the row which the fit has been applied to.
1658 """
1659 if row >= self.nrow():
1660 return
1661 from asap.asapfit import asapfit
1662 fit = asapfit(self._getfit(row))
1663 if rcParams['verbose']:
1664 print fit
1665 return
1666 else:
1667 return fit.as_dict()
1668
1669 def _add_history(self, funcname, parameters):
1670 if not rcParams['scantable.history']:
1671 return
1672 # create date
1673 sep = "##"
1674 from datetime import datetime
1675 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1676 hist = dstr+sep
1677 hist += funcname+sep#cdate+sep
1678 if parameters.has_key('self'): del parameters['self']
1679 for k, v in parameters.iteritems():
1680 if type(v) is dict:
1681 for k2, v2 in v.iteritems():
1682 hist += k2
1683 hist += "="
1684 if isinstance(v2, scantable):
1685 hist += 'scantable'
1686 elif k2 == 'mask':
1687 if isinstance(v2, list) or isinstance(v2, tuple):
1688 hist += str(self._zip_mask(v2))
1689 else:
1690 hist += str(v2)
1691 else:
1692 hist += str(v2)
1693 else:
1694 hist += k
1695 hist += "="
1696 if isinstance(v, scantable):
1697 hist += 'scantable'
1698 elif k == 'mask':
1699 if isinstance(v, list) or isinstance(v, tuple):
1700 hist += str(self._zip_mask(v))
1701 else:
1702 hist += str(v)
1703 else:
1704 hist += str(v)
1705 hist += sep
1706 hist = hist[:-2] # remove trailing '##'
1707 self._addhistory(hist)
1708
1709
1710 def _zip_mask(self, mask):
1711 mask = list(mask)
1712 i = 0
1713 segments = []
1714 while mask[i:].count(1):
1715 i += mask[i:].index(1)
1716 if mask[i:].count(0):
1717 j = i + mask[i:].index(0)
1718 else:
1719 j = len(mask)
1720 segments.append([i, j])
1721 i = j
1722 return segments
1723
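# Added annotation: _zip_mask above compresses a boolean channel mask into
# [start, stop) index segments for the history record, e.g. (hypothetical)
#   [False, True, True, False, True]  ->  [[1, 3], [4, 5]]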
1724 def _get_ordinate_label(self):
1725 fu = "("+self.get_fluxunit()+")"
1726 import re
1727 lbl = "Intensity"
1728 if re.match(".K.", fu):
1729 lbl = "Brightness Temperature "+ fu
1730 elif re.match(".Jy.", fu):
1731 lbl = "Flux density "+ fu
1732 return lbl
1733
1734 def _check_ifs(self):
1735 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1736 nchans = filter(lambda t: t > 0, nchans)
1737 return (sum(nchans)/len(nchans) == nchans[0])
1738
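# Added annotation: _check_ifs above returns True when every non-empty IF has
# the same number of channels; stats() and create_mask() rely on this before
# applying a single channel mask across IFs.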
1739 def _fill(self, names, unit, average):
1740 import os
1741 from asap._asap import stfiller
1742 first = True
1743 fullnames = []
1744 for name in names:
1745 name = os.path.expandvars(name)
1746 name = os.path.expanduser(name)
1747 if not os.path.exists(name):
1748 msg = "File '%s' does not exists" % (name)
1749 if rcParams['verbose']:
1750 asaplog.push(msg)
1751 print asaplog.pop().strip()
1752 return
1753 raise IOError(msg)
1754 fullnames.append(name)
1755 if average:
1756 asaplog.push('Auto averaging integrations')
1757 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1758 for name in fullnames:
1759 tbl = Scantable(stype)
1760 r = stfiller(tbl)
1761 msg = "Importing %s..." % (name)
1762 asaplog.push(msg, False)
1763 print_log()
1764 r._open(name, -1, -1)
1765 r._read()
1766 #tbl = r._getdata()
1767 if average:
1768 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1769 #tbl = tbl2
1770 if not first:
1771 tbl = self._math._merge([self, tbl])
1772 #tbl = tbl2
1773 Scantable.__init__(self, tbl)
1774 r._close()
1775 del r, tbl
1776 first = False
1777 if unit is not None:
1778 self.set_fluxunit(unit)
1779 self.set_freqframe(rcParams['scantable.freqframe'])
1780
1781 def __getitem__(self, key):
1782 if key < 0:
1783 key += self.nrow()
1784 if key >= self.nrow():
1785 raise IndexError("Row index out of range.")
1786 return self._getspectrum(key)
1787
1788 def __setitem__(self, key, value):
1789 if key < 0:
1790 key += self.nrow()
1791 if key >= self.nrow():
1792 raise IndexError("Row index out of range.")
1793 if not hasattr(value, "__len__") or \
1794 len(value) > self.nchan(self.getif(key)):
1795 raise ValueError("Spectrum length doesn't match.")
1796 return self._setspectrum(value, key)
1797
1798 def __len__(self):
1799 return self.nrow()
1800
1801 def __iter__(self):
1802 for i in range(len(self)):
1803 yield self[i]