source: trunk/python/scantable.py@ 1429

Last change on this file since 1429 was 1402, checked in by Malte Marquarding, 17 years ago

implemented iterator to iterate through rows

1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
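        # A minimal usage sketch (commented out; the file name is a placeholder,
        # not a file shipped with ASAP):
        #   from asap import scantable
        #   s = scantable('mydata.rpf', average=True, unit='K')
        #   print len(s)        # number of rows (integrations) read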
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 self._add_history("scantable", varlist)
75 print_log()
76
77 def save(self, name=None, format=None, overwrite=False):
78 """
79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
81 Parameters:
82 name: the name of the output file. For format "ASCII"
83 this is the root file name (data in 'name'.txt
84 and header in 'name'_header.txt)
85 format: an optional file format. Default is ASAP.
86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87 'SDFITS' (save as SDFITS file)
88 'ASCII' (saves as ascii text file)
89 'MS2' (saves as an aips++
90 MeasurementSet V2)
91 overwrite: whether to overwrite the file if it exists.
92 The default (False) is to return with a warning
93 without writing the output. USE WITH CARE.
94 Example:
95 scan.save('myscan.asap')
96 scan.save('myscan.sdfits', 'SDFITS')
97 """
98 from os import path
99 if format is None: format = rcParams['scantable.save']
100 suffix = '.'+format.lower()
101 if name is None or name == "":
102 name = 'scantable'+suffix
103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
105 name = path.expandvars(name)
106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
114 format2 = format.upper()
115 if format2 == 'ASAP':
116 self._save(name)
117 else:
118 from asap._asap import stwriter as stw
119 writer = stw(format2)
120 writer.write(self, name)
121 print_log()
122 return
123
124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
129 Parameters:
130 none
131 Example:
132 copiedscan = scan.copy()
133 """
134 sd = scantable(Scantable._copy(self))
135 return sd
136
137 def drop_scan(self, scanid=None):
138 """
139 Return a new scantable where the specified scan number(s) has(have)
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
143 """
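        # Usage sketch (commented out; the scan numbers are illustrative):
        #   cleaned = scan.drop_scan([2, 5])   # drop scans 2 and 5
        #   cleaned = scan.drop_scan(0)        # a single scan number also works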
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
177
178
179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
183 Note:
184 See scantable.drop_scan() for the inverse operation.
185 Parameters:
186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
188 '*_R' gets all 'ref' scans
189 Example:
190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
194 # get a subset of scans by scanno (as listed in scan.summary())
195 newscan = scan.get_scan([0, 2, 7, 10])
196 """
197 if scanid is None:
198 if rcParams['verbose']:
199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
201 return
202 else:
203 raise RuntimeError("No scan given")
204
205 try:
206 bsel = self.get_selection()
207 sel = selector()
208 if type(scanid) is str:
209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
211 scopy = self._copy()
212 self.set_selection(bsel)
213 return scantable(scopy)
214 elif type(scanid) is int:
215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
217 scopy = self._copy()
218 self.set_selection(bsel)
219 return scantable(scopy)
220 elif type(scanid) is list:
221 sel.set_scans(scanid)
222 self.set_selection(sel)
223 scopy = self._copy()
224 self.set_selection(bsel)
225 return scantable(scopy)
226 else:
227 msg = "Illegal scanid type, use 'int' or 'list' of ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
232 except RuntimeError:
233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
235
236 def __str__(self):
237 return Scantable._summary(self, True)
238
239 def summary(self, filename=None):
240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
243 filename: the name of a file to write the output to
244 Default - no file output
245 verbose: print extra info such as the frequency table
246 The default (False) is taken from .asaprc
247 """
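        # Usage sketch (commented out; the output file name is arbitrary):
        #   scan.summary()                      # page or print the summary
        #   scan.summary('obs_summary.txt')     # additionally write it to a file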
248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
250 if filename is not None:
251 if filename == "":
252 filename = 'scantable_summary.txt'
253 from os.path import expandvars, isdir
254 filename = expandvars(filename)
255 if not isdir(filename):
256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
271 else:
272 return info
273
274
275 def get_selection(self):
276 """
277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
284 """
285 return selector(self._getselection())
286
287 def set_selection(self, selection=selector()):
288 """
289 Select a subset of the data. All following operations on this scantable
290 are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
295 sel.set_scans([0, 3]) # select SCANNO 0 and 3
296 scan.set_selection(sel) # set the selection
297 scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
299 """
300 self._setselection(selection)
301
302 def stats(self, stat='stddev', mask=None):
303 """
304 Determine the specified statistic of the current beam/if/pol
305 Takes a 'mask' as an optional parameter to specify which
306 channels should be excluded.
307 Parameters:
308 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
309 'var', 'stddev', 'avdev', 'rms', 'median'
310 mask: an optional mask specifying where the statistic
311 should be determined.
312 Example:
313 scan.set_unit('channel')
314 msk = scan.create_mask([100, 200], [500, 600])
315 scan.stats(stat='mean', mask=msk)
316 """
317 if mask is None:
318 mask = []
319 axes = ['Beam', 'IF', 'Pol', 'Time']
320 if not self._check_ifs():
321 raise ValueError("Cannot apply mask as the IFs have different "
322 "number of channels. Please use set_selection() "
323 "to select individual IFs")
324
325 statvals = self._math._stats(self, mask, stat)
326 out = ''
327 axes = []
328 for i in range(self.nrow()):
329 axis = []
330 axis.append(self.getscan(i))
331 axis.append(self.getbeam(i))
332 axis.append(self.getif(i))
333 axis.append(self.getpol(i))
334 axis.append(self.getcycle(i))
335 axes.append(axis)
336 tm = self._gettime(i)
337 src = self._getsourcename(i)
338 out += 'Scan[%d] (%s) ' % (axis[0], src)
339 out += 'Time[%s]:\n' % (tm)
340 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
341 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
342 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
343 out += '= %3.3f\n' % (statvals[i])
344 out += "--------------------------------------------------\n"
345
346 if rcParams['verbose']:
347 print "--------------------------------------------------"
348 print " ", stat
349 print "--------------------------------------------------"
350 print out
351 #else:
352 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353 # 'axes' : axes,
354 # 'data': statvals}
355 return statvals
356
357 def stddev(self, mask=None):
358 """
359 Determine the standard deviation of the current beam/if/pol
360 Takes a 'mask' as an optional parameter to specify which
361 channels should be excluded.
362 Parameters:
363 mask: an optional mask specifying where the standard
364 deviation should be determined.
365
366 Example:
367 scan.set_unit('channel')
368 msk = scan.create_mask([100, 200], [500, 600])
369 scan.stddev(mask=msk)
370 """
371 return self.stats(stat='stddev', mask=mask)
372
373
374 def get_column_names(self):
375 """
376 Return a list of column names, which can be used for selection.
377 """
378 return list(Scantable.get_column_names(self))
379
380 def get_tsys(self):
381 """
382 Return the System temperatures.
383 Returns:
384 a list of Tsys values for the current selection
385 """
386
387 return self._row_callback(self._gettsys, "Tsys")
388
389 def _row_callback(self, callback, label):
390 axes = []
391 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
392 out = ""
393 outvec = []
394 for i in range(self.nrow()):
395 axis = []
396 axis.append(self.getscan(i))
397 axis.append(self.getbeam(i))
398 axis.append(self.getif(i))
399 axis.append(self.getpol(i))
400 axis.append(self.getcycle(i))
401 axes.append(axis)
402 tm = self._gettime(i)
403 src = self._getsourcename(i)
404 out += 'Scan[%d] (%s) ' % (axis[0], src)
405 out += 'Time[%s]:\n' % (tm)
406 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
407 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
408 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
409 outvec.append(callback(i))
410 out += '= %3.3f\n' % (outvec[i])
411 out += "--------------------------------------------------\n"
412 if rcParams['verbose']:
413 print "--------------------------------------------------"
414 print " %s" % (label)
415 print "--------------------------------------------------"
416 print out
417 # disabled because the vector seems more useful
418 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419 return outvec
420
421 def _get_column(self, callback, row=-1):
422 """
423 """
424 if row == -1:
425 return [callback(i) for i in range(self.nrow())]
426 else:
427 if 0 <= row < self.nrow():
428 return callback(row)
429
430
431 def get_time(self, row=-1, asdatetime=False):
432 """
433 Get a list of time stamps for the observations.
434 Return a string (or a datetime object if asdatetime=True) for each integration time stamp in the scantable.
435 Parameters:
436 row: row no of integration. Default -1 return all rows
437 asdatetime: return values as datetime objects rather than strings
438 Example:
439 none
440 """
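        # Usage sketch (commented out):
        #   stamps = scan.get_time()                    # list of time strings
        #   t0 = scan.get_time(row=0, asdatetime=True)  # datetime object for row 0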
441 from time import strptime
442 from datetime import datetime
443 times = self._get_column(self._gettime, row)
444 if not asdatetime:
445 return times
446 format = "%Y/%m/%d/%H:%M:%S"
447 if isinstance(times, list):
448 return [datetime(*strptime(i, format)[:6]) for i in times]
449 else:
450 return datetime(*strptime(times, format)[:6])
451
452
453 def get_inttime(self, row=-1):
454 """
455 Get a list of integration times for the observations.
456 Return a time in seconds for each integration in the scantable.
457 Parameters:
458 row: row no of integration. Default -1 return all rows.
459 Example:
460 none
461 """
462 return self._get_column(self._getinttime, row)
463
464
465 def get_sourcename(self, row=-1):
466 """
467 Get a list of source names for the observations.
468 Return a string for each integration in the scantable.
469 Parameters:
470 row: row no of integration. Default -1 return all rows.
471 Example:
472 none
473 """
474 return self._get_column(self._getsourcename, row)
475
476 def get_elevation(self, row=-1):
477 """
478 Get a list of elevations for the observations.
479 Return a float for each integration in the scantable.
480 Parameters:
481 row: row no of integration. Default -1 return all rows.
482 Example:
483 none
484 """
485 return self._get_column(self._getelevation, row)
486
487 def get_azimuth(self, row=-1):
488 """
489 Get a list of azimuths for the observations.
490 Return a float for each integration in the scantable.
491 Parameters:
492 row: row no of integration. Default -1 return all rows.
493 Example:
494 none
495 """
496 return self._get_column(self._getazimuth, row)
497
498 def get_parangle(self, row=-1):
499 """
500 Get a list of parallactic angles for the observations.
501 Return a float for each integration in the scantable.
502 Parameters:
503 row: row no of integration. Default -1 return all rows.
504 Example:
505 none
506 """
507 return self._get_column(self._getparangle, row)
508
509 def get_direction(self, row=-1):
510 """
511 Get a list of Positions on the sky (direction) for the observations.
512 Return a direction (as a formatted string) for each integration in the scantable.
513 Parameters:
514 row: row no of integration. Default -1 return all rows
515 Example:
516 none
517 """
518 return self._get_column(self._getdirection, row)
519
520 def get_directionval(self, row=-1):
521 """
522 Get a list of Positions on the sky (direction) for the observations.
523 Return a float for each integration in the scantable.
524 Parameters:
525 row: row no of integration. Default -1 return all rows
526 Example:
527 none
528 """
529 return self._get_column(self._getdirectionvec, row)
530
531
532 def set_unit(self, unit='channel'):
533 """
534 Set the unit for all following operations on this scantable
535 Parameters:
536 unit: optional unit, default is 'channel'
537 one of '*Hz', 'km/s', 'channel', ''
538 """
539 varlist = vars()
540 if unit in ['', 'pixel', 'channel']:
541 unit = ''
542 inf = list(self._getcoordinfo())
543 inf[0] = unit
544 self._setcoordinfo(inf)
545 self._add_history("set_unit", varlist)
546
547 def set_instrument(self, instr):
548 """
549 Set the instrument for subsequent processing.
550 Parameters:
551 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
552 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
553 """
554 self._setInstrument(instr)
555 self._add_history("set_instrument", vars())
556 print_log()
557
558 def set_feedtype(self, feedtype):
559 """
560 Overwrite the feed type, which might not be set correctly.
561 Parameters:
562 feedtype: 'linear' or 'circular'
563 """
564 self._setfeedtype(feedtype)
565 self._add_history("set_feedtype", vars())
566 print_log()
567
568 def set_doppler(self, doppler='RADIO'):
569 """
570 Set the doppler for all following operations on this scantable.
571 Parameters:
572 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
573 """
574 varlist = vars()
575 inf = list(self._getcoordinfo())
576 inf[2] = doppler
577 self._setcoordinfo(inf)
578 self._add_history("set_doppler", vars())
579 print_log()
580
581 def set_freqframe(self, frame=None):
582 """
583 Set the frame type of the Spectral Axis.
584 Parameters:
585 frame: an optional frame type, default 'LSRK'. Valid frames are:
586 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
587 'GEO', 'GALACTO', 'LGROUP', 'CMB'
588 Examples:
589 scan.set_freqframe('BARY')
590 """
591 if frame is None: frame = rcParams['scantable.freqframe']
592 varlist = vars()
593 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
594 'GEO', 'GALACTO', 'LGROUP', 'CMB']
595
596 if frame in valid:
597 inf = list(self._getcoordinfo())
598 inf[1] = frame
599 self._setcoordinfo(inf)
600 self._add_history("set_freqframe", varlist)
601 else:
602 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
603 if rcParams['verbose']:
604 print msg
605 else:
606 raise TypeError(msg)
607 print_log()
608
609 def set_dirframe(self, frame=""):
610 """
611 Set the frame type of the Direction on the sky.
612 Parameters:
613 frame: an optional frame type, default ''. Valid frames are:
614 'J2000', 'B1950', 'GALACTIC'
615 Examples:
616 scan.set_dirframe('GALACTIC')
617 """
618 varlist = vars()
619 try:
620 Scantable.set_dirframe(self, frame)
621 except RuntimeError, msg:
622 if rcParams['verbose']:
623 print msg
624 else:
625 raise
626 self._add_history("set_dirframe", varlist)
627
628 def get_unit(self):
629 """
630 Get the default unit set in this scantable
631 Returns:
632 A unit string
633 """
634 inf = self._getcoordinfo()
635 unit = inf[0]
636 if unit == '': unit = 'channel'
637 return unit
638
639 def get_abcissa(self, rowno=0):
640 """
641 Get the abcissa in the current coordinate setup for the currently
642 selected Beam/IF/Pol
643 Parameters:
644 rowno: an optional row number in the scantable. Default is the
645 first row, i.e. rowno=0
646 Returns:
647 The abcissa values and the format string (as a dictionary)
648 """
649 abc = self._getabcissa(rowno)
650 lbl = self._getabcissalabel(rowno)
651 print_log()
652 return abc, lbl
653
654 def flag(self, mask=None):
655 """
656 Flag the selected data using an optional channel mask.
657 Parameters:
658 mask: an optional channel mask, created with create_mask. Default
659 (no mask) is all channels.
660 """
661 varlist = vars()
662 if mask is None:
663 mask = []
664 try:
665 self._flag(mask)
666 except RuntimeError, msg:
667 if rcParams['verbose']:
668 print msg
669 return
670 else: raise
671 self._add_history("flag", varlist)
672
673 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
674 """
675 Flag the data in 'lag' space by providing a frequency to remove.
676 Flagged data in the scantable gets set to 0.0 before the fft.
677 No taper is applied.
678 Parameters:
679 frequency: the frequency (really a period within the bandwidth)
680 to remove
681 width: the width of the frequency to remove, to remove a
682 range of frequencies around the centre.
683 unit: the frequency unit (default "GHz")
684 Notes:
685 It is recommended to flag edges of the band or strong
686 signals beforehand.
687 """
688 if insitu is None: insitu = rcParams['insitu']
689 self._math._setinsitu(insitu)
690 varlist = vars()
691 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
692 if not base.has_key(unit):
693 raise ValueError("%s is not a valid unit." % unit)
694 try:
695 s = scantable(self._math._lag_flag(self, frequency*base[unit],
696 width*base[unit]))
697 except RuntimeError, msg:
698 if rcParams['verbose']:
699 print msg
700 return
701 else: raise
702 s._add_history("lag_flag", varlist)
703 print_log()
704 if insitu:
705 self._assign(s)
706 else:
707 return s
708
709
710 def create_mask(self, *args, **kwargs):
711 """
712 Compute and return a mask based on [min, max] windows.
713 The specified windows are to be INCLUDED, when the mask is
714 applied.
715 Parameters:
716 [min, max], [min2, max2], ...
717 Pairs of start/end points (inclusive) specifying the regions
718 to be masked
719 invert: optional argument. If specified as True,
720 return an inverted mask, i.e. the regions
721 specified are EXCLUDED
722 row: create the mask using the specified row for
723 unit conversions, default is row=0
724 only necessary if frequency varies over rows.
725 Example:
726 scan.set_unit('channel')
727 a)
728 msk = scan.create_mask([400, 500], [800, 900])
729 # masks everything outside 400 and 500
730 # and 800 and 900 in the unit 'channel'
731
732 b)
733 msk = scan.create_mask([400, 500], [800, 900], invert=True)
734 # masks the regions between 400 and 500
735 # and 800 and 900 in the unit 'channel'
736 c)
737 mask only channel 400
738 msk = scan.create_mask([400, 400])
739 """
740 row = 0
741 if kwargs.has_key("row"):
742 row = kwargs.get("row")
743 data = self._getabcissa(row)
744 u = self._getcoordinfo()[0]
745 if rcParams['verbose']:
746 if u == "": u = "channel"
747 msg = "The current mask window unit is %s" % u
748 i = self._check_ifs()
749 if not i:
750 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
751 asaplog.push(msg)
752 n = self.nchan()
753 msk = _n_bools(n, False)
754 # test if args is a 'list' or a 'normal *args - UGLY!!!
755
756 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
757 and args or args[0]
758 for window in ws:
759 if (len(window) != 2 or window[0] > window[1] ):
760 raise TypeError("A window needs to be defined as [min, max]")
761 for i in range(n):
762 if data[i] >= window[0] and data[i] <= window[1]:
763 msk[i] = True
764 if kwargs.has_key('invert'):
765 if kwargs.get('invert'):
766 msk = mask_not(msk)
767 print_log()
768 return msk
769
770 def get_restfreqs(self):
771 """
772 Get the restfrequency(s) stored in this scantable.
773 The return value(s) are always of unit 'Hz'
774 Parameters:
775 none
776 Returns:
777 a list of doubles
778 """
779 return list(self._getrestfreqs())
780
781
782 def set_restfreqs(self, freqs=None, unit='Hz'):
783 """
784 Set or replace the rest frequency for the selected data.
785 If the 'freqs' argument holds a scalar,
786 then that rest frequency will be applied to all the selected
787 data. If the 'freqs' argument holds
788 a vector, then it MUST be of equal or smaller length than
789 the number of IFs (and the available restfrequencies will be
790 replaced by this vector). In this case, *all* data have
791 the restfrequency set per IF according
792 to the corresponding value you give in the 'freqs' vector.
793 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
794 IF 1 gets restfreq 2e9.
795 You can also specify the frequencies via a linecatalog.
796
797 Parameters:
798 freqs: list of rest frequency values or string identifiers
799 unit: unit for rest frequency (default 'Hz')
800
801 Example:
802 # set the given restfrequency for the whole table
803 scan.set_restfreqs(freqs=1.4e9)
804 # If the number of IFs in the data is >= 2, IF0 gets the first
805 # value IF1 the second...
806 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
807 #set the given restfrequency for the whole table (by name)
808 scan.set_restfreqs(freqs="OH1667")
809
810 Note:
811 To do more sophisticated rest-frequency setting, e.g. on a
812 source and IF basis, use scantable.set_selection() before using
813 this function.
814 # provided your scantable is called scan
815 selection = selector()
816 selection.set_name("ORION*")
817 selection.set_ifs([1])
818 scan.set_selection(selection)
819 scan.set_restfreqs(freqs=86.6e9)
820
821 """
822 varlist = vars()
823 from asap import linecatalog
824 # simple value
825 if isinstance(freqs, int) or isinstance(freqs, float):
826 self._setrestfreqs(freqs, "",unit)
827 # list of values
828 elif isinstance(freqs, list) or isinstance(freqs, tuple):
829 # list values are scalars
830 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
831 sel = selector()
832 savesel = self._getselection()
833 iflist = self.getifnos()
834 for i in xrange(len(freqs)):
835 sel.set_ifs(iflist[i])
836 self._setselection(sel)
837 self._setrestfreqs(freqs[i], "",unit)
838 self._setselection(savesel)
839 # list values are dicts, {'name': ..., 'value': ...}
840 elif isinstance(freqs[-1], dict):
841 sel = selector()
842 savesel = self._getselection()
843 iflist = self.getifnos()
844 for i in xrange(len(freqs)):
845 sel.set_ifs(iflist[i])
846 self._setselection(sel)
847 self._setrestfreqs(freqs[i]["value"],
848 freqs[i]["name"], "MHz")
849 self._setselection(savesel)
850 # freqs are to be taken from a linecatalog
851 elif isinstance(freqs, linecatalog):
852 sel = selector()
853 savesel = self._getselection()
854 iflist = self.getifnos()
855 for i in xrange(freqs.nrow()):
856 sel.set_ifs(iflist[i])
857 self._setselection(sel)
858 self._setrestfreqs(freqs.get_frequency(i),
859 freqs.get_name(i), "MHz")
860 # ensure that we are not iterating past nIF
861 if i == self.nif()-1: break
862 self._setselection(savesel)
863 else:
864 return
865 self._add_history("set_restfreqs", varlist)
866
867 def shift_refpix(self, delta):
868 """
869 Shift the reference pixel of the Spectral Coordinate by an
870 integer amount.
871 Parameters:
872 delta: the amount to shift by
873 Note:
874 Be careful using this with broadband data.
875 """
876 Scantable.shift(self, delta)
877
878 def history(self, filename=None):
879 """
880 Print the history. Optionally to a file.
881 Parameters:
882 filename: The name of the file to save the history to.
883 """
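        # Usage sketch (commented out; the file name is arbitrary):
        #   scan.history()                      # page or return the history
        #   scan.history('scan_history.txt')    # write the history to a text file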
884 hist = list(self._gethistory())
885 out = "-"*80
886 for h in hist:
887 if h.startswith("---"):
888 out += "\n"+h
889 else:
890 items = h.split("##")
891 date = items[0]
892 func = items[1]
893 items = items[2:]
894 out += "\n"+date+"\n"
895 out += "Function: %s\n Parameters:" % (func)
896 for i in items:
897 s = i.split("=")
898 out += "\n %s = %s" % (s[0], s[1])
899 out += "\n"+"-"*80
900 if filename is not None:
901 if filename == "":
902 filename = 'scantable_history.txt'
903 import os
904 filename = os.path.expandvars(os.path.expanduser(filename))
905 if not os.path.isdir(filename):
906 data = open(filename, 'w')
907 data.write(out)
908 data.close()
909 else:
910 msg = "Illegal file name '%s'." % (filename)
911 if rcParams['verbose']:
912 print msg
913 else:
914 raise IOError(msg)
915 if rcParams['verbose']:
916 try:
917 from IPython.genutils import page as pager
918 except ImportError:
919 from pydoc import pager
920 pager(out)
921 else:
922 return out
923 return
924 #
925 # Maths business
926 #
927
928 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
929 """
930 Return the (time) weighted average of a scan.
931 Note:
932 in channels only - align if necessary
933 Parameters:
934 mask: an optional mask (only used for 'var' and 'tsys'
935 weighting)
936 scanav: True averages each scan separately
937 False (default) averages all scans together,
938 weight: Weighting scheme.
939 'none' (mean no weight)
940 'var' (1/var(spec) weighted)
941 'tsys' (1/Tsys**2 weighted)
942 'tint' (integration time weighted)
943 'tintsys' (Tint/Tsys**2)
944 'median' ( median averaging)
945 The default is 'tint'
946 align: align the spectra in velocity before averaging. It takes
947 the time of the first spectrum as reference time.
948 Example:
949 # time average the scantable without using a mask
950 newscan = scan.average_time()
951 """
952 varlist = vars()
953 if weight is None: weight = 'TINT'
954 if mask is None: mask = ()
955 if scanav: scanav = "SCAN"
956 else: scanav = "NONE"
957 scan = (self, )
958 try:
959 if align:
960 scan = (self.freq_align(insitu=False), )
961 s = None
962 if weight.upper() == 'MEDIAN':
963 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
964 scanav))
965 else:
966 s = scantable(self._math._average(scan, mask, weight.upper(),
967 scanav))
968 except RuntimeError, msg:
969 if rcParams['verbose']:
970 print msg
971 return
972 else: raise
973 s._add_history("average_time", varlist)
974 print_log()
975 return s
976
977 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
978 """
979 Return a scan where all spectra are converted to either
980 Jansky or Kelvin depending upon the flux units of the scan table.
981 By default the function tries to look the values up internally.
982 If it can't find them (or if you want to over-ride), you must
983 specify EITHER jyperk OR eta (and D which it will try to look up
984 also if you don't set it). jyperk takes precedence if you set both.
985 Parameters:
986 jyperk: the Jy / K conversion factor
987 eta: the aperture efficiency
988 d: the geometric diameter (metres)
989 insitu: if False a new scantable is returned.
990 Otherwise, the scaling is done in-situ
991 The default is taken from .asaprc (False)
992 """
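        # Usage sketch (commented out; the efficiency, diameter and Jy/K factor
        # are illustrative numbers, not looked-up values):
        #   jyscan = scan.convert_flux(eta=0.58, d=64.0, insitu=False)
        #   scan.convert_flux(jyperk=12.7, insitu=True)  # or convert in place via Jy/K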
993 if insitu is None: insitu = rcParams['insitu']
994 self._math._setinsitu(insitu)
995 varlist = vars()
996 if jyperk is None: jyperk = -1.0
997 if d is None: d = -1.0
998 if eta is None: eta = -1.0
999 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1000 s._add_history("convert_flux", varlist)
1001 print_log()
1002 if insitu: self._assign(s)
1003 else: return s
1004
1005 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1006 """
1007 Return a scan after applying a gain-elevation correction.
1008 The correction can be made via either a polynomial or a
1009 table-based interpolation (and extrapolation if necessary).
1010 You specify polynomial coefficients, an ascii table or neither.
1011 If you specify neither, then a polynomial correction will be made
1012 with built in coefficients known for certain telescopes (an error
1013 will occur if the instrument is not known).
1014 The data and Tsys are *divided* by the scaling factors.
1015 Parameters:
1016 poly: Polynomial coefficients (default None) to compute a
1017 gain-elevation correction as a function of
1018 elevation (in degrees).
1019 filename: The name of an ascii file holding correction factors.
1020 The first row of the ascii file must give the column
1021 names and these MUST include columns
1022 "ELEVATION" (degrees) and "FACTOR" (multiply data
1023 by this) somewhere.
1024 The second row must give the data type of the
1025 column. Use 'R' for Real and 'I' for Integer.
1026 An example file would be
1027 (actual factors are arbitrary) :
1028
1029 TIME ELEVATION FACTOR
1030 R R R
1031 0.1 0 0.8
1032 0.2 20 0.85
1033 0.3 40 0.9
1034 0.4 60 0.85
1035 0.5 80 0.8
1036 0.6 90 0.75
1037 method: Interpolation method when correcting from a table.
1038 Values are "nearest", "linear" (default), "cubic"
1039 and "spline"
1040 insitu: if False a new scantable is returned.
1041 Otherwise, the scaling is done in-situ
1042 The default is taken from .asaprc (False)
1043 """
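        # Usage sketch (commented out; the coefficients and file name are
        # placeholders):
        #   corrected = scan.gain_el(poly=[0.9, 0.002, -2e-5], insitu=False)
        #   corrected = scan.gain_el(filename='gaincurve.txt', method='linear',
        #                            insitu=False)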
1044
1045 if insitu is None: insitu = rcParams['insitu']
1046 self._math._setinsitu(insitu)
1047 varlist = vars()
1048 if poly is None:
1049 poly = ()
1050 from os.path import expandvars
1051 filename = expandvars(filename)
1052 s = scantable(self._math._gainel(self, poly, filename, method))
1053 s._add_history("gain_el", varlist)
1054 print_log()
1055 if insitu: self._assign(s)
1056 else: return s
1057
1058 def freq_align(self, reftime=None, method='cubic', insitu=None):
1059 """
1060 Return a scan where all rows have been aligned in frequency/velocity.
1061 The alignment frequency frame (e.g. LSRK) is that set by function
1062 set_freqframe.
1063 Parameters:
1064 reftime: reference time to align at. By default, the time of
1065 the first row of data is used.
1066 method: Interpolation method for regridding the spectra.
1067 Choose from "nearest", "linear", "cubic" (default)
1068 and "spline"
1069 insitu: if False a new scantable is returned.
1070 Otherwise, the scaling is done in-situ
1071 The default is taken from .asaprc (False)
1072 """
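        # Usage sketch (commented out):
        #   aligned = scan.freq_align(insitu=False)        # use first row's time
        #   scan.freq_align(method='linear', insitu=True)  # or align in place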
1073 if insitu is None: insitu = rcParams["insitu"]
1074 self._math._setinsitu(insitu)
1075 varlist = vars()
1076 if reftime is None: reftime = ""
1077 s = scantable(self._math._freq_align(self, reftime, method))
1078 s._add_history("freq_align", varlist)
1079 print_log()
1080 if insitu: self._assign(s)
1081 else: return s
1082
1083 def opacity(self, tau, insitu=None):
1084 """
1085 Apply an opacity correction. The data
1086 and Tsys are multiplied by the correction factor.
1087 Parameters:
1088 tau: Opacity from which the correction factor is
1089 exp(tau*ZD)
1090 where ZD is the zenith-distance
1091 insitu: if False a new scantable is returned.
1092 Otherwise, the scaling is done in-situ
1093 The default is taken from .asaprc (False)
1094 """
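        # Usage sketch (commented out; the zenith opacity value is illustrative):
        #   corrected = scan.opacity(0.083, insitu=False)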
1095 if insitu is None: insitu = rcParams['insitu']
1096 self._math._setinsitu(insitu)
1097 varlist = vars()
1098 s = scantable(self._math._opacity(self, tau))
1099 s._add_history("opacity", varlist)
1100 print_log()
1101 if insitu: self._assign(s)
1102 else: return s
1103
1104 def bin(self, width=5, insitu=None):
1105 """
1106 Return a scan where all spectra have been binned up.
1107 Parameters:
1108 width: The bin width (default=5) in pixels
1109 insitu: if False a new scantable is returned.
1110 Otherwise, the scaling is done in-situ
1111 The default is taken from .asaprc (False)
1112 """
1113 if insitu is None: insitu = rcParams['insitu']
1114 self._math._setinsitu(insitu)
1115 varlist = vars()
1116 s = scantable(self._math._bin(self, width))
1117 s._add_history("bin", varlist)
1118 print_log()
1119 if insitu: self._assign(s)
1120 else: return s
1121
1122
1123 def resample(self, width=5, method='cubic', insitu=None):
1124 """
1125 Return a scan where all spectra have been resampled.
1126
1127 Parameters:
1128 width: The bin width (default=5) in pixels
1129 method: Interpolation method for the resampling.
1130 Values are "nearest", "linear", "cubic" (default)
1131 and "spline"
1132 insitu: if False a new scantable is returned.
1133 Otherwise, the scaling is done in-situ
1134 The default is taken from .asaprc (False)
1135 """
1136 if insitu is None: insitu = rcParams['insitu']
1137 self._math._setinsitu(insitu)
1138 varlist = vars()
1139 s = scantable(self._math._resample(self, method, width))
1140 s._add_history("resample", varlist)
1141 print_log()
1142 if insitu: self._assign(s)
1143 else: return s
1144
1145
1146 def average_pol(self, mask=None, weight='none'):
1147 """
1148 Average the Polarisations together.
1149 Parameters:
1150 mask: An optional mask defining the region, where the
1151 averaging will be applied. The output will have all
1152 specified points masked.
1153 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1154 weighted), or 'tsys' (1/Tsys**2 weighted)
1155 """
1156 varlist = vars()
1157 if mask is None:
1158 mask = ()
1159 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1160 s._add_history("average_pol", varlist)
1161 print_log()
1162 return s
1163
1164 def average_beam(self, mask=None, weight='none'):
1165 """
1166 Average the Beams together.
1167 Parameters:
1168 mask: An optional mask defining the region, where the
1169 averaging will be applied. The output will have all
1170 specified points masked.
1171 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1172 weighted), or 'tsys' (1/Tsys**2 weighted)
1173 """
1174 varlist = vars()
1175 if mask is None:
1176 mask = ()
1177 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1178 s._add_history("average_beam", varlist)
1179 print_log()
1180 return s
1181
1182 def convert_pol(self, poltype=None):
1183 """
1184 Convert the data to a different polarisation type.
1185 Parameters:
1186 poltype: The new polarisation type. Valid types are:
1187 "linear", "stokes" and "circular"
1188 """
1189 varlist = vars()
1190 try:
1191 s = scantable(self._math._convertpol(self, poltype))
1192 except RuntimeError, msg:
1193 if rcParams['verbose']:
1194 print msg
1195 return
1196 else:
1197 raise
1198 s._add_history("convert_pol", varlist)
1199 print_log()
1200 return s
1201
1202 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1203 """
1204 Smooth the spectrum by the specified kernel (conserving flux).
1205 Parameters:
1206 kernel: The type of smoothing kernel. Select from
1207 'hanning' (default), 'gaussian', 'boxcar' and
1208 'rmedian'
1209 width: The width of the kernel in pixels. For hanning this is
1210 ignored, otherwise it defaults to 5 pixels.
1211 For 'gaussian' it is the Full Width Half
1212 Maximum. For 'boxcar' it is the full width.
1213 For 'rmedian' it is the half width.
1214 insitu: if False a new scantable is returned.
1215 Otherwise, the scaling is done in-situ
1216 The default is taken from .asaprc (False)
1217 Example:
1218 none
1219 """
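        # Usage sketch (commented out; the width is illustrative):
        #   sm = scan.smooth(insitu=False)                                # hanning
        #   sm = scan.smooth(kernel='gaussian', width=10, insitu=False)   # FWHM in pixels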
1220 if insitu is None: insitu = rcParams['insitu']
1221 self._math._setinsitu(insitu)
1222 varlist = vars()
1223 s = scantable(self._math._smooth(self, kernel.lower(), width))
1224 s._add_history("smooth", varlist)
1225 print_log()
1226 if insitu: self._assign(s)
1227 else: return s
1228
1229
1230 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1231 """
1232 Return a scan which has been baselined (all rows) by a polynomial.
1233 Parameters:
1234 mask: an optional mask
1235 order: the order of the polynomial (default is 0)
1236 plot: plot the fit and the residual. In this mode each
1237 individual fit has to be approved, by typing 'y'
1238 or 'n'
1239 uselin: use linear polynomial fit
1240 insitu: if False a new scantable is returned.
1241 Otherwise, the scaling is done in-situ
1242 The default is taken from .asaprc (False)
1243 Example:
1244 # return a scan baselined by a third order polynomial,
1245 # not using a mask
1246 bscan = scan.poly_baseline(order=3)
1247 """
1248 if insitu is None: insitu = rcParams['insitu']
1249 varlist = vars()
1250 if mask is None:
1251 mask = [True for i in xrange(self.nchan(-1))]
1252 from asap.asapfitter import fitter
1253 try:
1254 f = fitter()
1255 f.set_scan(self, mask)
1256 #f.set_function(poly=order)
1257 if uselin:
1258 f.set_function(lpoly=order)
1259 else:
1260 f.set_function(poly=order)
1261 s = f.auto_fit(insitu, plot=plot)
1262 s._add_history("poly_baseline", varlist)
1263 print_log()
1264 if insitu: self._assign(s)
1265 else: return s
1266 except RuntimeError:
1267 msg = "The fit failed, possibly because it didn't converge."
1268 if rcParams['verbose']:
1269 print msg
1270 return
1271 else:
1272 raise RuntimeError(msg)
1273
1274
1275 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1276 threshold=3, chan_avg_limit=1, plot=False,
1277 insitu=None):
1278 """
1279 Return a scan which has been baselined (all rows) by a polynomial.
1280 Spectral lines are detected first using linefinder and masked out
1281 to avoid them affecting the baseline solution.
1282
1283 Parameters:
1284 mask: an optional mask retrieved from scantable
1285 edge: an optional number of channel to drop at
1286 the edge of spectrum. If only one value is
1287 specified, the same number will be dropped from
1288 both sides of the spectrum. Default is to keep
1289 all channels. Nested tuples represent individual
1290 edge selection for different IFs (a number of spectral
1291 channels can be different)
1292 order: the order of the polynomial (default is 0)
1293 threshold: the threshold used by line finder. It is better to
1294 keep it large as only strong lines affect the
1295 baseline solution.
1296 chan_avg_limit:
1297 a maximum number of consecutive spectral channels to
1298 average during the search of weak and broad lines.
1299 The default is no averaging (and no search for weak
1300 lines). If such lines can affect the fitted baseline
1301 (e.g. a high order polynomial is fitted), increase this
1302 parameter (usually values up to 8 are reasonable). Most
1303 users of this method should find the default value
1304 sufficient.
1305 plot: plot the fit and the residual. In this mode each
1306 individual fit has to be approved, by typing 'y'
1307 or 'n'
1308 insitu: if False a new scantable is returned.
1309 Otherwise, the scaling is done in-situ
1310 The default is taken from .asaprc (False)
1311
1312 Example:
1313 scan2=scan.auto_poly_baseline(order=7)
1314 """
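        # Usage sketch (commented out; order and edge values are illustrative):
        #   bl = scan.auto_poly_baseline(order=3, edge=(10, 10), insitu=False)
        #   # per-IF edges, e.g. for a two-IF data set:
        #   bl = scan.auto_poly_baseline(order=3, edge=((10, 10), (30, 30)),
        #                                insitu=False)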
1315 if insitu is None: insitu = rcParams['insitu']
1316 varlist = vars()
1317 from asap.asapfitter import fitter
1318 from asap.asaplinefind import linefinder
1319 from asap import _is_sequence_or_number as _is_valid
1320
1321 # check whether edge is set up for each IF individually
1322 individualedge = False;
1323 if len(edge) > 1:
1324 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1325 individualedge = True;
1326
1327 if not _is_valid(edge, int) and not individualedge:
1328 raise ValueError, "Parameter 'edge' has to be an integer or a \
1329 pair of integers specified as a tuple. Nested tuples are allowed \
1330 to make individual selection for different IFs."
1331
1332 curedge = (0, 0)
1333 if individualedge:
1334 for edgepar in edge:
1335 if not _is_valid(edgepar, int):
1336 raise ValueError, "Each element of the 'edge' tuple has \
1337 to be a pair of integers or an integer."
1338 else:
1339 curedge = edge;
1340
1341 # setup fitter
1342 f = fitter()
1343 f.set_function(poly=order)
1344
1345 # setup line finder
1346 fl = linefinder()
1347 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1348
1349 if not insitu:
1350 workscan = self.copy()
1351 else:
1352 workscan = self
1353
1354 fl.set_scan(workscan)
1355
1356 rows = range(workscan.nrow())
1357 asaplog.push("Processing:")
1358 for r in rows:
1359 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1360 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1361 workscan.getpol(r), workscan.getcycle(r))
1362 asaplog.push(msg, False)
1363
1364 # figure out edge parameter
1365 if individualedge:
1366 if workscan.getif(r) >= len(edge):
1367 raise RuntimeError, "Number of edge elements appear to " \
1368 "be less than the number of IFs"
1369 curedge = edge[workscan.getif(r)]
1370
1371 # setup line finder
1372 fl.find_lines(r, mask, curedge)
1373 f.set_scan(workscan, fl.get_mask())
1374 f.x = workscan._getabcissa(r)
1375 f.y = workscan._getspectrum(r)
1376 f.data = None
1377 f.fit()
1378 x = f.get_parameters()
1379 if plot:
1380 f.plot(residual=True)
1381 x = raw_input("Accept fit ( [y]/n ): ")
1382 if x.upper() == 'N':
1383 continue
1384 workscan._setspectrum(f.fitter.getresidual(), r)
1385 if plot:
1386 f._p.unmap()
1387 f._p = None
1388 workscan._add_history("auto_poly_baseline", varlist)
1389 if insitu:
1390 self._assign(workscan)
1391 else:
1392 return workscan
1393
1394 def rotate_linpolphase(self, angle):
1395 """
1396 Rotate the phase of the complex polarization O=Q+iU correlation.
1397 This is always done in situ in the raw data. So if you call this
1398 function more than once then each call rotates the phase further.
1399 Parameters:
1400 angle: The angle (degrees) to rotate (add) by.
1401 Examples:
1402 scan.rotate_linpolphase(2.3)
1403 """
1404 varlist = vars()
1405 self._math._rotate_linpolphase(self, angle)
1406 self._add_history("rotate_linpolphase", varlist)
1407 print_log()
1408 return
1409
1410
1411 def rotate_xyphase(self, angle):
1412 """
1413 Rotate the phase of the XY correlation. This is always done in situ
1414 in the data. So if you call this function more than once
1415 then each call rotates the phase further.
1416 Parameters:
1417 angle: The angle (degrees) to rotate (add) by.
1418 Examples:
1419 scan.rotate_xyphase(2.3)
1420 """
1421 varlist = vars()
1422 self._math._rotate_xyphase(self, angle)
1423 self._add_history("rotate_xyphase", varlist)
1424 print_log()
1425 return
1426
1427 def swap_linears(self):
1428 """
1429 Swap the linear polarisations XX and YY, or better the first two
1430 polarisations as this also works for circulars.
1431 """
1432 varlist = vars()
1433 self._math._swap_linears(self)
1434 self._add_history("swap_linears", varlist)
1435 print_log()
1436 return
1437
1438 def invert_phase(self):
1439 """
1440 Invert the phase of the complex polarisation
1441 """
1442 varlist = vars()
1443 self._math._invert_phase(self)
1444 self._add_history("invert_phase", varlist)
1445 print_log()
1446 return
1447
1448 def add(self, offset, insitu=None):
1449 """
1450 Return a scan where all spectra have the offset added
1451 Parameters:
1452 offset: the offset
1453 insitu: if False a new scantable is returned.
1454 Otherwise, the scaling is done in-situ
1455 The default is taken from .asaprc (False)
1456 """
1457 if insitu is None: insitu = rcParams['insitu']
1458 self._math._setinsitu(insitu)
1459 varlist = vars()
1460 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1461 s._add_history("add", varlist)
1462 print_log()
1463 if insitu:
1464 self._assign(s)
1465 else:
1466 return s
1467
1468 def scale(self, factor, tsys=True, insitu=None):
1469 """
1470 Return a scan where all spectra are scaled by the given 'factor'
1471 Parameters:
1472 factor: the scaling factor
1473 insitu: if False a new scantable is returned.
1474 Otherwise, the scaling is done in-situ
1475 The default is taken from .asaprc (False)
1476 tsys: if True (default) then apply the operation to Tsys
1477 as well as the data
1478 """
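        # Usage sketch (commented out; the factor is illustrative):
        #   doubled = scan.scale(2.0, tsys=False, insitu=False)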
1479 if insitu is None: insitu = rcParams['insitu']
1480 self._math._setinsitu(insitu)
1481 varlist = vars()
1482 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1483 s._add_history("scale", varlist)
1484 print_log()
1485 if insitu:
1486 self._assign(s)
1487 else:
1488 return s
1489
1490 def auto_quotient(self, preserve=True, mode='paired'):
1491 """
1492 This function allows you to build quotients automatically.
1493 It assumes the observation to have the same number of
1494 "ons" and "offs".
1495 Parameters:
1496 preserve: you can preserve (default) the continuum or
1497 remove it. The equations used are
1498 preserve: Output = Toff * (on/off) - Toff
1499 remove: Output = Toff * (on/off) - Ton
1500 mode: the on/off detection mode
1501 'paired' (default)
1502 identifies 'off' scans by the
1503 trailing '_R' (Mopra/Parkes) or
1504 '_e'/'_w' (Tid) and matches
1505 on/off pairs from the observing pattern
1506 'time'
1507 finds the closest off in time
1508
1509 """
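        # Usage sketch (commented out):
        #   q = scan.auto_quotient()             # pair 'on'/'off' scans by pattern
        #   q = scan.auto_quotient(mode='time')  # match each 'on' to the closest 'off'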
1510 modes = ["time", "paired"]
1511 if not mode in modes:
1512 msg = "please provide valid mode. Valid modes are %s" % (modes)
1513 raise ValueError(msg)
1514 varlist = vars()
1515 s = None
1516 if mode.lower() == "paired":
1517 basesel = self.get_selection()
1518 sel = selector()+basesel
1519 sel.set_query("SRCTYPE==1")
1520 self.set_selection(sel)
1521 offs = self.copy()
1522 sel.set_query("SRCTYPE==0")
1523 self.set_selection(sel)
1524 ons = self.copy()
1525 s = scantable(self._math._quotient(ons, offs, preserve))
1526 self.set_selection(basesel)
1527 elif mode.lower() == "time":
1528 s = scantable(self._math._auto_quotient(self, mode, preserve))
1529 s._add_history("auto_quotient", varlist)
1530 print_log()
1531 return s
1532
1533 def mx_quotient(self, mask = None, weight='median', preserve=True):
1534 """
1535 Form a quotient using "off" beams when observing in "MX" mode.
1536 Parameters:
1537 mask: an optional mask to be used when weight == 'stddev'
1538 weight: How to average the off beams. Default is 'median'.
1539 preserve: you can preserve (default) the continuum or
1540 remove it. The equations used are
1541 preserve: Output = Toff * (on/off) - Toff
1542 remove: Output = Toff * (on/off) - Ton
1543 """
1544 if mask is None: mask = ()
1545 varlist = vars()
1546 on = scantable(self._math._mx_extract(self, 'on'))
1547 preoff = scantable(self._math._mx_extract(self, 'off'))
1548 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1549 from asapmath import quotient
1550 q = quotient(on, off, preserve)
1551 q._add_history("mx_quotient", varlist)
1552 print_log()
1553 return q
1554
1555 def freq_switch(self, insitu=None):
1556 """
1557 Apply frequency switching to the data.
1558 Parameters:
1559 insitu: if False a new scantable is returned.
1560 Otherwise, the switching is done in-situ
1561 The default is taken from .asaprc (False)
1562 Example:
1563 none
1564 """
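        # Usage sketch (commented out):
        #   fs = scan.freq_switch(insitu=False)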
1565 if insitu is None: insitu = rcParams['insitu']
1566 self._math._setinsitu(insitu)
1567 varlist = vars()
1568 s = scantable(self._math._freqswitch(self))
1569 s._add_history("freq_switch", varlist)
1570 print_log()
1571 if insitu: self._assign(s)
1572 else: return s
1573
1574 def recalc_azel(self):
1575 """
1576 Recalculate the azimuth and elevation for each position.
1577 Parameters:
1578 none
1579 Example:
1580 """
1581 varlist = vars()
1582 self._recalcazel()
1583 self._add_history("recalc_azel", varlist)
1584 print_log()
1585 return
1586
1587 def __add__(self, other):
1588 varlist = vars()
1589 s = None
1590 if isinstance(other, scantable):
1591 s = scantable(self._math._binaryop(self, other, "ADD"))
1592 elif isinstance(other, float):
1593 s = scantable(self._math._unaryop(self, other, "ADD", False))
1594 else:
1595 raise TypeError("Other input is not a scantable or float value")
1596 s._add_history("operator +", varlist)
1597 print_log()
1598 return s
1599
1600 def __sub__(self, other):
1601 """
1602 implicit on all axes and on Tsys
1603 """
1604 varlist = vars()
1605 s = None
1606 if isinstance(other, scantable):
1607 s = scantable(self._math._binaryop(self, other, "SUB"))
1608 elif isinstance(other, float):
1609 s = scantable(self._math._unaryop(self, other, "SUB", False))
1610 else:
1611 raise TypeError("Other input is not a scantable or float value")
1612 s._add_history("operator -", varlist)
1613 print_log()
1614 return s
1615
1616 def __mul__(self, other):
1617 """
1618 implicit on all axes and on Tsys
1619 """
1620 varlist = vars()
1621 s = None
1622 if isinstance(other, scantable):
1623 s = scantable(self._math._binaryop(self, other, "MUL"))
1624 elif isinstance(other, float):
1625 s = scantable(self._math._unaryop(self, other, "MUL", False))
1626 else:
1627 raise TypeError("Other input is not a scantable or float value")
1628 s._add_history("operator *", varlist)
1629 print_log()
1630 return s
1631
1632
1633 def __div__(self, other):
1634 """
1635 implicit on all axes and on Tsys
1636 """
1637 varlist = vars()
1638 s = None
1639 if isinstance(other, scantable):
1640 s = scantable(self._math._binaryop(self, other, "DIV"))
1641 elif isinstance(other, float):
1642 if other == 0.0:
1643 raise ZeroDivisionError("Dividing by zero is not recommended")
1644 s = scantable(self._math._unaryop(self, other, "DIV", False))
1645 else:
1646 raise TypeError("Other input is not a scantable or float value")
1647 s._add_history("operator /", varlist)
1648 print_log()
1649 return s
1650
1651 def get_fit(self, row=0):
1652 """
1653 Print or return the stored fits for a row in the scantable
1654 Parameters:
1655 row: the row which the fit has been applied to.
1656 """
1657 if row >= self.nrow():
1658 return
1659 from asap.asapfit import asapfit
1660 fit = asapfit(self._getfit(row))
1661 if rcParams['verbose']:
1662 print fit
1663 return
1664 else:
1665 return fit.as_dict()
1666
1667 def _add_history(self, funcname, parameters):
1668 # create date
1669 sep = "##"
1670 from datetime import datetime
1671 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1672 hist = dstr+sep
1673 hist += funcname+sep#cdate+sep
1674 if parameters.has_key('self'): del parameters['self']
1675 for k, v in parameters.iteritems():
1676 if type(v) is dict:
1677 for k2, v2 in v.iteritems():
1678 hist += k2
1679 hist += "="
1680 if isinstance(v2, scantable):
1681 hist += 'scantable'
1682 elif k2 == 'mask':
1683 if isinstance(v2, list) or isinstance(v2, tuple):
1684 hist += str(self._zip_mask(v2))
1685 else:
1686 hist += str(v2)
1687 else:
1688 hist += str(v2)
1689 else:
1690 hist += k
1691 hist += "="
1692 if isinstance(v, scantable):
1693 hist += 'scantable'
1694 elif k == 'mask':
1695 if isinstance(v, list) or isinstance(v, tuple):
1696 hist += str(self._zip_mask(v))
1697 else:
1698 hist += str(v)
1699 else:
1700 hist += str(v)
1701 hist += sep
1702 hist = hist[:-2] # remove trailing '##'
1703 self._addhistory(hist)
1704
1705
1706 def _zip_mask(self, mask):
1707 mask = list(mask)
1708 i = 0
1709 segments = []
1710 while mask[i:].count(1):
1711 i += mask[i:].index(1)
1712 if mask[i:].count(0):
1713 j = i + mask[i:].index(0)
1714 else:
1715 j = len(mask)
1716 segments.append([i, j])
1717 i = j
1718 return segments
1719
1720 def _get_ordinate_label(self):
1721 fu = "("+self.get_fluxunit()+")"
1722 import re
1723 lbl = "Intensity"
1724 if re.match(".K.", fu):
1725 lbl = "Brightness Temperature "+ fu
1726 elif re.match(".Jy.", fu):
1727 lbl = "Flux density "+ fu
1728 return lbl
1729
1730 def _check_ifs(self):
1731 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1732 nchans = filter(lambda t: t > 0, nchans)
1733 return (sum(nchans)/len(nchans) == nchans[0])
1734
1735 def _fill(self, names, unit, average):
1736 import os
1737 from asap._asap import stfiller
1738 first = True
1739 fullnames = []
1740 for name in names:
1741 name = os.path.expandvars(name)
1742 name = os.path.expanduser(name)
1743 if not os.path.exists(name):
1744 msg = "File '%s' does not exist" % (name)
1745 if rcParams['verbose']:
1746 asaplog.push(msg)
1747 print asaplog.pop().strip()
1748 return
1749 raise IOError(msg)
1750 fullnames.append(name)
1751 if average:
1752 asaplog.push('Auto averaging integrations')
1753 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1754 for name in fullnames:
1755 tbl = Scantable(stype)
1756 r = stfiller(tbl)
1757 msg = "Importing %s..." % (name)
1758 asaplog.push(msg, False)
1759 print_log()
1760 r._open(name, -1, -1)
1761 r._read()
1762 #tbl = r._getdata()
1763 if average:
1764 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1765 #tbl = tbl2
1766 if not first:
1767 tbl = self._math._merge([self, tbl])
1768 #tbl = tbl2
1769 Scantable.__init__(self, tbl)
1770 r._close()
1771 del r, tbl
1772 first = False
1773 if unit is not None:
1774 self.set_fluxunit(unit)
1775 self.set_freqframe(rcParams['scantable.freqframe'])
1776
1777 def __getitem__(self, key):
1778 if key < 0:
1779 key += self.nrow()
1780 if key >= self.nrow():
1781 raise IndexError("Row index out of range.")
1782 return self._getspectrum(key)
1783
1784 def __setitem__(self, key, value):
1785 if key < 0:
1786 key += self.nrow()
1787 if key >= self.nrow():
1788 raise IndexError("Row index out of range.")
1789 if not hasattr(value, "__len__") or \
1790 len(value) > self.nchan(self.getif(key)):
1791 raise ValueError("Spectrum length doesn't match.")
1792 return self._setspectrum(value, key)
1793
1794 def __len__(self):
1795 return self.nrow()
1796
1797 def __iter__(self):
1798 for i in range(len(self)):
1799 yield self[i]