source: trunk/python/scantable.py@ 1589

Last change on this file since 1589 was 1589, checked in by Malte Marquarding, 16 years ago

Introduced print_log_dec(orator)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 71.5 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log, print_log_dec
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13 @print_log_dec
14 def __init__(self, filename, average=None, unit=None, parallactify=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 parallactify: Indicate that the data had been parallactified.
33 Default is taken from the rc file.
34 """
35 if average is None:
36 average = rcParams['scantable.autoaverage']
37 if parallactify is None:
38 parallactify = rcParams['scantable.parallactify']
39 varlist = vars()
40 from asap._asap import stmath
41 self._math = stmath()
42 if isinstance(filename, Scantable):
43 Scantable.__init__(self, filename)
44 else:
45 if isinstance(filename, str):# or \
46# (isinstance(filename, list) or isinstance(filename, tuple)) \
47# and isinstance(filename[-1], str):
48 import os.path
49 filename = os.path.expandvars(filename)
50 filename = os.path.expanduser(filename)
51 if not os.path.exists(filename):
52 s = "File '%s' not found." % (filename)
53 if rcParams['verbose']:
54 asaplog.push(s)
55 #print asaplog.pop().strip()
56 return
57 raise IOError(s)
58 if os.path.isdir(filename) \
59 and not os.path.exists(filename+'/table.f1'):
60 # crude check if asap table
61 if os.path.exists(filename+'/table.info'):
62 ondisk = rcParams['scantable.storage'] == 'disk'
63 Scantable.__init__(self, filename, ondisk)
64 if unit is not None:
65 self.set_fluxunit(unit)
66 self.set_freqframe(rcParams['scantable.freqframe'])
67 else:
68 msg = "The given file '%s'is not a valid " \
69 "asap table." % (filename)
70 if rcParams['verbose']:
71 print msg
72 return
73 else:
74 raise IOError(msg)
75 else:
76 self._fill([filename], unit, average)
77 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
78 and isinstance(filename[-1], str):
79 self._fill(filename, unit, average)
80 self.parallactify(parallactify)
81 self._add_history("scantable", varlist)
82
83 @print_log_dec
84 def save(self, name=None, format=None, overwrite=False):
85 """
86 Store the scantable on disk. This can be an asap (aips++) Table,
87 SDFITS or MS2 format.
88 Parameters:
89 name: the name of the output file. For format "ASCII"
90 this is the root file name (data in 'name'.txt
91 and header in 'name'_header.txt)
92 format: an optional file format. Default is ASAP.
93 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
94 'SDFITS' (save as SDFITS file)
95 'ASCII' (saves as ascii text file)
96 'MS2' (saves as an aips++
97 MeasurementSet V2)
98 'FITS' (save as image FITS - not
99 readable by class)
100 'CLASS' (save as FITS readable by CLASS)
101 overwrite: if True, overwrite the file if it exists.
102 The default (False) is to return with a warning
103 without writing the output. USE WITH CARE.
104 Example:
105 scan.save('myscan.asap')
106 scan.save('myscan.sdfits', 'SDFITS')
107 """
108 from os import path
109 if format is None: format = rcParams['scantable.save']
110 suffix = '.'+format.lower()
111 if name is None or name == "":
112 name = 'scantable'+suffix
113 msg = "No filename given. Using default name %s..." % name
114 asaplog.push(msg)
115 name = path.expandvars(name)
116 if path.isfile(name) or path.isdir(name):
117 if not overwrite:
118 msg = "File %s exists." % name
119 if rcParams['verbose']:
120 print msg
121 return
122 else:
123 raise IOError(msg)
124 format2 = format.upper()
125 if format2 == 'ASAP':
126 self._save(name)
127 else:
128 from asap._asap import stwriter as stw
129 writer = stw(format2)
130 writer.write(self, name)
131 return
132
133 def copy(self):
134 """
135 Return a copy of this scantable.
136 Note:
137 This makes a full (deep) copy. scan2 = scan1 makes a reference.
138 Parameters:
139 none
140 Example:
141 copiedscan = scan.copy()
142 """
143 sd = scantable(Scantable._copy(self))
144 return sd
145
146 def drop_scan(self, scanid=None):
147 """
148 Return a new scantable where the specified scan number(s) has(have)
149 been dropped.
150 Parameters:
151 scanid: a (list of) scan number(s)
152 """
153 from asap import _is_sequence_or_number as _is_valid
154 from asap import _to_list
155 from asap import unique
156 if not _is_valid(scanid):
157 if rcParams['verbose']:
158 print "Please specify a scanno to drop from the scantable"
159 return
160 else:
161 raise RuntimeError("No scan given")
162 try:
163 scanid = _to_list(scanid)
164 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
165 for sid in scanid: allscans.remove(sid)
166 if len(allscans) == 0:
167 raise ValueError("Can't remove all scans")
168 except ValueError:
169 if rcParams['verbose']:
170 print "Couldn't find any match."
171 return
172 else: raise
173 try:
174 bsel = self.get_selection()
175 sel = selector()
176 sel.set_scans(allscans)
177 self.set_selection(bsel+sel)
178 scopy = self._copy()
179 self.set_selection(bsel)
180 return scantable(scopy)
181 except RuntimeError:
182 if rcParams['verbose']:
183 print "Couldn't find any match."
184 else:
185 raise
186
187
188 def get_scan(self, scanid=None):
189 """
190 Return a specific scan (by scanno) or collection of scans (by
191 source name) in a new scantable.
192 Note:
193 See scantable.drop_scan() for the inverse operation.
194 Parameters:
195 scanid: a (list of) scanno or a source name, unix-style
196 patterns are accepted for source name matching, e.g.
197 '*_R' gets all 'ref' scans
198 Example:
199 # get all scans containing the source '323p459'
200 newscan = scan.get_scan('323p459')
201 # get all 'off' scans
202 refscans = scan.get_scan('*_R')
203 # get a subset of scans by scanno (as listed in scan.summary())
204 newscan = scan.get_scan([0, 2, 7, 10])
205 """
206 if scanid is None:
207 if rcParams['verbose']:
208 print "Please specify a scan no or name to " \
209 "retrieve from the scantable"
210 return
211 else:
212 raise RuntimeError("No scan given")
213
214 try:
215 bsel = self.get_selection()
216 sel = selector()
217 if type(scanid) is str:
218 sel.set_name(scanid)
219 self.set_selection(bsel+sel)
220 scopy = self._copy()
221 self.set_selection(bsel)
222 return scantable(scopy)
223 elif type(scanid) is int:
224 sel.set_scans([scanid])
225 self.set_selection(bsel+sel)
226 scopy = self._copy()
227 self.set_selection(bsel)
228 return scantable(scopy)
229 elif type(scanid) is list:
230 sel.set_scans(scanid)
231 self.set_selection(bsel+sel)
232 scopy = self._copy()
233 self.set_selection(bsel)
234 return scantable(scopy)
235 else:
236 msg = "Illegal scanid type, use 'int' or 'list' if ints."
237 if rcParams['verbose']:
238 print msg
239 else:
240 raise TypeError(msg)
241 except RuntimeError:
242 if rcParams['verbose']: print "Couldn't find any match."
243 else: raise
244
245 def __str__(self):
246 return Scantable._summary(self, True)
247
248 def summary(self, filename=None):
249 """
250 Print a summary of the contents of this scantable.
251 Parameters:
252 filename: the name of a file to write the output to
253 Default - no file output
254 verbose: print extra info such as the frequency table
255 The default (False) is taken from .asaprc
256 """
257 info = Scantable._summary(self, True)
258 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
259 if filename is not None:
260 if filename == "":
261 filename = 'scantable_summary.txt'
262 from os.path import expandvars, isdir
263 filename = expandvars(filename)
264 if not isdir(filename):
265 data = open(filename, 'w')
266 data.write(info)
267 data.close()
268 else:
269 msg = "Illegal file name '%s'." % (filename)
270 if rcParams['verbose']:
271 print msg
272 else:
273 raise IOError(msg)
274 if rcParams['verbose']:
275 try:
276 from IPython.genutils import page as pager
277 except ImportError:
278 from pydoc import pager
279 pager(info)
280 else:
281 return info
282
283 def get_spectrum(self, rowno):
284 """Return the spectrum for the current row in the scantable as a list.
285 Parameters:
286 rowno: the row number to retrieve the spectrum from
287 """
288 return self._getspectrum(rowno)
289
290 def get_mask(self, rowno):
291 """Return the mask for the current row in the scantable as a list.
292 Parameters:
293 rowno: the row number to retrieve the mask from
294 """
295 return self._getmask(rowno)
296
297 def set_spectrum(self, spec, rowno):
298 """Return the spectrum for the current row in the scantable as a list.
299 Parameters:
300 spec: the spectrum
301 rowno: the row number to set the spectrum for
302 """
303 assert(len(spec) == self.nchan())
304 return self._setspectrum(spec, rowno)
305
306 def get_selection(self):
307 """
308 Get the selection object currently set on this scantable.
309 Parameters:
310 none
311 Example:
312 sel = scan.get_selection()
313 sel.set_ifs(0) # select IF 0
314 scan.set_selection(sel) # apply modified selection
315 """
316 return selector(self._getselection())
317
318 def set_selection(self, selection=None, **kw):
319 """
320 Select a subset of the data. All following operations on this scantable
321 are only applied to this selection.
322 Parameters:
323 selection: a selector object (default unset the selection)
324 Examples:
325 sel = selector() # create a selection object
326 sel.set_scans([0, 3]) # select SCANNO 0 and 3
327 scan.set_selection(sel) # set the selection
328 scan.summary() # will only print summary of scanno 0 and 3
329 scan.set_selection() # unset the selection
330 """
331 if selection is None:
332 # reset
333 if len(kw) == 0:
334 selection = selector()
335 else:
336 # try keywords
337 for k in kw:
338 if k not in selector.fields:
339 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
340 selection = selector(**kw)
341 self._setselection(selection)
342
343 def stats(self, stat='stddev', mask=None):
344 """
345 Determine the specified statistic of the current beam/if/pol
346 Takes a 'mask' as an optional parameter to specify which
347 channels should be excluded.
348 Parameters:
349 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
350 'var', 'stddev', 'avdev', 'rms', 'median'
351 mask: an optional mask specifying where the statistic
352 should be determined.
353 Example:
354 scan.set_unit('channel')
355 msk = scan.create_mask([100, 200], [500, 600])
356 scan.stats(stat='mean', mask=msk)
357 """
358 if mask is None:
359 mask = []
360 axes = ['Beam', 'IF', 'Pol', 'Time']
361 if not self._check_ifs():
362 raise ValueError("Cannot apply mask as the IFs have different "
363 "number of channels. Please use setselection() "
364 "to select individual IFs")
365
366 statvals = self._math._stats(self, mask, stat)
367 out = ''
368 axes = []
369 for i in range(self.nrow()):
370 axis = []
371 axis.append(self.getscan(i))
372 axis.append(self.getbeam(i))
373 axis.append(self.getif(i))
374 axis.append(self.getpol(i))
375 axis.append(self.getcycle(i))
376 axes.append(axis)
377 tm = self._gettime(i)
378 src = self._getsourcename(i)
379 out += 'Scan[%d] (%s) ' % (axis[0], src)
380 out += 'Time[%s]:\n' % (tm)
381 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
382 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
383 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
384 out += '= %3.3f\n' % (statvals[i])
385 out += "--------------------------------------------------\n"
386
387 if rcParams['verbose']:
388 print "--------------------------------------------------"
389 print " ", stat
390 print "--------------------------------------------------"
391 print out
392 #else:
393 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
394 # 'axes' : axes,
395 # 'data': statvals}
396 return statvals
397
398 def stddev(self, mask=None):
399 """
400 Determine the standard deviation of the current beam/if/pol
401 Takes a 'mask' as an optional parameter to specify which
402 channels should be excluded.
403 Parameters:
404 mask: an optional mask specifying where the standard
405 deviation should be determined.
406
407 Example:
408 scan.set_unit('channel')
409 msk = scan.create_mask([100, 200], [500, 600])
410 scan.stddev(mask=msk)
411 """
412 return self.stats(stat='stddev', mask=mask)
413
414
415 def get_column_names(self):
416 """
417 Return a list of column names, which can be used for selection.
418 """
419 return list(Scantable.get_column_names(self))
420
421 def get_tsys(self):
422 """
423 Return the System temperatures.
424 Returns:
425 a list of Tsys values for the current selection
426 """
427
428 return self._row_callback(self._gettsys, "Tsys")
429
430 def _row_callback(self, callback, label):
431 axes = []
432 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
433 out = ""
434 outvec = []
435 for i in range(self.nrow()):
436 axis = []
437 axis.append(self.getscan(i))
438 axis.append(self.getbeam(i))
439 axis.append(self.getif(i))
440 axis.append(self.getpol(i))
441 axis.append(self.getcycle(i))
442 axes.append(axis)
443 tm = self._gettime(i)
444 src = self._getsourcename(i)
445 out += 'Scan[%d] (%s) ' % (axis[0], src)
446 out += 'Time[%s]:\n' % (tm)
447 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
448 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
449 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
450 outvec.append(callback(i))
451 out += '= %3.3f\n' % (outvec[i])
452 out += "--------------------------------------------------\n"
453 if rcParams['verbose']:
454 print "--------------------------------------------------"
455 print " %s" % (label)
456 print "--------------------------------------------------"
457 print out
458 # disabled because the vector seems more useful
459 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
460 return outvec
461
462 def _get_column(self, callback, row=-1):
463 """
464 """
465 if row == -1:
466 return [callback(i) for i in range(self.nrow())]
467 else:
468 if 0 <= row < self.nrow():
469 return callback(row)
470
471
472 def get_time(self, row=-1, asdatetime=False):
473 """
474 Get a list of time stamps for the observations.
475 Return a datetime object for each integration time stamp in the scantable.
476 Parameters:
477 row: row no of integration. Default -1 return all rows
478 asdatetime: return values as datetime objects rather than strings
479 Example:
480 none
481 """
482 from time import strptime
483 from datetime import datetime
484 times = self._get_column(self._gettime, row)
485 if not asdatetime:
486 return times
487 format = "%Y/%m/%d/%H:%M:%S"
488 if isinstance(times, list):
489 return [datetime(*strptime(i, format)[:6]) for i in times]
490 else:
491 return datetime(*strptime(times, format)[:6])
492
493
494 def get_inttime(self, row=-1):
495 """
496 Get a list of integration times for the observations.
497 Return a time in seconds for each integration in the scantable.
498 Parameters:
499 row: row no of integration. Default -1 return all rows.
500 Example:
501 none
502 """
503 return self._get_column(self._getinttime, row)
504
505
506 def get_sourcename(self, row=-1):
507 """
508 Get a list of source names for the observations.
509 Return a string for each integration in the scantable.
510 Parameters:
511 row: row no of integration. Default -1 return all rows.
512 Example:
513 none
514 """
515 return self._get_column(self._getsourcename, row)
516
517 def get_elevation(self, row=-1):
518 """
519 Get a list of elevations for the observations.
520 Return a float for each integration in the scantable.
521 Parameters:
522 row: row no of integration. Default -1 return all rows.
523 Example:
524 none
525 """
526 return self._get_column(self._getelevation, row)
527
528 def get_azimuth(self, row=-1):
529 """
530 Get a list of azimuths for the observations.
531 Return a float for each integration in the scantable.
532 Parameters:
533 row: row no of integration. Default -1 return all rows.
534 Example:
535 none
536 """
537 return self._get_column(self._getazimuth, row)
538
539 def get_parangle(self, row=-1):
540 """
541 Get a list of parallactic angles for the observations.
542 Return a float for each integration in the scantable.
543 Parameters:
544 row: row no of integration. Default -1 return all rows.
545 Example:
546 none
547 """
548 return self._get_column(self._getparangle, row)
549
550 def get_direction(self, row=-1):
551 """
552 Get a list of Positions on the sky (direction) for the observations.
553 Return the position on the sky for each integration in the scantable.
554 Parameters:
555 row: row no of integration. Default -1 return all rows
556 Example:
557 none
558 """
559 return self._get_column(self._getdirection, row)
560
561 def get_directionval(self, row=-1):
562 """
563 Get a list of Positions on the sky (direction) for the observations.
564 Return a float for each integration in the scantable.
565 Parameters:
566 row: row no of integration. Default -1 return all rows
567 Example:
568 none
569 """
570 return self._get_column(self._getdirectionvec, row)
571
572
573 def set_unit(self, unit='channel'):
574 """
575 Set the unit for all following operations on this scantable
576 Parameters:
577 unit: optional unit, default is 'channel'
578 one of '*Hz', 'km/s', 'channel', ''
579 """
580 varlist = vars()
581 if unit in ['', 'pixel', 'channel']:
582 unit = ''
583 inf = list(self._getcoordinfo())
584 inf[0] = unit
585 self._setcoordinfo(inf)
586 self._add_history("set_unit", varlist)
587
588 @print_log_dec
589 def set_instrument(self, instr):
590 """
591 Set the instrument for subsequent processing.
592 Parameters:
593 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
594 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
595 """
596 self._setInstrument(instr)
597 self._add_history("set_instrument", vars())
598
599 @print_log_dec
600 def set_feedtype(self, feedtype):
601 """
602 Overwrite the feed type, which might not be set correctly.
603 Parameters:
604 feedtype: 'linear' or 'circular'
605 """
606 self._setfeedtype(feedtype)
607 self._add_history("set_feedtype", vars())
608
609 @print_log_dec
610 def set_doppler(self, doppler='RADIO'):
611 """
612 Set the doppler for all following operations on this scantable.
613 Parameters:
614 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
615 """
616 varlist = vars()
617 inf = list(self._getcoordinfo())
618 inf[2] = doppler
619 self._setcoordinfo(inf)
620 self._add_history("set_doppler", vars())
621
622 @print_log_dec
623 def set_freqframe(self, frame=None):
624 """
625 Set the frame type of the Spectral Axis.
626 Parameters:
627 frame: an optional frame type, default 'LSRK'. Valid frames are:
628 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
629 'GEO', 'GALACTO', 'LGROUP', 'CMB'
630 Examples:
631 scan.set_freqframe('BARY')
632 """
633 if frame is None: frame = rcParams['scantable.freqframe']
634 varlist = vars()
635 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
636 'GEO', 'GALACTO', 'LGROUP', 'CMB']
637
638 if frame in valid:
639 inf = list(self._getcoordinfo())
640 inf[1] = frame
641 self._setcoordinfo(inf)
642 self._add_history("set_freqframe", varlist)
643 else:
644 msg = "Please specify a valid freq type. Valid types are:\n", valid
645 if rcParams['verbose']:
646 print msg
647 else:
648 raise TypeError(msg)
649
650 def set_dirframe(self, frame=""):
651 """
652 Set the frame type of the Direction on the sky.
653 Parameters:
654 frame: an optional frame type, default ''. Valid frames are:
655 'J2000', 'B1950', 'GALACTIC'
656 Examples:
657 scan.set_dirframe('GALACTIC')
658 """
659 varlist = vars()
660 try:
661 Scantable.set_dirframe(self, frame)
662 except RuntimeError, msg:
663 if rcParams['verbose']:
664 print msg
665 else:
666 raise
667 self._add_history("set_dirframe", varlist)
668
669 def get_unit(self):
670 """
671 Get the default unit set in this scantable
672 Returns:
673 A unit string
674 """
675 inf = self._getcoordinfo()
676 unit = inf[0]
677 if unit == '': unit = 'channel'
678 return unit
679
680 def get_abcissa(self, rowno=0):
681 """
682 Get the abcissa in the current coordinate setup for the currently
683 selected Beam/IF/Pol
684 Parameters:
685 rowno: an optional row number in the scantable. Default is the
686 first row, i.e. rowno=0
687 Returns:
688 The abcissa values and the format string (as a tuple)
689 """
690 abc = self._getabcissa(rowno)
691 lbl = self._getabcissalabel(rowno)
692 return abc, lbl
693
694 def flag(self, mask=None):
695 """
696 Flag the selected data using an optional channel mask.
697 Parameters:
698 mask: an optional channel mask, created with create_mask. Default
699 (no mask) is all channels.
700 """
701 varlist = vars()
702 if mask is None:
703 mask = []
704 try:
705 self._flag(mask)
706 except RuntimeError, msg:
707 if rcParams['verbose']:
708 print msg
709 return
710 else: raise
711 self._add_history("flag", varlist)
712
713 @print_log_dec
714 def lag_flag(self, start, end, unit="MHz", insitu=None):
715 """
716 Flag the data in 'lag' space by providing a frequency to remove.
717 Flagged data in the scantable gets interpolated over the region.
718 No taper is applied.
719 Parameters:
720 start: the start frequency (really a period within the
721 bandwidth) or period to remove
722 end: the end frequency or period to remove
723 unit: the frequency unit (default "MHz") or "" for
724 explicit lag channels
725 Notes:
726 It is recommended to flag edges of the band or strong
727 signals beforehand.
728 """
729 if insitu is None: insitu = rcParams['insitu']
730 self._math._setinsitu(insitu)
731 varlist = vars()
732 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
733 if not (unit == "" or base.has_key(unit)):
734 raise ValueError("%s is not a valid unit." % unit)
735 try:
736 if unit == "":
737 s = scantable(self._math._lag_flag(self, start, end, "lags"))
738 else:
739 s = scantable(self._math._lag_flag(self, start*base[unit],
740 end*base[unit], "frequency"))
741 except RuntimeError, msg:
742 if rcParams['verbose']:
743 print msg
744 return
745 else: raise
746 s._add_history("lag_flag", varlist)
747 if insitu:
748 self._assign(s)
749 else:
750 return s
751
752 @print_log_dec
753 def create_mask(self, *args, **kwargs):
754 """
755 Compute and return a mask based on [min, max] windows.
756 The specified windows are to be INCLUDED, when the mask is
757 applied.
758 Parameters:
759 [min, max], [min2, max2], ...
760 Pairs of start/end points (inclusive) specifying the regions
761 to be masked
762 invert: optional argument. If specified as True,
763 return an inverted mask, i.e. the regions
764 specified are EXCLUDED
765 row: create the mask using the specified row for
766 unit conversions, default is row=0
767 only necessary if frequency varies over rows.
768 Example:
769 scan.set_unit('channel')
770 a)
771 msk = scan.create_mask([400, 500], [800, 900])
772 # masks everything outside 400 and 500
773 # and 800 and 900 in the unit 'channel'
774
775 b)
776 msk = scan.create_mask([400, 500], [800, 900], invert=True)
777 # masks the regions between 400 and 500
778 # and 800 and 900 in the unit 'channel'
779 c)
780 mask only channel 400
781 msk = scan.create_mask([400])
782 """
783 row = kwargs.get("row", 0)
784 data = self._getabcissa(row)
785 u = self._getcoordinfo()[0]
786 if rcParams['verbose']:
787 if u == "": u = "channel"
788 msg = "The current mask window unit is %s" % u
789 i = self._check_ifs()
790 if not i:
791 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
792 asaplog.push(msg)
793 n = self.nchan()
794 msk = _n_bools(n, False)
795 # test if args is a 'list' or a 'normal' *args - UGLY!!!
796
797 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
798 and args or args[0]
799 for window in ws:
800 if len(window) == 1:
801 window = [window[0], window[0]]
802 if len(window) == 0 or len(window) > 2:
803 raise ValueError("A window needs to be defined as [start(, end)]")
804 if window[0] > window[1]:
805 tmp = window[0]
806 window[0] = window[1]
807 window[1] = tmp
808 for i in range(n):
809 if data[i] >= window[0] and data[i] <= window[1]:
810 msk[i] = True
811 if kwargs.has_key('invert'):
812 if kwargs.get('invert'):
813 msk = mask_not(msk)
814 return msk
815
816 def get_restfreqs(self):
817 """
818 Get the restfrequency(s) stored in this scantable.
819 The return value(s) are always of unit 'Hz'
820 Parameters:
821 none
822 Returns:
823 a list of doubles
824 """
825 return list(self._getrestfreqs())
826
827
828 def set_restfreqs(self, freqs=None, unit='Hz'):
829 """
830 Set or replace the rest frequency.
831 If the 'freqs' argument holds a scalar,
832 then that rest frequency will be applied to all the selected
833 data. If the 'freqs' argument holds
834 a vector, then it MUST be of equal or smaller length than
835 the number of IFs (and the available restfrequencies will be
836 replaced by this vector). In this case, *all* data have
837 the restfrequency set per IF according
838 to the corresponding value you give in the 'freqs' vector.
839 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
840 IF 1 gets restfreq 2e9.
841 You can also specify the frequencies via a linecatalog.
842
843 Parameters:
844 freqs: list of rest frequency values or string identifiers
845 unit: unit for rest frequency (default 'Hz')
846
847 Example:
848 # set the given restfrequency for the whole table
849 scan.set_restfreqs(freqs=1.4e9)
850 # If the number of IFs in the data is >= 2, IF0 gets the first
851 # value IF1 the second...
852 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
853 #set the given restfrequency for the whole table (by name)
854 scan.set_restfreqs(freqs="OH1667")
855
856 Note:
857 To do more sophisticated rest frequency setting, e.g. on a
858 source and IF basis, use scantable.set_selection() before using
859 this function.
860 # provided your scantable is called scan
861 selection = selector()
862 selection.set_name("ORION*")
863 selection.set_ifs([1])
864 scan.set_selection(selection)
865 scan.set_restfreqs(freqs=86.6e9)
866
867 """
868 varlist = vars()
869 from asap import linecatalog
870 # simple value
871 if isinstance(freqs, int) or isinstance(freqs, float):
872 self._setrestfreqs(freqs, "",unit)
873 # list of values
874 elif isinstance(freqs, list) or isinstance(freqs, tuple):
875 # list values are scalars
876 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
877 sel = selector()
878 savesel = self._getselection()
879 iflist = self.getifnos()
880 for i in xrange(len(freqs)):
881 sel.set_ifs(iflist[i])
882 self._setselection(sel)
883 self._setrestfreqs(freqs[i], "",unit)
884 self._setselection(savesel)
885 # list values are dicts, {'value': ..., 'name': ...}
886 elif isinstance(freqs[-1], dict):
887 sel = selector()
888 savesel = self._getselection()
889 iflist = self.getifnos()
890 for i in xrange(len(freqs)):
891 sel.set_ifs(iflist[i])
892 self._setselection(sel)
893 self._setrestfreqs(freqs[i]["value"],
894 freqs[i]["name"], "MHz")
895 self._setselection(savesel)
896 # freqs are to be taken from a linecatalog
897 elif isinstance(freqs, linecatalog):
898 sel = selector()
899 savesel = self._getselection()
900 iflist = self.getifnos()
901 for i in xrange(freqs.nrow()):
902 sel.set_ifs(iflist[i])
903 self._setselection(sel)
904 self._setrestfreqs(freqs.get_frequency(i),
905 freqs.get_name(i), "MHz")
906 # ensure that we are not iterating past nIF
907 if i == self.nif()-1: break
908 self._setselection(savesel)
909 else:
910 return
911 self._add_history("set_restfreqs", varlist)
912
913 def shift_refpix(self, delta):
914 """
915 Shift the reference pixel of the Spectral Coordinate by an
916 integer amount.
917 Parameters:
918 delta: the amount to shift by
919 Note:
920 Be careful using this with broadband data.
921 """
922 Scantable.shift(self, delta)
923
924 def history(self, filename=None):
925 """
926 Print the history. Optionally to a file.
927 Parameters:
928 filename: The name of the file to save the history to.
929 """
930 hist = list(self._gethistory())
931 out = "-"*80
932 for h in hist:
933 if h.startswith("---"):
934 out += "\n"+h
935 else:
936 items = h.split("##")
937 date = items[0]
938 func = items[1]
939 items = items[2:]
940 out += "\n"+date+"\n"
941 out += "Function: %s\n Parameters:" % (func)
942 for i in items:
943 s = i.split("=")
944 out += "\n %s = %s" % (s[0], s[1])
945 out += "\n"+"-"*80
946 if filename is not None:
947 if filename == "":
948 filename = 'scantable_history.txt'
949 import os
950 filename = os.path.expandvars(os.path.expanduser(filename))
951 if not os.path.isdir(filename):
952 data = open(filename, 'w')
953 data.write(out)
954 data.close()
955 else:
956 msg = "Illegal file name '%s'." % (filename)
957 if rcParams['verbose']:
958 print msg
959 else:
960 raise IOError(msg)
961 if rcParams['verbose']:
962 try:
963 from IPython.genutils import page as pager
964 except ImportError:
965 from pydoc import pager
966 pager(out)
967 else:
968 return out
969 return
970 #
971 # Maths business
972 #
973 @print_log_dec
974 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
975 """
976 Return the (time) weighted average of a scan.
977 Note:
978 in channels only - align if necessary
979 Parameters:
980 mask: an optional mask (only used for 'var' and 'tsys'
981 weighting)
982 scanav: True averages each scan separately
983 False (default) averages all scans together,
984 weight: Weighting scheme.
985 'none' (mean no weight)
986 'var' (1/var(spec) weighted)
987 'tsys' (1/Tsys**2 weighted)
988 'tint' (integration time weighted)
989 'tintsys' (Tint/Tsys**2)
990 'median' ( median averaging)
991 The default is 'tint'
992 align: align the spectra in velocity before averaging. It takes
993 the time of the first spectrum as reference time.
994 Example:
995 # time average the scantable without using a mask
996 newscan = scan.average_time()
997 """
998 varlist = vars()
999 if weight is None: weight = 'TINT'
1000 if mask is None: mask = ()
1001 if scanav: scanav = "SCAN"
1002 else: scanav = "NONE"
1003 scan = (self, )
1004 try:
1005 if align:
1006 scan = (self.freq_align(insitu=False), )
1007 s = None
1008 if weight.upper() == 'MEDIAN':
1009 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1010 scanav))
1011 else:
1012 s = scantable(self._math._average(scan, mask, weight.upper(),
1013 scanav))
1014 except RuntimeError, msg:
1015 if rcParams['verbose']:
1016 print msg
1017 return
1018 else: raise
1019 s._add_history("average_time", varlist)
1020 return s
1021
1022 @print_log_dec
1023 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1024 """
1025 Return a scan where all spectra are converted to either
1026 Jansky or Kelvin depending upon the flux units of the scan table.
1027 By default the function tries to look the values up internally.
1028 If it can't find them (or if you want to over-ride), you must
1029 specify EITHER jyperk OR eta (and D which it will try to look up
1030 also if you don't set it). jyperk takes precedence if you set both.
1031 Parameters:
1032 jyperk: the Jy / K conversion factor
1033 eta: the aperture efficiency
1034 d: the geometric diameter (metres)
1035 insitu: if False a new scantable is returned.
1036 Otherwise, the scaling is done in-situ
1037 The default is taken from .asaprc (False)
1038 """
1039 if insitu is None: insitu = rcParams['insitu']
1040 self._math._setinsitu(insitu)
1041 varlist = vars()
1042 if jyperk is None: jyperk = -1.0
1043 if d is None: d = -1.0
1044 if eta is None: eta = -1.0
1045 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1046 s._add_history("convert_flux", varlist)
1047 if insitu: self._assign(s)
1048 else: return s
1049
1050 @print_log_dec
1051 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1052 """
1053 Return a scan after applying a gain-elevation correction.
1054 The correction can be made via either a polynomial or a
1055 table-based interpolation (and extrapolation if necessary).
1056 You specify polynomial coefficients, an ascii table or neither.
1057 If you specify neither, then a polynomial correction will be made
1058 with built in coefficients known for certain telescopes (an error
1059 will occur if the instrument is not known).
1060 The data and Tsys are *divided* by the scaling factors.
1061 Parameters:
1062 poly: Polynomial coefficients (default None) to compute a
1063 gain-elevation correction as a function of
1064 elevation (in degrees).
1065 filename: The name of an ascii file holding correction factors.
1066 The first row of the ascii file must give the column
1067 names and these MUST include columns
1068 "ELEVATION" (degrees) and "FACTOR" (multiply data
1069 by this) somewhere.
1070 The second row must give the data type of the
1071 column. Use 'R' for Real and 'I' for Integer.
1072 An example file would be
1073 (actual factors are arbitrary) :
1074
1075 TIME ELEVATION FACTOR
1076 R R R
1077 0.1 0 0.8
1078 0.2 20 0.85
1079 0.3 40 0.9
1080 0.4 60 0.85
1081 0.5 80 0.8
1082 0.6 90 0.75
1083 method: Interpolation method when correcting from a table.
1084 Values are "nearest", "linear" (default), "cubic"
1085 and "spline"
1086 insitu: if False a new scantable is returned.
1087 Otherwise, the scaling is done in-situ
1088 The default is taken from .asaprc (False)
1089 """
1090
1091 if insitu is None: insitu = rcParams['insitu']
1092 self._math._setinsitu(insitu)
1093 varlist = vars()
1094 if poly is None:
1095 poly = ()
1096 from os.path import expandvars
1097 filename = expandvars(filename)
1098 s = scantable(self._math._gainel(self, poly, filename, method))
1099 s._add_history("gain_el", varlist)
1100 if insitu: self._assign(s)
1101 else: return s
1102
1103 @print_log_dec
1104 def freq_align(self, reftime=None, method='cubic', insitu=None):
1105 """
1106 Return a scan where all rows have been aligned in frequency/velocity.
1107 The alignment frequency frame (e.g. LSRK) is that set by function
1108 set_freqframe.
1109 Parameters:
1110 reftime: reference time to align at. By default, the time of
1111 the first row of data is used.
1112 method: Interpolation method for regridding the spectra.
1113 Choose from "nearest", "linear", "cubic" (default)
1114 and "spline"
1115 insitu: if False a new scantable is returned.
1116 Otherwise, the scaling is done in-situ
1117 The default is taken from .asaprc (False)
1118 """
1119 if insitu is None: insitu = rcParams["insitu"]
1120 self._math._setinsitu(insitu)
1121 varlist = vars()
1122 if reftime is None: reftime = ""
1123 s = scantable(self._math._freq_align(self, reftime, method))
1124 s._add_history("freq_align", varlist)
1125 if insitu: self._assign(s)
1126 else: return s
1127
1128 @print_log_dec
1129 def opacity(self, tau, insitu=None):
1130 """
1131 Apply an opacity correction. The data
1132 and Tsys are multiplied by the correction factor.
1133 Parameters:
1134 tau: Opacity from which the correction factor is
1135 exp(tau*ZD)
1136 where ZD is the zenith-distance
1137 insitu: if False a new scantable is returned.
1138 Otherwise, the scaling is done in-situ
1139 The default is taken from .asaprc (False)
1140 """
1141 if insitu is None: insitu = rcParams['insitu']
1142 self._math._setinsitu(insitu)
1143 varlist = vars()
1144 s = scantable(self._math._opacity(self, tau))
1145 s._add_history("opacity", varlist)
1146 if insitu: self._assign(s)
1147 else: return s
1148
1149 @print_log_dec
1150 def bin(self, width=5, insitu=None):
1151 """
1152 Return a scan where all spectra have been binned up.
1153 Parameters:
1154 width: The bin width (default=5) in pixels
1155 insitu: if False a new scantable is returned.
1156 Otherwise, the scaling is done in-situ
1157 The default is taken from .asaprc (False)
1158 """
1159 if insitu is None: insitu = rcParams['insitu']
1160 self._math._setinsitu(insitu)
1161 varlist = vars()
1162 s = scantable(self._math._bin(self, width))
1163 s._add_history("bin", varlist)
1164 if insitu:
1165 self._assign(s)
1166 else:
1167 return s
1168
1169 @print_log_dec
1170 def resample(self, width=5, method='cubic', insitu=None):
1171 """
1172 Return a scan where all spectra have been resampled onto a new channel grid.
1173
1174 Parameters:
1175 width: The bin width (default=5) in pixels
1176 method: Interpolation method for the resampling.
1177 Values are "nearest", "linear", "cubic" (default)
1178 and "spline"
1179 insitu: if False a new scantable is returned.
1180 Otherwise, the scaling is done in-situ
1181 The default is taken from .asaprc (False)
1182 """
1183 if insitu is None: insitu = rcParams['insitu']
1184 self._math._setinsitu(insitu)
1185 varlist = vars()
1186 s = scantable(self._math._resample(self, method, width))
1187 s._add_history("resample", varlist)
1188 if insitu: self._assign(s)
1189 else: return s
1190
1191 @print_log_dec
1192 def average_pol(self, mask=None, weight='none'):
1193 """
1194 Average the Polarisations together.
1195 Parameters:
1196 mask: An optional mask defining the region, where the
1197 averaging will be applied. The output will have all
1198 specified points masked.
1199 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1200 weighted), or 'tsys' (1/Tsys**2 weighted)
1201 """
1202 varlist = vars()
1203 if mask is None:
1204 mask = ()
1205 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1206 s._add_history("average_pol", varlist)
1207 return s
1208
1209 @print_log_dec
1210 def average_beam(self, mask=None, weight='none'):
1211 """
1212 Average the Beams together.
1213 Parameters:
1214 mask: An optional mask defining the region, where the
1215 averaging will be applied. The output will have all
1216 specified points masked.
1217 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1218 weighted), or 'tsys' (1/Tsys**2 weighted)
1219 """
1220 varlist = vars()
1221 if mask is None:
1222 mask = ()
1223 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1224 s._add_history("average_beam", varlist)
1225 return s
1226
1227 def parallactify(self, pflag):
1228 varlist = vars()
1229 self._parallactify(pflag)
1230 self._add_history("parallactify", varlist)
1231
1232 @print_log_dec
1233 def convert_pol(self, poltype=None):
1234 """
1235 Convert the data to a different polarisation type.
1236 Note that you will need cross-polarisation terms for most conversions.
1237 Parameters:
1238 poltype: The new polarisation type. Valid types are:
1239 "linear", "circular", "stokes" and "linpol"
1240 """
1241 varlist = vars()
1242 try:
1243 s = scantable(self._math._convertpol(self, poltype))
1244 except RuntimeError, msg:
1245 if rcParams['verbose']:
1246 print msg
1247 return
1248 else:
1249 raise
1250 s._add_history("convert_pol", varlist)
1251 return s
1252
1253 @print_log_dec
1254 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1255 """
1256 Smooth the spectrum by the specified kernel (conserving flux).
1257 Parameters:
1258 kernel: The type of smoothing kernel. Select from
1259 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1260 or 'poly'
1261 width: The width of the kernel in pixels. For hanning this is
1262 ignored, otherwise it defaults to 5 pixels.
1263 For 'gaussian' it is the Full Width Half
1264 Maximum. For 'boxcar' it is the full width.
1265 For 'rmedian' and 'poly' it is the half width.
1266 order: Optional parameter for 'poly' kernel (default is 2), to
1267 specify the order of the polynomial. Ignored by all other
1268 kernels.
1269 insitu: if False a new scantable is returned.
1270 Otherwise, the scaling is done in-situ
1271 The default is taken from .asaprc (False)
1272 Example:
1273 none
1274 """
1275 if insitu is None: insitu = rcParams['insitu']
1276 self._math._setinsitu(insitu)
1277 varlist = vars()
1278 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1279 s._add_history("smooth", varlist)
1280 if insitu: self._assign(s)
1281 else: return s
1282
1283 @print_log_dec
1284 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1285 """
1286 Return a scan which has been baselined (all rows) by a polynomial.
1287 Parameters:
1288 mask: an optional mask
1289 order: the order of the polynomial (default is 0)
1290 plot: plot the fit and the residual. In this mode each
1291 individual fit has to be approved by typing 'y'
1292 or 'n'
1293 uselin: use linear polynomial fit
1294 insitu: if False a new scantable is returned.
1295 Otherwise, the scaling is done in-situ
1296 The default is taken from .asaprc (False)
1297 Example:
1298 # return a scan baselined by a third order polynomial,
1299 # not using a mask
1300 bscan = scan.poly_baseline(order=3)
1301 """
1302 if insitu is None: insitu = rcParams['insitu']
1303 varlist = vars()
1304 if mask is None:
1305 mask = [True for i in xrange(self.nchan(-1))]
1306 from asap.asapfitter import fitter
1307 try:
1308 f = fitter()
1309 f.set_scan(self, mask)
1310 #f.set_function(poly=order)
1311 if uselin:
1312 f.set_function(lpoly=order)
1313 else:
1314 f.set_function(poly=order)
1315 s = f.auto_fit(insitu, plot=plot)
1316 s._add_history("poly_baseline", varlist)
1317 if insitu: self._assign(s)
1318 else: return s
1319 except RuntimeError:
1320 msg = "The fit failed, possibly because it didn't converge."
1321 if rcParams['verbose']:
1322 print msg
1323 return
1324 else:
1325 raise RuntimeError(msg)
1326
1327 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1328 threshold=3, chan_avg_limit=1, plot=False,
1329 insitu=None):
1330 """
1331 Return a scan which has been baselined (all rows) by a polynomial.
1332 Spectral lines are detected first using linefinder and masked out
1333 to avoid them affecting the baseline solution.
1334
1335 Parameters:
1336 mask: an optional mask retrieved from scantable
1337 edge: an optional number of channels to drop at
1338 the edge of spectrum. If only one value is
1339 specified, the same number will be dropped from
1340 both sides of the spectrum. Default is to keep
1341 all channels. Nested tuples represent individual
1342 edge selection for different IFs (a number of spectral
1343 channels can be different)
1344 order: the order of the polynomial (default is 0)
1345 threshold: the threshold used by line finder. It is better to
1346 keep it large as only strong lines affect the
1347 baseline solution.
1348 chan_avg_limit:
1349 a maximum number of consecutive spectral channels to
1350 average during the search of weak and broad lines.
1351 The default is no averaging (and no search for weak
1352 lines). If such lines can affect the fitted baseline
1353 (e.g. a high order polynomial is fitted), increase this
1354 parameter (usually values up to 8 are reasonable). Most
1355 users of this method should find the default value
1356 sufficient.
1357 plot: plot the fit and the residual. In this mode each
1358 individual fit has to be approved by typing 'y'
1359 or 'n'
1360 insitu: if False a new scantable is returned.
1361 Otherwise, the scaling is done in-situ
1362 The default is taken from .asaprc (False)
1363
1364 Example:
1365 scan2=scan.auto_poly_baseline(order=7)
1366 """
1367 if insitu is None: insitu = rcParams['insitu']
1368 varlist = vars()
1369 from asap.asapfitter import fitter
1370 from asap.asaplinefind import linefinder
1371 from asap import _is_sequence_or_number as _is_valid
1372
1373 # check whether edge is set up for each IF individually
1374 individualedge = False;
1375 if len(edge) > 1:
1376 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1377 individualedge = True;
1378
1379 if not _is_valid(edge, int) and not individualedge:
1380 raise ValueError, "Parameter 'edge' has to be an integer or a \
1381 pair of integers specified as a tuple. Nested tuples are allowed \
1382 to make individual selection for different IFs."
1383
1384 curedge = (0, 0)
1385 if individualedge:
1386 for edgepar in edge:
1387 if not _is_valid(edgepar, int):
1388 raise ValueError, "Each element of the 'edge' tuple has \
1389 to be a pair of integers or an integer."
1390 else:
1391 curedge = edge;
1392
1393 # setup fitter
1394 f = fitter()
1395 f.set_function(poly=order)
1396
1397 # setup line finder
1398 fl = linefinder()
1399 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1400
1401 if not insitu:
1402 workscan = self.copy()
1403 else:
1404 workscan = self
1405
1406 fl.set_scan(workscan)
1407
1408 rows = range(workscan.nrow())
1409 asaplog.push("Processing:")
1410 for r in rows:
1411 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1412 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1413 workscan.getpol(r), workscan.getcycle(r))
1414 asaplog.push(msg, False)
1415
1416 # figure out edge parameter
1417 if individualedge:
1418 if len(edge) <= workscan.getif(r):
1419 raise RuntimeError, "Number of edge elements appears to " \
1420 "be less than the number of IFs"
1421 curedge = edge[workscan.getif(r)]
1422
1423 # setup line finder
1424 fl.find_lines(r, mask, curedge)
1425 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1426 mask_and(workscan._getmask(r), fl.get_mask()))
1427 f.fit()
1428 x = f.get_parameters()
1429 if plot:
1430 f.plot(residual=True)
1431 x = raw_input("Accept fit ( [y]/n ): ")
1432 if x.upper() == 'N':
1433 continue
1434 workscan._setspectrum(f.fitter.getresidual(), r)
1435 if plot:
1436 f._p.unmap()
1437 f._p = None
1438 workscan._add_history("auto_poly_baseline", varlist)
1439 if insitu:
1440 self._assign(workscan)
1441 else:
1442 return workscan
1443
1444 @print_log_dec
1445 def rotate_linpolphase(self, angle):
1446 """
1447 Rotate the phase of the complex polarization O=Q+iU correlation.
1448 This is always done in situ in the raw data. So if you call this
1449 function more than once then each call rotates the phase further.
1450 Parameters:
1451 angle: The angle (degrees) to rotate (add) by.
1452 Examples:
1453 scan.rotate_linpolphase(2.3)
1454 """
1455 varlist = vars()
1456 self._math._rotate_linpolphase(self, angle)
1457 self._add_history("rotate_linpolphase", varlist)
1458 return
1459
1460 @print_log_dec
1461 def rotate_xyphase(self, angle):
1462 """
1463 Rotate the phase of the XY correlation. This is always done in situ
1464 in the data. So if you call this function more than once
1465 then each call rotates the phase further.
1466 Parameters:
1467 angle: The angle (degrees) to rotate (add) by.
1468 Examples:
1469 scan.rotate_xyphase(2.3)
1470 """
1471 varlist = vars()
1472 self._math._rotate_xyphase(self, angle)
1473 self._add_history("rotate_xyphase", varlist)
1474 return
1475
1476 @print_log_dec
1477 def swap_linears(self):
1478 """
1479 Swap the linear polarisations XX and YY, or better the first two
1480 polarisations as this also works for circulars.
1481 """
1482 varlist = vars()
1483 self._math._swap_linears(self)
1484 self._add_history("swap_linears", varlist)
1485 return
1486
1487 @print_log_dec
1488 def invert_phase(self):
1489 """
1490 Invert the phase of the complex polarisation
1491 """
1492 varlist = vars()
1493 self._math._invert_phase(self)
1494 self._add_history("invert_phase", varlist)
1495 return
1496
1497 @print_log_dec
1498 def add(self, offset, insitu=None):
1499 """
1500 Return a scan where all spectra have the offset added
1501 Parameters:
1502 offset: the offset
1503 insitu: if False a new scantable is returned.
1504 Otherwise, the scaling is done in-situ
1505 The default is taken from .asaprc (False)
1506 """
1507 if insitu is None: insitu = rcParams['insitu']
1508 self._math._setinsitu(insitu)
1509 varlist = vars()
1510 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1511 s._add_history("add", varlist)
1512 if insitu:
1513 self._assign(s)
1514 else:
1515 return s
1516
1517 @print_log_dec
1518 def scale(self, factor, tsys=True, insitu=None):
1519 """
1520 Return a scan where all spectra are scaled by the given 'factor'
1521 Parameters:
1522 factor: the scaling factor
1523 insitu: if False a new scantable is returned.
1524 Otherwise, the scaling is done in-situ
1525 The default is taken from .asaprc (False)
1526 tsys: if True (default) then apply the operation to Tsys
1527 as well as the data
1528 """
1529 if insitu is None: insitu = rcParams['insitu']
1530 self._math._setinsitu(insitu)
1531 varlist = vars()
1532 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1533 s._add_history("scale", varlist)
1534 if insitu:
1535 self._assign(s)
1536 else:
1537 return s
1538
1539 def set_sourcetype(self, match, matchtype="pattern",
1540 sourcetype="reference"):
1541 """
1542 Set the type of the source to be a source or reference scan
1543 using the provided pattern:
1544 Parameters:
1545 match: a Unix style pattern, regular expression or selector
1546 matchtype: 'pattern' (default) UNIX style pattern or
1547 'regex' regular expression
1548 sourcetype: the type of the source to use (source/reference)
1549 """
1550 varlist = vars()
1551 basesel = self.get_selection()
1552 stype = -1
1553 if sourcetype.lower().startswith("r"):
1554 stype = 1
1555 elif sourcetype.lower().startswith("s"):
1556 stype = 0
1557 else:
1558 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1559 if matchtype.lower().startswith("p"):
1560 matchtype = "pattern"
1561 elif matchtype.lower().startswith("r"):
1562 matchtype = "regex"
1563 else:
1564 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1565 sel = selector()
1566 if isinstance(match, selector):
1567 sel = match
1568 else:
1569 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1570 self.set_selection(basesel+sel)
1571 self._setsourcetype(stype)
1572 self.set_selection(basesel)
1573 self._add_history("set_sourcetype", varlist)
1574
1575 @print_log_dec
1576 def auto_quotient(self, preserve=True, mode='paired'):
1577 """
1578 This function allows you to build quotients automatically.
1579 It assumes the observation has the same number of
1580 "ons" and "offs"
1581 Parameters:
1582 preserve: you can preserve (default) the continuum or
1583 remove it. The equations used are
1584 preserve: Output = Toff * (on/off) - Toff
1585 remove: Output = Toff * (on/off) - Ton
1586 mode: the on/off detection mode
1587 'paired' (default)
1588 identifies 'off' scans by the
1589 trailing '_R' (Mopra/Parkes) or
1590 '_e'/'_w' (Tid) and matches
1591 on/off pairs from the observing pattern
1592 'time'
1593 finds the closest off in time
1594
1595 """
1596 modes = ["time", "paired"]
1597 if not mode in modes:
1598 msg = "please provide valid mode. Valid modes are %s" % (modes)
1599 raise ValueError(msg)
1600 varlist = vars()
1601 s = None
1602 if mode.lower() == "paired":
1603 basesel = self.get_selection()
1604 sel = selector()+basesel
1605 sel.set_query("SRCTYPE==1")
1606 self.set_selection(sel)
1607 offs = self.copy()
1608 sel.set_query("SRCTYPE==0")
1609 self.set_selection(sel)
1610 ons = self.copy()
1611 s = scantable(self._math._quotient(ons, offs, preserve))
1612 self.set_selection(basesel)
1613 elif mode.lower() == "time":
1614 s = scantable(self._math._auto_quotient(self, mode, preserve))
1615 s._add_history("auto_quotient", varlist)
1616 return s
1617
1618 @print_log_dec
1619 def mx_quotient(self, mask = None, weight='median', preserve=True):
1620 """
1621 Form a quotient using "off" beams when observing in "MX" mode.
1622 Parameters:
1623 mask: an optional mask to be used when weight == 'stddev'
1624 weight: How to average the off beams. Default is 'median'.
1625 preserve: you can preserve (default) the continuum or
1626 remove it. The equations used are
1627 preserve: Output = Toff * (on/off) - Toff
1628 remove: Output = Toff * (on/off) - Ton
1629 """
1630 if mask is None: mask = ()
1631 varlist = vars()
1632 on = scantable(self._math._mx_extract(self, 'on'))
1633 preoff = scantable(self._math._mx_extract(self, 'off'))
1634 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1635 from asapmath import quotient
1636 q = quotient(on, off, preserve)
1637 q._add_history("mx_quotient", varlist)
1638 return q
1639
1640 @print_log_dec
1641 def freq_switch(self, insitu=None):
1642 """
1643 Apply frequency switching to the data.
1644 Parameters:
1645 insitu: if False a new scantable is returned.
1646 Otherwise, the switching is done in-situ
1647 The default is taken from .asaprc (False)
1648 Example:
1649 none
1650 """
1651 if insitu is None: insitu = rcParams['insitu']
1652 self._math._setinsitu(insitu)
1653 varlist = vars()
1654 s = scantable(self._math._freqswitch(self))
1655 s._add_history("freq_switch", varlist)
1656 if insitu: self._assign(s)
1657 else: return s
1658
1659 @print_log_dec
1660 def recalc_azel(self):
1661 """
1662 Recalculate the azimuth and elevation for each position.
1663 Parameters:
1664 none
1665 Example:
1666 """
1667 varlist = vars()
1668 self._recalcazel()
1669 self._add_history("recalc_azel", varlist)
1670 return
1671
1672 @print_log_dec
1673 def __add__(self, other):
1674 varlist = vars()
1675 s = None
1676 if isinstance(other, scantable):
1677 s = scantable(self._math._binaryop(self, other, "ADD"))
1678 elif isinstance(other, float):
1679 s = scantable(self._math._unaryop(self, other, "ADD", False))
1680 else:
1681 raise TypeError("Other input is not a scantable or float value")
1682 s._add_history("operator +", varlist)
1683 return s
1684
1685 @print_log_dec
1686 def __sub__(self, other):
1687 """
1688 implicit on all axes and on Tsys
1689 """
1690 varlist = vars()
1691 s = None
1692 if isinstance(other, scantable):
1693 s = scantable(self._math._binaryop(self, other, "SUB"))
1694 elif isinstance(other, float):
1695 s = scantable(self._math._unaryop(self, other, "SUB", False))
1696 else:
1697 raise TypeError("Other input is not a scantable or float value")
1698 s._add_history("operator -", varlist)
1699 return s
1700
1701 @print_log_dec
1702 def __mul__(self, other):
1703 """
1704 implicit on all axes and on Tsys
1705 """
1706 varlist = vars()
1707 s = None
1708 if isinstance(other, scantable):
1709 s = scantable(self._math._binaryop(self, other, "MUL"))
1710 elif isinstance(other, float):
1711 s = scantable(self._math._unaryop(self, other, "MUL", False))
1712 else:
1713 raise TypeError("Other input is not a scantable or float value")
1714 s._add_history("operator *", varlist)
1715 return s
1716
1717
1718 @print_log_dec
1719 def __div__(self, other):
1720 """
1721 implicit on all axes and on Tsys
1722 """
1723 varlist = vars()
1724 s = None
1725 if isinstance(other, scantable):
1726 s = scantable(self._math._binaryop(self, other, "DIV"))
1727 elif isinstance(other, float):
1728 if other == 0.0:
1729 raise ZeroDivisionError("Dividing by zero is not recommended")
1730 s = scantable(self._math._unaryop(self, other, "DIV", False))
1731 else:
1732 raise TypeError("Other input is not a scantable or float value")
1733 s._add_history("operator /", varlist)
1734 return s
1735
1736 def get_fit(self, row=0):
1737 """
1738 Print or return the stored fits for a row in the scantable
1739 Parameters:
1740 row: the row which the fit has been applied to.
1741 """
1742 if row > self.nrow():
1743 return
1744 from asap.asapfit import asapfit
1745 fit = asapfit(self._getfit(row))
1746 if rcParams['verbose']:
1747 print fit
1748 return
1749 else:
1750 return fit.as_dict()
1751
1752 def flag_nans(self):
1753 """
1754 Utility function to flag NaN values in the scantable.
1755 """
1756 import numpy
1757 basesel = self.get_selection()
1758 for i in range(self.nrow()):
1759 sel = self.get_row_selector(i)
1760 self.set_selection(basesel+sel)
1761 nans = numpy.isnan(self._getspectrum(0))
1762 if numpy.any(nans):
1763 bnans = [ bool(v) for v in nans]
1764 self.flag(bnans)
1765 self.set_selection(basesel)
1766
1767 def get_row_selector(self, rowno):
1768 return selector(beams=self.getbeam(rowno),
1769 ifs=self.getif(rowno),
1770 pols=self.getpol(rowno),
1771 scans=self.getscan(rowno),
1772 cycles=self.getcycle(rowno))
1773
1774 def _add_history(self, funcname, parameters):
1775 if not rcParams['scantable.history']:
1776 return
1777 # create date
1778 sep = "##"
1779 from datetime import datetime
1780 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1781 hist = dstr+sep
1782 hist += funcname+sep#cdate+sep
1783 if parameters.has_key('self'): del parameters['self']
1784 for k, v in parameters.iteritems():
1785 if type(v) is dict:
1786 for k2, v2 in v.iteritems():
1787 hist += k2
1788 hist += "="
1789 if isinstance(v2, scantable):
1790 hist += 'scantable'
1791 elif k2 == 'mask':
1792 if isinstance(v2, list) or isinstance(v2, tuple):
1793 hist += str(self._zip_mask(v2))
1794 else:
1795 hist += str(v2)
1796 else:
1797 hist += str(v2)
1798 else:
1799 hist += k
1800 hist += "="
1801 if isinstance(v, scantable):
1802 hist += 'scantable'
1803 elif k == 'mask':
1804 if isinstance(v, list) or isinstance(v, tuple):
1805 hist += str(self._zip_mask(v))
1806 else:
1807 hist += str(v)
1808 else:
1809 hist += str(v)
1810 hist += sep
1811 hist = hist[:-2] # remove trailing '##'
1812 self._addhistory(hist)
1813
1814
1815 def _zip_mask(self, mask):
1816 mask = list(mask)
1817 i = 0
1818 segments = []
1819 while mask[i:].count(1):
1820 i += mask[i:].index(1)
1821 if mask[i:].count(0):
1822 j = i + mask[i:].index(0)
1823 else:
1824 j = len(mask)
1825 segments.append([i, j])
1826 i = j
1827 return segments
1828
1829 def _get_ordinate_label(self):
1830 fu = "("+self.get_fluxunit()+")"
1831 import re
1832 lbl = "Intensity"
1833 if re.match(".K.", fu):
1834 lbl = "Brightness Temperature "+ fu
1835 elif re.match(".Jy.", fu):
1836 lbl = "Flux density "+ fu
1837 return lbl
1838
1839 def _check_ifs(self):
1840 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1841 nchans = filter(lambda t: t > 0, nchans)
1842 return (sum(nchans)/len(nchans) == nchans[0])
1843
1844 def _fill(self, names, unit, average):
1845 import os
1846 from asap._asap import stfiller
1847 first = True
1848 fullnames = []
1849 for name in names:
1850 name = os.path.expandvars(name)
1851 name = os.path.expanduser(name)
1852 if not os.path.exists(name):
1853 msg = "File '%s' does not exists" % (name)
1854 if rcParams['verbose']:
1855 asaplog.push(msg)
1856 print asaplog.pop().strip()
1857 return
1858 raise IOError(msg)
1859 fullnames.append(name)
1860 if average:
1861 asaplog.push('Auto averaging integrations')
1862 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1863 for name in fullnames:
1864 tbl = Scantable(stype)
1865 r = stfiller(tbl)
1866 rx = rcParams['scantable.reference']
1867 r._setreferenceexpr(rx)
1868 msg = "Importing %s..." % (name)
1869 asaplog.push(msg, False)
1870 print_log()
1871 r._open(name, -1, -1)
1872 r._read()
1873 if average:
1874 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1875 if not first:
1876 tbl = self._math._merge([self, tbl])
1877 Scantable.__init__(self, tbl)
1878 r._close()
1879 del r, tbl
1880 first = False
1881 if unit is not None:
1882 self.set_fluxunit(unit)
1883 self.set_freqframe(rcParams['scantable.freqframe'])
1884
1885 def __getitem__(self, key):
1886 if key < 0:
1887 key += self.nrow()
1888 if key >= self.nrow():
1889 raise IndexError("Row index out of range.")
1890 return self._getspectrum(key)
1891
1892 def __setitem__(self, key, value):
1893 if key < 0:
1894 key += self.nrow()
1895 if key >= self.nrow():
1896 raise IndexError("Row index out of range.")
1897 if not hasattr(value, "__len__") or \
1898 len(value) > self.nchan(self.getif(key)):
1899 raise ValueError("Spectrum length doesn't match.")
1900 return self._setspectrum(value, key)
1901
1902 def __len__(self):
1903 return self.nrow()
1904
1905 def __iter__(self):
1906 for i in range(len(self)):
1907 yield self[i]