source: branches/asap-3.x/python/scantable.py@ 2851

Last change on this file since 2851 was 1731, checked in by Malte Marquarding, 15 years ago

Fixed wrong base class call in shift_refpix

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 71.4 KB
Line 
1import os
2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
7from asap._asap import Scantable
8from asap import rcParams
9from asap import print_log, print_log_dec
10from asap import asaplog
11from asap import selector
12from asap import linecatalog
13from asap.coordinate import coordinate
14from asap import _n_bools, mask_not, mask_and, mask_or
15
16
def preserve_selection(func):
    """Decorator that saves the scantable's current selection before
    calling *func* and restores it afterwards.

    The restore happens in a finally block, so the original selection is
    re-applied even when *func* raises — the old version left the table
    with whatever selection *func* had set at the point of failure.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        basesel = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            # always restore, success or failure
            obj.set_selection(basesel)
    return wrap
25
26
def is_scantable(filename):
    """Return True if *filename* looks like an ASAP scantable on disk.

    A scantable is a directory that contains a 'table.info' file but no
    'table.f1' file.
    """
    if not os.path.isdir(filename):
        return False
    has_f1 = os.path.exists(filename + '/table.f1')
    has_info = os.path.exists(filename + '/table.info')
    return has_info and not has_f1
31
32
33class scantable(Scantable):
34 """
35 The ASAP container for scans
36 """
37 @print_log_dec
38 def __init__(self, filename, average=None, unit=None, parallactify=None):
39 """
40 Create a scantable from a saved one or make a reference
41 Parameters:
42 filename: the name of an asap table on disk
43 or
44 the name of a rpfits/sdfits/ms file
45 (integrations within scans are auto averaged
46 and the whole file is read)
47 or
48 [advanced] a reference to an existing
49 scantable
50 average: average all integrations withinb a scan on read.
51 The default (True) is taken from .asaprc.
52 unit: brightness unit; must be consistent with K or Jy.
53 Over-rides the default selected by the reader
54 (input rpfits/sdfits/ms) or replaces the value
55 in existing scantables
56 parallactify: Indcicate that the data had been parallatified.
57 Default is taken form rc file.
58 """
59 if average is None:
60 average = rcParams['scantable.autoaverage']
61 parallactify = parallactify or rcParams['scantable.parallactify']
62 varlist = vars()
63 from asap._asap import stmath
64 self._math = stmath()
65 if isinstance(filename, Scantable):
66 Scantable.__init__(self, filename)
67 else:
68 if isinstance(filename, str):
69 filename = os.path.expandvars(filename)
70 filename = os.path.expanduser(filename)
71 if not os.path.exists(filename):
72 s = "File '%s' not found." % (filename)
73 if rcParams['verbose']:
74 asaplog.push(s)
75 return
76 raise IOError(s)
77 if is_scantable(filename):
78 ondisk = rcParams['scantable.storage'] == 'disk'
79 Scantable.__init__(self, filename, ondisk)
80 if unit is not None:
81 self.set_fluxunit(unit)
82 self.set_freqframe(rcParams['scantable.freqframe'])
83 else:
84 self._fill([filename], unit, average)
85 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
86 and isinstance(filename[-1], str):
87 self._fill(filename, unit, average)
88 self.parallactify(parallactify)
89 self._add_history("scantable", varlist)
90
91 @print_log_dec
92 def save(self, name=None, format=None, overwrite=False):
93 """
94 Store the scantable on disk. This can be an asap (aips++) Table,
95 SDFITS or MS2 format.
96 Parameters:
97 name: the name of the outputfile. For format "ASCII"
98 this is the root file name (data in 'name'.txt
99 and header in 'name'_header.txt)
100 format: an optional file format. Default is ASAP.
101 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
102 'SDFITS' (save as SDFITS file)
103 'ASCII' (saves as ascii text file)
104 'MS2' (saves as an aips++
105 MeasurementSet V2)
106 'FITS' (save as image FITS - not
107 readable by class)
108 'CLASS' (save as FITS readable by CLASS)
109 overwrite: If the file should be overwritten if it exists.
110 The default False is to return with warning
111 without writing the output. USE WITH CARE.
112 Example:
113 scan.save('myscan.asap')
114 scan.save('myscan.sdfits', 'SDFITS')
115 """
116 from os import path
117 format = format or rcParams['scantable.save']
118 suffix = '.'+format.lower()
119 if name is None or name == "":
120 name = 'scantable'+suffix
121 msg = "No filename given. Using default name %s..." % name
122 asaplog.push(msg)
123 name = path.expandvars(name)
124 if path.isfile(name) or path.isdir(name):
125 if not overwrite:
126 msg = "File %s exists." % name
127 if rcParams['verbose']:
128 print msg
129 return
130 else:
131 raise IOError(msg)
132 format2 = format.upper()
133 if format2 == 'ASAP':
134 self._save(name)
135 else:
136 from asap._asap import stwriter as stw
137 writer = stw(format2)
138 writer.write(self, name)
139 return
140
141 def copy(self):
142 """
143 Return a copy of this scantable.
144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
146 Parameters:
147 none
148 Example:
149 copiedscan = scan.copy()
150 """
151 sd = scantable(Scantable._copy(self))
152 return sd
153
154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable where the specified scan number(s) has(have)
157 been dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
166 print "Please specify a scanno to drop from the scantable"
167 return
168 else:
169 raise RuntimeError("No scan given")
170 try:
171 scanid = _to_list(scanid)
172 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
173 for sid in scanid: allscans.remove(sid)
174 if len(allscans) == 0:
175 raise ValueError("Can't remove all scans")
176 except ValueError:
177 if rcParams['verbose']:
178 print "Couldn't find any match."
179 return
180 else: raise
181 try:
182 sel = selector(scans=allscans)
183 return self._select_copy(sel)
184 except RuntimeError:
185 if rcParams['verbose']:
186 print "Couldn't find any match."
187 else:
188 raise
189
190 def _select_copy(self, selection):
191 orig = self.get_selection()
192 self.set_selection(orig+selection)
193 cp = self.copy()
194 self.set_selection(orig)
195 return cp
196
197 def get_scan(self, scanid=None):
198 """
199 Return a specific scan (by scanno) or collection of scans (by
200 source name) in a new scantable.
201 Note:
202 See scantable.drop_scan() for the inverse operation.
203 Parameters:
204 scanid: a (list of) scanno or a source name, unix-style
205 patterns are accepted for source name matching, e.g.
206 '*_R' gets all 'ref scans
207 Example:
208 # get all scans containing the source '323p459'
209 newscan = scan.get_scan('323p459')
210 # get all 'off' scans
211 refscans = scan.get_scan('*_R')
212 # get a susbset of scans by scanno (as listed in scan.summary())
213 newscan = scan.get_scan([0, 2, 7, 10])
214 """
215 if scanid is None:
216 if rcParams['verbose']:
217 print "Please specify a scan no or name to " \
218 "retrieve from the scantable"
219 return
220 else:
221 raise RuntimeError("No scan given")
222
223 try:
224 bsel = self.get_selection()
225 sel = selector()
226 if type(scanid) is str:
227 sel.set_name(scanid)
228 return self._select_copy(sel)
229 elif type(scanid) is int:
230 sel.set_scans([scanid])
231 return self._select_copy(sel)
232 elif type(scanid) is list:
233 sel.set_scans(scanid)
234 return self._select_copy(sel)
235 else:
236 msg = "Illegal scanid type, use 'int' or 'list' if ints."
237 if rcParams['verbose']:
238 print msg
239 else:
240 raise TypeError(msg)
241 except RuntimeError:
242 if rcParams['verbose']: print "Couldn't find any match."
243 else: raise
244
245 def __str__(self):
246 return Scantable._summary(self, True)
247
248 def summary(self, filename=None):
249 """
250 Print a summary of the contents of this scantable.
251 Parameters:
252 filename: the name of a file to write the putput to
253 Default - no file output
254 """
255 info = Scantable._summary(self, True)
256 if filename is not None:
257 if filename is "":
258 filename = 'scantable_summary.txt'
259 from os.path import expandvars, isdir
260 filename = expandvars(filename)
261 if not isdir(filename):
262 data = open(filename, 'w')
263 data.write(info)
264 data.close()
265 else:
266 msg = "Illegal file name '%s'." % (filename)
267 if rcParams['verbose']:
268 print msg
269 else:
270 raise IOError(msg)
271 if rcParams['verbose']:
272 try:
273 from IPython.genutils import page as pager
274 except ImportError:
275 from pydoc import pager
276 pager(info)
277 else:
278 return info
279
280 def get_spectrum(self, rowno):
281 """Return the spectrum for the current row in the scantable as a list.
282 Parameters:
283 rowno: the row number to retrieve the spectrum from
284 """
285 return self._getspectrum(rowno)
286
287 def get_mask(self, rowno):
288 """Return the mask for the current row in the scantable as a list.
289 Parameters:
290 rowno: the row number to retrieve the mask from
291 """
292 return self._getmask(rowno)
293
294 def set_spectrum(self, spec, rowno):
295 """Return the spectrum for the current row in the scantable as a list.
296 Parameters:
297 spec: the spectrum
298 rowno: the row number to set the spectrum for
299 """
300 assert(len(spec) == self.nchan())
301 return self._setspectrum(spec, rowno)
302
303 def get_coordinate(self, rowno):
304 """Return the (spectral) coordinate for a a given 'rowno'.
305 NOTE:
306 * This coordinate is only valid until a scantable method modifies
307 the frequency axis.
308 * This coordinate does contain the original frequency set-up
309 NOT the new frame. The conversions however are done using the user
310 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
311 use scantable.freq_align first. Without it there is no closure,
312 i.e.
313 c = myscan.get_coordinate(0)
314 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
315
316 Parameters:
317 rowno: the row number for the spectral coordinate
318
319 """
320 return coordinate(Scantable.get_coordinate(self, rowno))
321
322 def get_selection(self):
323 """
324 Get the selection object currently set on this scantable.
325 Parameters:
326 none
327 Example:
328 sel = scan.get_selection()
329 sel.set_ifs(0) # select IF 0
330 scan.set_selection(sel) # apply modified selection
331 """
332 return selector(self._getselection())
333
334 def set_selection(self, selection=None, **kw):
335 """
336 Select a subset of the data. All following operations on this scantable
337 are only applied to thi selection.
338 Parameters:
339 selection: a selector object (default unset the selection),
340
341 or
342
343 any combination of
344 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
345
346 Examples:
347 sel = selector() # create a selection object
348 self.set_scans([0, 3]) # select SCANNO 0 and 3
349 scan.set_selection(sel) # set the selection
350 scan.summary() # will only print summary of scanno 0 an 3
351 scan.set_selection() # unset the selection
352 # or the equivalent
353 scan.set_selection(scans=[0,3])
354 scan.summary() # will only print summary of scanno 0 an 3
355 scan.set_selection() # unset the selection
356 """
357 if selection is None:
358 # reset
359 if len(kw) == 0:
360 selection = selector()
361 else:
362 # try keywords
363 for k in kw:
364 if k not in selector.fields:
365 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
366 selection = selector(**kw)
367 self._setselection(selection)
368
369 def stats(self, stat='stddev', mask=None):
370 """
371 Determine the specified statistic of the current beam/if/pol
372 Takes a 'mask' as an optional parameter to specify which
373 channels should be excluded.
374 Parameters:
375 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
376 'var', 'stddev', 'avdev', 'rms', 'median'
377 mask: an optional mask specifying where the statistic
378 should be determined.
379 Example:
380 scan.set_unit('channel')
381 msk = scan.create_mask([100, 200], [500, 600])
382 scan.stats(stat='mean', mask=m)
383 """
384 mask = mask or []
385 if not self._check_ifs():
386 raise ValueError("Cannot apply mask as the IFs have different "
387 "number of channels. Please use setselection() "
388 "to select individual IFs")
389
390 statvals = self._math._stats(self, mask, stat)
391 def cb(i):
392 return statvals[i]
393
394 return self._row_callback(cb, stat)
395
396 def stddev(self, mask=None):
397 """
398 Determine the standard deviation of the current beam/if/pol
399 Takes a 'mask' as an optional parameter to specify which
400 channels should be excluded.
401 Parameters:
402 mask: an optional mask specifying where the standard
403 deviation should be determined.
404
405 Example:
406 scan.set_unit('channel')
407 msk = scan.create_mask([100, 200], [500, 600])
408 scan.stddev(mask=m)
409 """
410 return self.stats(stat='stddev', mask=mask);
411
412
413 def get_column_names(self):
414 """
415 Return a list of column names, which can be used for selection.
416 """
417 return list(Scantable.get_column_names(self))
418
419 def get_tsys(self, row=-1):
420 """
421 Return the System temperatures.
422 Returns:
423 a list of Tsys values for the current selection
424 """
425 if row > -1:
426 return self._get_column(self._gettsys, row)
427 return self._row_callback(self._gettsys, "Tsys")
428
429
430 def get_weather(self, row=-1):
431 values = self._get_column(self._get_weather, row)
432 if row > -1:
433 return {'temperature': values[0],
434 'pressure': values[1], 'humidity' : values[2],
435 'windspeed' : values[3], 'windaz' : values[4]
436 }
437 else:
438 out = []
439 for r in values:
440
441 out.append({'temperature': r[0],
442 'pressure': r[1], 'humidity' : r[2],
443 'windspeed' : r[3], 'windaz' : r[4]
444 })
445 return out
446
    def _row_callback(self, callback, label):
        """Apply *callback(rowno)* to every row and collect the results.

        When rcParams['verbose'] is set, also print a formatted report:
        one entry per row showing scan/source/time (and beam/IF/pol when
        those axes have more than one value) followed by the callback's
        value formatted as a float.

        Parameters:
            callback:    callable taking a row number; its return value is
                         collected and %f-formatted into the report
            label:       heading printed above the report
        Returns:
            the list of per-row callback results
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that actually vary in this data set
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            # append first so outvec[i] is valid for the formatting below
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'
        if rcParams['verbose']:
            print sep
            print " %s" % (label)
            print sep
            print out
        return outvec
469
470 def _get_column(self, callback, row=-1):
471 """
472 """
473 if row == -1:
474 return [callback(i) for i in range(self.nrow())]
475 else:
476 if 0 <= row < self.nrow():
477 return callback(row)
478
479
480 def get_time(self, row=-1, asdatetime=False):
481 """
482 Get a list of time stamps for the observations.
483 Return a datetime object for each integration time stamp in the scantable.
484 Parameters:
485 row: row no of integration. Default -1 return all rows
486 asdatetime: return values as datetime objects rather than strings
487 Example:
488 none
489 """
490 from time import strptime
491 from datetime import datetime
492 times = self._get_column(self._gettime, row)
493 if not asdatetime:
494 return times
495 format = "%Y/%m/%d/%H:%M:%S"
496 if isinstance(times, list):
497 return [datetime(*strptime(i, format)[:6]) for i in times]
498 else:
499 return datetime(*strptime(times, format)[:6])
500
501
502 def get_inttime(self, row=-1):
503 """
504 Get a list of integration times for the observations.
505 Return a time in seconds for each integration in the scantable.
506 Parameters:
507 row: row no of integration. Default -1 return all rows.
508 Example:
509 none
510 """
511 return self._get_column(self._getinttime, row)
512
513
514 def get_sourcename(self, row=-1):
515 """
516 Get a list source names for the observations.
517 Return a string for each integration in the scantable.
518 Parameters:
519 row: row no of integration. Default -1 return all rows.
520 Example:
521 none
522 """
523 return self._get_column(self._getsourcename, row)
524
525 def get_elevation(self, row=-1):
526 """
527 Get a list of elevations for the observations.
528 Return a float for each integration in the scantable.
529 Parameters:
530 row: row no of integration. Default -1 return all rows.
531 Example:
532 none
533 """
534 return self._get_column(self._getelevation, row)
535
536 def get_azimuth(self, row=-1):
537 """
538 Get a list of azimuths for the observations.
539 Return a float for each integration in the scantable.
540 Parameters:
541 row: row no of integration. Default -1 return all rows.
542 Example:
543 none
544 """
545 return self._get_column(self._getazimuth, row)
546
547 def get_parangle(self, row=-1):
548 """
549 Get a list of parallactic angles for the observations.
550 Return a float for each integration in the scantable.
551 Parameters:
552 row: row no of integration. Default -1 return all rows.
553 Example:
554 none
555 """
556 return self._get_column(self._getparangle, row)
557
558 def get_direction(self, row=-1):
559 """
560 Get a list of Positions on the sky (direction) for the observations.
561 Return a string for each integration in the scantable.
562 Parameters:
563 row: row no of integration. Default -1 return all rows
564 Example:
565 none
566 """
567 return self._get_column(self._getdirection, row)
568
569 def get_directionval(self, row=-1):
570 """
571 Get a list of Positions on the sky (direction) for the observations.
572 Return a float for each integration in the scantable.
573 Parameters:
574 row: row no of integration. Default -1 return all rows
575 Example:
576 none
577 """
578 return self._get_column(self._getdirectionvec, row)
579
580 @print_log_dec
581 def set_unit(self, unit='channel'):
582 """
583 Set the unit for all following operations on this scantable
584 Parameters:
585 unit: optional unit, default is 'channel'
586 one of '*Hz', 'km/s', 'channel', ''
587 """
588 varlist = vars()
589 if unit in ['', 'pixel', 'channel']:
590 unit = ''
591 inf = list(self._getcoordinfo())
592 inf[0] = unit
593 self._setcoordinfo(inf)
594 self._add_history("set_unit", varlist)
595
596 @print_log_dec
597 def set_instrument(self, instr):
598 """
599 Set the instrument for subsequent processing.
600 Parameters:
601 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
602 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
603 """
604 self._setInstrument(instr)
605 self._add_history("set_instument", vars())
606
607 @print_log_dec
608 def set_feedtype(self, feedtype):
609 """
610 Overwrite the feed type, which might not be set correctly.
611 Parameters:
612 feedtype: 'linear' or 'circular'
613 """
614 self._setfeedtype(feedtype)
615 self._add_history("set_feedtype", vars())
616
617 @print_log_dec
618 def set_doppler(self, doppler='RADIO'):
619 """
620 Set the doppler for all following operations on this scantable.
621 Parameters:
622 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
623 """
624 varlist = vars()
625 inf = list(self._getcoordinfo())
626 inf[2] = doppler
627 self._setcoordinfo(inf)
628 self._add_history("set_doppler", vars())
629
630 @print_log_dec
631 def set_freqframe(self, frame=None):
632 """
633 Set the frame type of the Spectral Axis.
634 Parameters:
635 frame: an optional frame type, default 'LSRK'. Valid frames are:
636 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
637 'GEO', 'GALACTO', 'LGROUP', 'CMB'
638 Examples:
639 scan.set_freqframe('BARY')
640 """
641 frame = frame or rcParams['scantable.freqframe']
642 varlist = vars()
643 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
644 'GEO', 'GALACTO', 'LGROUP', 'CMB']
645
646 if frame in valid:
647 inf = list(self._getcoordinfo())
648 inf[1] = frame
649 self._setcoordinfo(inf)
650 self._add_history("set_freqframe", varlist)
651 else:
652 msg = "Please specify a valid freq type. Valid types are:\n", valid
653 if rcParams['verbose']:
654 print msg
655 else:
656 raise TypeError(msg)
657
658 def set_dirframe(self, frame=""):
659 """
660 Set the frame type of the Direction on the sky.
661 Parameters:
662 frame: an optional frame type, default ''. Valid frames are:
663 'J2000', 'B1950', 'GALACTIC'
664 Examples:
665 scan.set_dirframe('GALACTIC')
666 """
667 varlist = vars()
668 try:
669 Scantable.set_dirframe(self, frame)
670 except RuntimeError, msg:
671 if rcParams['verbose']:
672 print msg
673 else:
674 raise
675 self._add_history("set_dirframe", varlist)
676
677 def get_unit(self):
678 """
679 Get the default unit set in this scantable
680 Returns:
681 A unit string
682 """
683 inf = self._getcoordinfo()
684 unit = inf[0]
685 if unit == '': unit = 'channel'
686 return unit
687
688 def get_abcissa(self, rowno=0):
689 """
690 Get the abcissa in the current coordinate setup for the currently
691 selected Beam/IF/Pol
692 Parameters:
693 rowno: an optional row number in the scantable. Default is the
694 first row, i.e. rowno=0
695 Returns:
696 The abcissa values and the format string (as a dictionary)
697 """
698 abc = self._getabcissa(rowno)
699 lbl = self._getabcissalabel(rowno)
700 return abc, lbl
701
702 def flag(self, mask=None):
703 """
704 Flag the selected data using an optional channel mask.
705 Parameters:
706 mask: an optional channel mask, created with create_mask. Default
707 (no mask) is all channels.
708 """
709 varlist = vars()
710 mask = mask or []
711 try:
712 self._flag(mask)
713 except RuntimeError, msg:
714 if rcParams['verbose']:
715 print msg
716 return
717 else: raise
718 self._add_history("flag", varlist)
719
720 @print_log_dec
721 def lag_flag(self, start, end, unit="MHz", insitu=None):
722 """
723 Flag the data in 'lag' space by providing a frequency to remove.
724 Flagged data in the scantable gets interpolated over the region.
725 No taper is applied.
726 Parameters:
727 start: the start frequency (really a period within the
728 bandwidth) or period to remove
729 end: the end frequency or period to remove
730 unit: the frequency unit (default "MHz") or "" for
731 explicit lag channels
732 Notes:
733 It is recommended to flag edges of the band or strong
734 signals beforehand.
735 """
736 if insitu is None: insitu = rcParams['insitu']
737 self._math._setinsitu(insitu)
738 varlist = vars()
739 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
740 if not (unit == "" or base.has_key(unit)):
741 raise ValueError("%s is not a valid unit." % unit)
742 try:
743 if unit == "":
744 s = scantable(self._math._lag_flag(self, start, end, "lags"))
745 else:
746 s = scantable(self._math._lag_flag(self, start*base[unit],
747 end*base[unit], "frequency"))
748 except RuntimeError, msg:
749 if rcParams['verbose']:
750 print msg
751 return
752 else: raise
753 s._add_history("lag_flag", varlist)
754 if insitu:
755 self._assign(s)
756 else:
757 return s
758
    @print_log_dec
    def create_mask(self, *args, **kwargs):
        """
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.
        Parameters:
            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked
            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED
            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.
        Example:
            scan.set_unit('channel')
            a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'
            c)
            mask only channel 400
            msk = scan.create_mask([400])
        """
        row = kwargs.get("row", 0)
        # abcissa of the reference row, in the current unit
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                # NOTE(review): 'i' is the boolean from _check_ifs() here,
                # so getif(i) is effectively getif(False) == getif(0) when
                # the IFs differ — confirm this is the intended row.
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        msk = _n_bools(n, False)
        # Accept either create_mask([a,b],[c,d]) or create_mask([[a,b],[c,d]]):
        # if the last element of the last arg is a number, args is already a
        # sequence of windows; otherwise the windows were passed as one list.
        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            if len(window) == 1:
                # single channel: degenerate [x, x] window
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # normalise so window[0] <= window[1]
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                # inclusive on both edges
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
822
823 def get_restfreqs(self):
824 """
825 Get the restfrequency(s) stored in this scantable.
826 The return value(s) are always of unit 'Hz'
827 Parameters:
828 none
829 Returns:
830 a list of doubles
831 """
832 return list(self._getrestfreqs())
833
834
    def set_restfreqs(self, freqs=None, unit='Hz'):
        """
        Set or replace the restfrequency specified and
        If the 'freqs' argument holds a scalar,
        then that rest frequency will be applied to all the selected
        data. If the 'freqs' argument holds
        a vector, then it MUST be of equal or smaller length than
        the number of IFs (and the available restfrequencies will be
        replaced by this vector). In this case, *all* data have
        the restfrequency set per IF according
        to the corresponding value you give in the 'freqs' vector.
        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
        IF 1 gets restfreq 2e9.
        You can also specify the frequencies via a linecatalog.

        Parameters:
            freqs:    list of rest frequency values or string identifiers
            unit:     unit for rest frequency (default 'Hz')

        Example:
            # set the given restfrequency for the whole table
            scan.set_restfreqs(freqs=1.4e9)
            # If the number of IFs in the data is >= 2 IF0 gets the first
            # value IF1 the second...
            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
            #set the given restfrequency for the whole table (by name)
            scan.set_restfreqs(freqs="OH1667")

        Note:
            To do more sophisticated Restfrequency setting, e.g. on a
            source and IF basis, use scantable.set_selection() before using
            this function.
            # provided your scantable is called scan
            selection = selector()
            selection.set_name("ORION*")
            selection.set_ifs([1])
            scan.set_selection(selection)
            scan.set_restfreqs(freqs=86.6e9)

        """
        varlist = vars()
        from asap import linecatalog
        # simple scalar value: applies to all selected data
        if isinstance(freqs, int) or isinstance(freqs, float):
            self._setrestfreqs(freqs, "",unit)
        # list of values: one per IF, in getifnos() order
        elif isinstance(freqs, list) or isinstance(freqs, tuple):
            # list values are scalars
            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
                sel = selector()
                # remember the user's selection; it is temporarily replaced
                # by a per-IF selection below
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i], "",unit)
                self._setselection(savesel)
            # list values are dicts with 'value' and 'name' keys
            # (values are interpreted as MHz, ignoring the 'unit' argument)
            elif isinstance(freqs[-1], dict):
                sel = selector()
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i]["value"],
                                       freqs[i]["name"], "MHz")
                self._setselection(savesel)
        # freqs are to be taken from a linecatalog (frequencies in MHz)
        elif isinstance(freqs, linecatalog):
            sel = selector()
            savesel = self._getselection()
            iflist = self.getifnos()
            for i in xrange(freqs.nrow()):
                sel.set_ifs(iflist[i])
                self._setselection(sel)
                self._setrestfreqs(freqs.get_frequency(i),
                                   freqs.get_name(i), "MHz")
                # ensure that we are not iterating past nIF
                if i == self.nif()-1: break
            self._setselection(savesel)
        else:
            # unsupported 'freqs' type: silently do nothing (no history)
            return
        self._add_history("set_restfreqs", varlist)
919
920 def shift_refpix(self, delta):
921 """
922 Shift the reference pixel of the Spectra Coordinate by an
923 integer amount.
924 Parameters:
925 delta: the amount to shift by
926 Note:
927 Be careful using this with broadband data.
928 """
929 Scantable.shift_refpix(self, delta)
930
931 def history(self, filename=None):
932 """
933 Print the history. Optionally to a file.
934 Parameters:
935 filename: The name of the file to save the history to.
936 """
937 hist = list(self._gethistory())
938 out = "-"*80
939 for h in hist:
940 if h.startswith("---"):
941 out += "\n"+h
942 else:
943 items = h.split("##")
944 date = items[0]
945 func = items[1]
946 items = items[2:]
947 out += "\n"+date+"\n"
948 out += "Function: %s\n Parameters:" % (func)
949 for i in items:
950 s = i.split("=")
951 out += "\n %s = %s" % (s[0], s[1])
952 out += "\n"+"-"*80
953 if filename is not None:
954 if filename is "":
955 filename = 'scantable_history.txt'
956 import os
957 filename = os.path.expandvars(os.path.expanduser(filename))
958 if not os.path.isdir(filename):
959 data = open(filename, 'w')
960 data.write(out)
961 data.close()
962 else:
963 msg = "Illegal file name '%s'." % (filename)
964 if rcParams['verbose']:
965 print msg
966 else:
967 raise IOError(msg)
968 if rcParams['verbose']:
969 try:
970 from IPython.genutils import page as pager
971 except ImportError:
972 from pydoc import pager
973 pager(out)
974 else:
975 return out
976 return
977 #
978 # Maths business
979 #
980 @print_log_dec
981 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
982 """
983 Return the (time) weighted average of a scan.
984 Note:
985 in channels only - align if necessary
986 Parameters:
987 mask: an optional mask (only used for 'var' and 'tsys'
988 weighting)
989 scanav: True averages each scan separately
990 False (default) averages all scans together,
991 weight: Weighting scheme.
992 'none' (mean no weight)
993 'var' (1/var(spec) weighted)
994 'tsys' (1/Tsys**2 weighted)
995 'tint' (integration time weighted)
996 'tintsys' (Tint/Tsys**2)
997 'median' ( median averaging)
998 The default is 'tint'
999 align: align the spectra in velocity before averaging. It takes
1000 the time of the first spectrum as reference time.
1001 Example:
1002 # time average the scantable without using a mask
1003 newscan = scan.average_time()
1004 """
1005 varlist = vars()
1006 weight = weight or 'TINT'
1007 mask = mask or ()
1008 scanav = (scanav and 'SCAN') or 'NONE'
1009 scan = (self, )
1010 try:
1011 if align:
1012 scan = (self.freq_align(insitu=False), )
1013 s = None
1014 if weight.upper() == 'MEDIAN':
1015 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1016 scanav))
1017 else:
1018 s = scantable(self._math._average(scan, mask, weight.upper(),
1019 scanav))
1020 except RuntimeError, msg:
1021 if rcParams['verbose']:
1022 print msg
1023 return
1024 else: raise
1025 s._add_history("average_time", varlist)
1026 return s
1027
1028 @print_log_dec
1029 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1030 """
1031 Return a scan where all spectra are converted to either
1032 Jansky or Kelvin depending upon the flux units of the scan table.
1033 By default the function tries to look the values up internally.
1034 If it can't find them (or if you want to over-ride), you must
1035 specify EITHER jyperk OR eta (and D which it will try to look up
1036 also if you don't set it). jyperk takes precedence if you set both.
1037 Parameters:
1038 jyperk: the Jy / K conversion factor
1039 eta: the aperture efficiency
1040 d: the geomtric diameter (metres)
1041 insitu: if False a new scantable is returned.
1042 Otherwise, the scaling is done in-situ
1043 The default is taken from .asaprc (False)
1044 """
1045 if insitu is None: insitu = rcParams['insitu']
1046 self._math._setinsitu(insitu)
1047 varlist = vars()
1048 jyperk = jyperk or -1.0
1049 d = d or -1.0
1050 eta = eta or -1.0
1051 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1052 s._add_history("convert_flux", varlist)
1053 if insitu: self._assign(s)
1054 else: return s
1055
1056 @print_log_dec
1057 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1058 """
1059 Return a scan after applying a gain-elevation correction.
1060 The correction can be made via either a polynomial or a
1061 table-based interpolation (and extrapolation if necessary).
1062 You specify polynomial coefficients, an ascii table or neither.
1063 If you specify neither, then a polynomial correction will be made
1064 with built in coefficients known for certain telescopes (an error
1065 will occur if the instrument is not known).
1066 The data and Tsys are *divided* by the scaling factors.
1067 Parameters:
1068 poly: Polynomial coefficients (default None) to compute a
1069 gain-elevation correction as a function of
1070 elevation (in degrees).
1071 filename: The name of an ascii file holding correction factors.
1072 The first row of the ascii file must give the column
1073 names and these MUST include columns
1074 "ELEVATION" (degrees) and "FACTOR" (multiply data
1075 by this) somewhere.
1076 The second row must give the data type of the
1077 column. Use 'R' for Real and 'I' for Integer.
1078 An example file would be
1079 (actual factors are arbitrary) :
1080
1081 TIME ELEVATION FACTOR
1082 R R R
1083 0.1 0 0.8
1084 0.2 20 0.85
1085 0.3 40 0.9
1086 0.4 60 0.85
1087 0.5 80 0.8
1088 0.6 90 0.75
1089 method: Interpolation method when correcting from a table.
1090 Values are "nearest", "linear" (default), "cubic"
1091 and "spline"
1092 insitu: if False a new scantable is returned.
1093 Otherwise, the scaling is done in-situ
1094 The default is taken from .asaprc (False)
1095 """
1096
1097 if insitu is None: insitu = rcParams['insitu']
1098 self._math._setinsitu(insitu)
1099 varlist = vars()
1100 poly = poly or ()
1101 from os.path import expandvars
1102 filename = expandvars(filename)
1103 s = scantable(self._math._gainel(self, poly, filename, method))
1104 s._add_history("gain_el", varlist)
1105 if insitu:
1106 self._assign(s)
1107 else:
1108 return s
1109
1110 @print_log_dec
1111 def freq_align(self, reftime=None, method='cubic', insitu=None):
1112 """
1113 Return a scan where all rows have been aligned in frequency/velocity.
1114 The alignment frequency frame (e.g. LSRK) is that set by function
1115 set_freqframe.
1116 Parameters:
1117 reftime: reference time to align at. By default, the time of
1118 the first row of data is used.
1119 method: Interpolation method for regridding the spectra.
1120 Choose from "nearest", "linear", "cubic" (default)
1121 and "spline"
1122 insitu: if False a new scantable is returned.
1123 Otherwise, the scaling is done in-situ
1124 The default is taken from .asaprc (False)
1125 """
1126 if insitu is None: insitu = rcParams["insitu"]
1127 self._math._setinsitu(insitu)
1128 varlist = vars()
1129 reftime = reftime or ""
1130 s = scantable(self._math._freq_align(self, reftime, method))
1131 s._add_history("freq_align", varlist)
1132 if insitu: self._assign(s)
1133 else: return s
1134
1135 @print_log_dec
1136 def opacity(self, tau=None, insitu=None):
1137 """
1138 Apply an opacity correction. The data
1139 and Tsys are multiplied by the correction factor.
1140 Parameters:
1141 tau: (list of) opacity from which the correction factor is
1142 exp(tau*ZD)
1143 where ZD is the zenith-distance.
1144 If a list is provided, it has to be of length nIF,
1145 nIF*nPol or 1 and in order of IF/POL, e.g.
1146 [opif0pol0, opif0pol1, opif1pol0 ...]
1147 if tau is `None` the opacities are determined from a
1148 model.
1149 insitu: if False a new scantable is returned.
1150 Otherwise, the scaling is done in-situ
1151 The default is taken from .asaprc (False)
1152 """
1153 if insitu is None: insitu = rcParams['insitu']
1154 self._math._setinsitu(insitu)
1155 varlist = vars()
1156 if not hasattr(tau, "__len__"):
1157 tau = [tau]
1158 s = scantable(self._math._opacity(self, tau))
1159 s._add_history("opacity", varlist)
1160 if insitu: self._assign(s)
1161 else: return s
1162
1163 @print_log_dec
1164 def bin(self, width=5, insitu=None):
1165 """
1166 Return a scan where all spectra have been binned up.
1167 Parameters:
1168 width: The bin width (default=5) in pixels
1169 insitu: if False a new scantable is returned.
1170 Otherwise, the scaling is done in-situ
1171 The default is taken from .asaprc (False)
1172 """
1173 if insitu is None: insitu = rcParams['insitu']
1174 self._math._setinsitu(insitu)
1175 varlist = vars()
1176 s = scantable(self._math._bin(self, width))
1177 s._add_history("bin", varlist)
1178 if insitu:
1179 self._assign(s)
1180 else:
1181 return s
1182
1183 @print_log_dec
1184 def resample(self, width=5, method='cubic', insitu=None):
1185 """
1186 Return a scan where all spectra have been binned up.
1187
1188 Parameters:
1189 width: The bin width (default=5) in pixels
1190 method: Interpolation method when correcting from a table.
1191 Values are "nearest", "linear", "cubic" (default)
1192 and "spline"
1193 insitu: if False a new scantable is returned.
1194 Otherwise, the scaling is done in-situ
1195 The default is taken from .asaprc (False)
1196 """
1197 if insitu is None: insitu = rcParams['insitu']
1198 self._math._setinsitu(insitu)
1199 varlist = vars()
1200 s = scantable(self._math._resample(self, method, width))
1201 s._add_history("resample", varlist)
1202 if insitu: self._assign(s)
1203 else: return s
1204
1205 @print_log_dec
1206 def average_pol(self, mask=None, weight='none'):
1207 """
1208 Average the Polarisations together.
1209 Parameters:
1210 mask: An optional mask defining the region, where the
1211 averaging will be applied. The output will have all
1212 specified points masked.
1213 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1214 weighted), or 'tsys' (1/Tsys**2 weighted)
1215 """
1216 varlist = vars()
1217 mask = mask or ()
1218 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1219 s._add_history("average_pol", varlist)
1220 return s
1221
1222 @print_log_dec
1223 def average_beam(self, mask=None, weight='none'):
1224 """
1225 Average the Beams together.
1226 Parameters:
1227 mask: An optional mask defining the region, where the
1228 averaging will be applied. The output will have all
1229 specified points masked.
1230 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1231 weighted), or 'tsys' (1/Tsys**2 weighted)
1232 """
1233 varlist = vars()
1234 mask = mask or ()
1235 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1236 s._add_history("average_beam", varlist)
1237 return s
1238
1239 def parallactify(self, pflag):
1240 """
1241 Set a flag to inidcate whether this data should be treated as having
1242 been 'parallactified' (total phase == 0.0)
1243 Parameters:
1244 pflag: Bool inidcating whether to turn this on (True) or
1245 off (False)
1246 """
1247 varlist = vars()
1248 self._parallactify(pflag)
1249 self._add_history("parallactify", varlist)
1250
1251 @print_log_dec
1252 def convert_pol(self, poltype=None):
1253 """
1254 Convert the data to a different polarisation type.
1255 Note that you will need cross-polarisation terms for most conversions.
1256 Parameters:
1257 poltype: The new polarisation type. Valid types are:
1258 "linear", "circular", "stokes" and "linpol"
1259 """
1260 varlist = vars()
1261 try:
1262 s = scantable(self._math._convertpol(self, poltype))
1263 except RuntimeError, msg:
1264 if rcParams['verbose']:
1265 print msg
1266 return
1267 else:
1268 raise
1269 s._add_history("convert_pol", varlist)
1270 return s
1271
1272 @print_log_dec
1273 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1274 """
1275 Smooth the spectrum by the specified kernel (conserving flux).
1276 Parameters:
1277 kernel: The type of smoothing kernel. Select from
1278 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1279 or 'poly'
1280 width: The width of the kernel in pixels. For hanning this is
1281 ignored otherwise it defauls to 5 pixels.
1282 For 'gaussian' it is the Full Width Half
1283 Maximum. For 'boxcar' it is the full width.
1284 For 'rmedian' and 'poly' it is the half width.
1285 order: Optional parameter for 'poly' kernel (default is 2), to
1286 specify the order of the polnomial. Ignored by all other
1287 kernels.
1288 insitu: if False a new scantable is returned.
1289 Otherwise, the scaling is done in-situ
1290 The default is taken from .asaprc (False)
1291 Example:
1292 none
1293 """
1294 if insitu is None: insitu = rcParams['insitu']
1295 self._math._setinsitu(insitu)
1296 varlist = vars()
1297 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1298 s._add_history("smooth", varlist)
1299 if insitu: self._assign(s)
1300 else: return s
1301
1302 @print_log_dec
1303 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1304 insitu=None):
1305 """
1306 Return a scan which has been baselined (all rows) by a polynomial.
1307 Parameters:
1308 mask: an optional mask
1309 order: the order of the polynomial (default is 0)
1310 plot: plot the fit and the residual. In this each
1311 indivual fit has to be approved, by typing 'y'
1312 or 'n'
1313 uselin: use linear polynomial fit
1314 insitu: if False a new scantable is returned.
1315 Otherwise, the scaling is done in-situ
1316 The default is taken from .asaprc (False)
1317 Example:
1318 # return a scan baselined by a third order polynomial,
1319 # not using a mask
1320 bscan = scan.poly_baseline(order=3)
1321 """
1322 if insitu is None: insitu = rcParams['insitu']
1323 varlist = vars()
1324 if mask is None:
1325 mask = [True for i in xrange(self.nchan(-1))]
1326 from asap.asapfitter import fitter
1327 try:
1328 f = fitter()
1329 f.set_scan(self, mask)
1330 #f.set_function(poly=order)
1331 if uselin:
1332 f.set_function(lpoly=order)
1333 else:
1334 f.set_function(poly=order)
1335 s = f.auto_fit(insitu, plot=plot)
1336 s._add_history("poly_baseline", varlist)
1337 if insitu: self._assign(s)
1338 else: return s
1339 except RuntimeError:
1340 msg = "The fit failed, possibly because it didn't converge."
1341 if rcParams['verbose']:
1342 print msg
1343 return
1344 else:
1345 raise RuntimeError(msg)
1346
1347 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1348 threshold=3, chan_avg_limit=1, plot=False,
1349 insitu=None):
1350 """
1351 Return a scan which has been baselined (all rows) by a polynomial.
1352 Spectral lines are detected first using linefinder and masked out
1353 to avoid them affecting the baseline solution.
1354
1355 Parameters:
1356 mask: an optional mask retreived from scantable
1357 edge: an optional number of channel to drop at
1358 the edge of spectrum. If only one value is
1359 specified, the same number will be dropped from
1360 both sides of the spectrum. Default is to keep
1361 all channels. Nested tuples represent individual
1362 edge selection for different IFs (a number of spectral
1363 channels can be different)
1364 order: the order of the polynomial (default is 0)
1365 threshold: the threshold used by line finder. It is better to
1366 keep it large as only strong lines affect the
1367 baseline solution.
1368 chan_avg_limit:
1369 a maximum number of consequtive spectral channels to
1370 average during the search of weak and broad lines.
1371 The default is no averaging (and no search for weak
1372 lines). If such lines can affect the fitted baseline
1373 (e.g. a high order polynomial is fitted), increase this
1374 parameter (usually values up to 8 are reasonable). Most
1375 users of this method should find the default value
1376 sufficient.
1377 plot: plot the fit and the residual. In this each
1378 indivual fit has to be approved, by typing 'y'
1379 or 'n'
1380 insitu: if False a new scantable is returned.
1381 Otherwise, the scaling is done in-situ
1382 The default is taken from .asaprc (False)
1383
1384 Example:
1385 scan2=scan.auto_poly_baseline(order=7)
1386 """
1387 if insitu is None: insitu = rcParams['insitu']
1388 varlist = vars()
1389 from asap.asapfitter import fitter
1390 from asap.asaplinefind import linefinder
1391 from asap import _is_sequence_or_number as _is_valid
1392
1393 # check whether edge is set up for each IF individually
1394 individualedge = False;
1395 if len(edge) > 1:
1396 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1397 individualedge = True;
1398
1399 if not _is_valid(edge, int) and not individualedge:
1400 raise ValueError, "Parameter 'edge' has to be an integer or a \
1401 pair of integers specified as a tuple. Nested tuples are allowed \
1402 to make individual selection for different IFs."
1403
1404 curedge = (0, 0)
1405 if individualedge:
1406 for edgepar in edge:
1407 if not _is_valid(edgepar, int):
1408 raise ValueError, "Each element of the 'edge' tuple has \
1409 to be a pair of integers or an integer."
1410 else:
1411 curedge = edge;
1412
1413 # setup fitter
1414 f = fitter()
1415 f.set_function(poly=order)
1416
1417 # setup line finder
1418 fl = linefinder()
1419 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1420
1421 if not insitu:
1422 workscan = self.copy()
1423 else:
1424 workscan = self
1425
1426 fl.set_scan(workscan)
1427
1428 rows = range(workscan.nrow())
1429 asaplog.push("Processing:")
1430 for r in rows:
1431 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1432 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1433 workscan.getpol(r), workscan.getcycle(r))
1434 asaplog.push(msg, False)
1435
1436 # figure out edge parameter
1437 if individualedge:
1438 if len(edge) >= workscan.getif(r):
1439 raise RuntimeError, "Number of edge elements appear to " \
1440 "be less than the number of IFs"
1441 curedge = edge[workscan.getif(r)]
1442
1443 # setup line finder
1444 fl.find_lines(r, mask, curedge)
1445 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1446 mask_and(workscan._getmask(r), fl.get_mask()))
1447 f.fit()
1448 x = f.get_parameters()
1449 if plot:
1450 f.plot(residual=True)
1451 x = raw_input("Accept fit ( [y]/n ): ")
1452 if x.upper() == 'N':
1453 continue
1454 workscan._setspectrum(f.fitter.getresidual(), r)
1455 if plot:
1456 f._p.unmap()
1457 f._p = None
1458 workscan._add_history("auto_poly_baseline", varlist)
1459 if insitu:
1460 self._assign(workscan)
1461 else:
1462 return workscan
1463
1464 @print_log_dec
1465 def rotate_linpolphase(self, angle):
1466 """
1467 Rotate the phase of the complex polarization O=Q+iU correlation.
1468 This is always done in situ in the raw data. So if you call this
1469 function more than once then each call rotates the phase further.
1470 Parameters:
1471 angle: The angle (degrees) to rotate (add) by.
1472 Examples:
1473 scan.rotate_linpolphase(2.3)
1474 """
1475 varlist = vars()
1476 self._math._rotate_linpolphase(self, angle)
1477 self._add_history("rotate_linpolphase", varlist)
1478 return
1479
1480 @print_log_dec
1481 def rotate_xyphase(self, angle):
1482 """
1483 Rotate the phase of the XY correlation. This is always done in situ
1484 in the data. So if you call this function more than once
1485 then each call rotates the phase further.
1486 Parameters:
1487 angle: The angle (degrees) to rotate (add) by.
1488 Examples:
1489 scan.rotate_xyphase(2.3)
1490 """
1491 varlist = vars()
1492 self._math._rotate_xyphase(self, angle)
1493 self._add_history("rotate_xyphase", varlist)
1494 return
1495
1496 @print_log_dec
1497 def swap_linears(self):
1498 """
1499 Swap the linear polarisations XX and YY, or better the first two
1500 polarisations as this also works for ciculars.
1501 """
1502 varlist = vars()
1503 self._math._swap_linears(self)
1504 self._add_history("swap_linears", varlist)
1505 return
1506
1507 @print_log_dec
1508 def invert_phase(self):
1509 """
1510 Invert the phase of the complex polarisation
1511 """
1512 varlist = vars()
1513 self._math._invert_phase(self)
1514 self._add_history("invert_phase", varlist)
1515 return
1516
1517 @print_log_dec
1518 def add(self, offset, insitu=None):
1519 """
1520 Return a scan where all spectra have the offset added
1521 Parameters:
1522 offset: the offset
1523 insitu: if False a new scantable is returned.
1524 Otherwise, the scaling is done in-situ
1525 The default is taken from .asaprc (False)
1526 """
1527 if insitu is None: insitu = rcParams['insitu']
1528 self._math._setinsitu(insitu)
1529 varlist = vars()
1530 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1531 s._add_history("add", varlist)
1532 if insitu:
1533 self._assign(s)
1534 else:
1535 return s
1536
1537 @print_log_dec
1538 def scale(self, factor, tsys=True, insitu=None):
1539 """
1540 Return a scan where all spectra are scaled by the give 'factor'
1541 Parameters:
1542 factor: the scaling factor
1543 insitu: if False a new scantable is returned.
1544 Otherwise, the scaling is done in-situ
1545 The default is taken from .asaprc (False)
1546 tsys: if True (default) then apply the operation to Tsys
1547 as well as the data
1548 """
1549 if insitu is None: insitu = rcParams['insitu']
1550 self._math._setinsitu(insitu)
1551 varlist = vars()
1552 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1553 s._add_history("scale", varlist)
1554 if insitu:
1555 self._assign(s)
1556 else:
1557 return s
1558
1559 def set_sourcetype(self, match, matchtype="pattern",
1560 sourcetype="reference"):
1561 """
1562 Set the type of the source to be an source or reference scan
1563 using the provided pattern:
1564 Parameters:
1565 match: a Unix style pattern, regular expression or selector
1566 matchtype: 'pattern' (default) UNIX style pattern or
1567 'regex' regular expression
1568 sourcetype: the type of the source to use (source/reference)
1569 """
1570 varlist = vars()
1571 basesel = self.get_selection()
1572 stype = -1
1573 if sourcetype.lower().startswith("r"):
1574 stype = 1
1575 elif sourcetype.lower().startswith("s"):
1576 stype = 0
1577 else:
1578 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1579 if matchtype.lower().startswith("p"):
1580 matchtype = "pattern"
1581 elif matchtype.lower().startswith("r"):
1582 matchtype = "regex"
1583 else:
1584 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1585 sel = selector()
1586 if isinstance(match, selector):
1587 sel = match
1588 else:
1589 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1590 self.set_selection(basesel+sel)
1591 self._setsourcetype(stype)
1592 self.set_selection(basesel)
1593 self._add_history("set_sourcetype", varlist)
1594
1595 @print_log_dec
1596 def auto_quotient(self, preserve=True, mode='paired'):
1597 """
1598 This function allows to build quotients automatically.
1599 It assumes the observation to have the same numer of
1600 "ons" and "offs"
1601 Parameters:
1602 preserve: you can preserve (default) the continuum or
1603 remove it. The equations used are
1604 preserve: Output = Toff * (on/off) - Toff
1605 remove: Output = Toff * (on/off) - Ton
1606 mode: the on/off detection mode
1607 'paired' (default)
1608 identifies 'off' scans by the
1609 trailing '_R' (Mopra/Parkes) or
1610 '_e'/'_w' (Tid) and matches
1611 on/off pairs from the observing pattern
1612 'time'
1613 finds the closest off in time
1614
1615 """
1616 modes = ["time", "paired"]
1617 if not mode in modes:
1618 msg = "please provide valid mode. Valid modes are %s" % (modes)
1619 raise ValueError(msg)
1620 varlist = vars()
1621 s = None
1622 if mode.lower() == "paired":
1623 basesel = self.get_selection()
1624 sel = selector()+basesel
1625 sel.set_query("SRCTYPE==1")
1626 self.set_selection(sel)
1627 offs = self.copy()
1628 sel.set_query("SRCTYPE==0")
1629 self.set_selection(sel)
1630 ons = self.copy()
1631 s = scantable(self._math._quotient(ons, offs, preserve))
1632 self.set_selection(basesel)
1633 elif mode.lower() == "time":
1634 s = scantable(self._math._auto_quotient(self, mode, preserve))
1635 s._add_history("auto_quotient", varlist)
1636 return s
1637
1638 @print_log_dec
1639 def mx_quotient(self, mask = None, weight='median', preserve=True):
1640 """
1641 Form a quotient using "off" beams when observing in "MX" mode.
1642 Parameters:
1643 mask: an optional mask to be used when weight == 'stddev'
1644 weight: How to average the off beams. Default is 'median'.
1645 preserve: you can preserve (default) the continuum or
1646 remove it. The equations used are
1647 preserve: Output = Toff * (on/off) - Toff
1648 remove: Output = Toff * (on/off) - Ton
1649 """
1650 mask = mask or ()
1651 varlist = vars()
1652 on = scantable(self._math._mx_extract(self, 'on'))
1653 preoff = scantable(self._math._mx_extract(self, 'off'))
1654 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1655 from asapmath import quotient
1656 q = quotient(on, off, preserve)
1657 q._add_history("mx_quotient", varlist)
1658 return q
1659
1660 @print_log_dec
1661 def freq_switch(self, insitu=None):
1662 """
1663 Apply frequency switching to the data.
1664 Parameters:
1665 insitu: if False a new scantable is returned.
1666 Otherwise, the swictching is done in-situ
1667 The default is taken from .asaprc (False)
1668 Example:
1669 none
1670 """
1671 if insitu is None: insitu = rcParams['insitu']
1672 self._math._setinsitu(insitu)
1673 varlist = vars()
1674 s = scantable(self._math._freqswitch(self))
1675 s._add_history("freq_switch", varlist)
1676 if insitu: self._assign(s)
1677 else: return s
1678
1679 @print_log_dec
1680 def recalc_azel(self):
1681 """
1682 Recalculate the azimuth and elevation for each position.
1683 Parameters:
1684 none
1685 Example:
1686 """
1687 varlist = vars()
1688 self._recalcazel()
1689 self._add_history("recalc_azel", varlist)
1690 return
1691
1692 @print_log_dec
1693 def __add__(self, other):
1694 varlist = vars()
1695 s = None
1696 if isinstance(other, scantable):
1697 s = scantable(self._math._binaryop(self, other, "ADD"))
1698 elif isinstance(other, float):
1699 s = scantable(self._math._unaryop(self, other, "ADD", False))
1700 else:
1701 raise TypeError("Other input is not a scantable or float value")
1702 s._add_history("operator +", varlist)
1703 return s
1704
1705 @print_log_dec
1706 def __sub__(self, other):
1707 """
1708 implicit on all axes and on Tsys
1709 """
1710 varlist = vars()
1711 s = None
1712 if isinstance(other, scantable):
1713 s = scantable(self._math._binaryop(self, other, "SUB"))
1714 elif isinstance(other, float):
1715 s = scantable(self._math._unaryop(self, other, "SUB", False))
1716 else:
1717 raise TypeError("Other input is not a scantable or float value")
1718 s._add_history("operator -", varlist)
1719 return s
1720
1721 @print_log_dec
1722 def __mul__(self, other):
1723 """
1724 implicit on all axes and on Tsys
1725 """
1726 varlist = vars()
1727 s = None
1728 if isinstance(other, scantable):
1729 s = scantable(self._math._binaryop(self, other, "MUL"))
1730 elif isinstance(other, float):
1731 s = scantable(self._math._unaryop(self, other, "MUL", False))
1732 else:
1733 raise TypeError("Other input is not a scantable or float value")
1734 s._add_history("operator *", varlist)
1735 return s
1736
1737
1738 @print_log_dec
1739 def __div__(self, other):
1740 """
1741 implicit on all axes and on Tsys
1742 """
1743 varlist = vars()
1744 s = None
1745 if isinstance(other, scantable):
1746 s = scantable(self._math._binaryop(self, other, "DIV"))
1747 elif isinstance(other, float):
1748 if other == 0.0:
1749 raise ZeroDivisionError("Dividing by zero is not recommended")
1750 s = scantable(self._math._unaryop(self, other, "DIV", False))
1751 else:
1752 raise TypeError("Other input is not a scantable or float value")
1753 s._add_history("operator /", varlist)
1754 return s
1755
1756 def get_fit(self, row=0):
1757 """
1758 Print or return the stored fits for a row in the scantable
1759 Parameters:
1760 row: the row which the fit has been applied to.
1761 """
1762 if row > self.nrow():
1763 return
1764 from asap.asapfit import asapfit
1765 fit = asapfit(self._getfit(row))
1766 if rcParams['verbose']:
1767 print fit
1768 return
1769 else:
1770 return fit.as_dict()
1771
1772 def flag_nans(self):
1773 """
1774 Utility function to flag NaN values in the scantable.
1775 """
1776 import numpy
1777 basesel = self.get_selection()
1778 for i in range(self.nrow()):
1779 sel = self.get_row_selector(i)
1780 self.set_selection(basesel+sel)
1781 nans = numpy.isnan(self._getspectrum(0))
1782 if numpy.any(nans):
1783 bnans = [ bool(v) for v in nans]
1784 self.flag(bnans)
1785 self.set_selection(basesel)
1786
1787 def get_row_selector(self, rowno):
1788 return selector(beams=self.getbeam(rowno),
1789 ifs=self.getif(rowno),
1790 pols=self.getpol(rowno),
1791 scans=self.getscan(rowno),
1792 cycles=self.getcycle(rowno))
1793
1794 def _add_history(self, funcname, parameters):
1795 if not rcParams['scantable.history']:
1796 return
1797 # create date
1798 sep = "##"
1799 from datetime import datetime
1800 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1801 hist = dstr+sep
1802 hist += funcname+sep#cdate+sep
1803 if parameters.has_key('self'): del parameters['self']
1804 for k, v in parameters.iteritems():
1805 if type(v) is dict:
1806 for k2, v2 in v.iteritems():
1807 hist += k2
1808 hist += "="
1809 if isinstance(v2, scantable):
1810 hist += 'scantable'
1811 elif k2 == 'mask':
1812 if isinstance(v2, list) or isinstance(v2, tuple):
1813 hist += str(self._zip_mask(v2))
1814 else:
1815 hist += str(v2)
1816 else:
1817 hist += str(v2)
1818 else:
1819 hist += k
1820 hist += "="
1821 if isinstance(v, scantable):
1822 hist += 'scantable'
1823 elif k == 'mask':
1824 if isinstance(v, list) or isinstance(v, tuple):
1825 hist += str(self._zip_mask(v))
1826 else:
1827 hist += str(v)
1828 else:
1829 hist += str(v)
1830 hist += sep
1831 hist = hist[:-2] # remove trailing '##'
1832 self._addhistory(hist)
1833
1834
1835 def _zip_mask(self, mask):
1836 mask = list(mask)
1837 i = 0
1838 segments = []
1839 while mask[i:].count(1):
1840 i += mask[i:].index(1)
1841 if mask[i:].count(0):
1842 j = i + mask[i:].index(0)
1843 else:
1844 j = len(mask)
1845 segments.append([i, j])
1846 i = j
1847 return segments
1848
1849 def _get_ordinate_label(self):
1850 fu = "("+self.get_fluxunit()+")"
1851 import re
1852 lbl = "Intensity"
1853 if re.match(".K.", fu):
1854 lbl = "Brightness Temperature "+ fu
1855 elif re.match(".Jy.", fu):
1856 lbl = "Flux density "+ fu
1857 return lbl
1858
1859 def _check_ifs(self):
1860 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1861 nchans = filter(lambda t: t > 0, nchans)
1862 return (sum(nchans)/len(nchans) == nchans[0])
1863
    def _fill(self, names, unit, average):
        """
        Read one or more data files into this scantable.
        Parameters:
            names:    list of file names (environment variables and '~'
                      are expanded); each must exist on disk.
            unit:     flux unit to set on the result, or None to keep it.
            average:  if True, time-average integrations within each scan
                      as each file is read.
        In verbose mode a missing file only logs a message and aborts;
        otherwise IOError is raised.
        """
        import os
        from asap._asap import stfiller
        first = True
        fullnames = []
        # validate and expand all paths before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    asaplog.push(msg)
                    print asaplog.pop().strip()
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type: 1 = disk-based table, 0 = memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = stfiller(tbl)
            rx = rcParams['scantable.reference']
            r._setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r._open(name, -1, -1)
            r._read()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge each subsequent file into the accumulated table,
            # then rebind self to the merged result
            if not first:
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r._close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        self.set_freqframe(rcParams['scantable.freqframe'])
1904
1905 def __getitem__(self, key):
1906 if key < 0:
1907 key += self.nrow()
1908 if key >= self.nrow():
1909 raise IndexError("Row index out of range.")
1910 return self._getspectrum(key)
1911
1912 def __setitem__(self, key, value):
1913 if key < 0:
1914 key += self.nrow()
1915 if key >= self.nrow():
1916 raise IndexError("Row index out of range.")
1917 if not hasattr(value, "__len__") or \
1918 len(value) > self.nchan(self.getif(key)):
1919 raise ValueError("Spectrum length doesn't match.")
1920 return self._setspectrum(value, key)
1921
1922 def __len__(self):
1923 return self.nrow()
1924
1925 def __iter__(self):
1926 for i in range(len(self)):
1927 yield self[i]
Note: See TracBrowser for help on using the repository browser.