source: trunk/python/scantable.py @ 1595

Last change on this file since 1595 was 1594, checked in by Malte Marquarding, 15 years ago

More tidy up. factored out copy for localised selection

1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log, print_log_dec
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13 @print_log_dec
14 def __init__(self, filename, average=None, unit=None, parallactify=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 parallactify: Indicate that the data has been parallactified.
33 Default is taken from the rc file.
34 """
35 if average is None:
36 average = rcParams['scantable.autoaverage']
37 parallactify = parallactify or rcParams['scantable.parallactify']
38 varlist = vars()
39 from asap._asap import stmath
40 self._math = stmath()
41 if isinstance(filename, Scantable):
42 Scantable.__init__(self, filename)
43 else:
44 if isinstance(filename, str):# or \
45# (isinstance(filename, list) or isinstance(filename, tuple)) \
46# and isinstance(filename[-1], str):
47 import os.path
48 filename = os.path.expandvars(filename)
49 filename = os.path.expanduser(filename)
50 if not os.path.exists(filename):
51 s = "File '%s' not found." % (filename)
52 if rcParams['verbose']:
53 asaplog.push(s)
54 #print asaplog.pop().strip()
55 return
56 raise IOError(s)
57 if os.path.isdir(filename) \
58 and not os.path.exists(filename+'/table.f1'):
59 # crude check if asap table
60 if os.path.exists(filename+'/table.info'):
61 ondisk = rcParams['scantable.storage'] == 'disk'
62 Scantable.__init__(self, filename, ondisk)
63 if unit is not None:
64 self.set_fluxunit(unit)
65 self.set_freqframe(rcParams['scantable.freqframe'])
66 else:
67 msg = "The given file '%s'is not a valid " \
68 "asap table." % (filename)
69 if rcParams['verbose']:
70 print msg
71 return
72 else:
73 raise IOError(msg)
74 else:
75 self._fill([filename], unit, average)
76 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
77 and isinstance(filename[-1], str):
78 self._fill(filename, unit, average)
79 self.parallactify(parallactify)
80 self._add_history("scantable", varlist)
81
82 @print_log_dec
83 def save(self, name=None, format=None, overwrite=False):
84 """
85 Store the scantable on disk. This can be an asap (aips++) Table,
86 SDFITS or MS2 format.
87 Parameters:
88 name: the name of the output file. For format "ASCII"
89 this is the root file name (data in 'name'.txt
90 and header in 'name'_header.txt)
91 format: an optional file format. Default is ASAP.
92 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
93 'SDFITS' (save as SDFITS file)
94 'ASCII' (saves as ascii text file)
95 'MS2' (saves as an aips++
96 MeasurementSet V2)
97 'FITS' (save as image FITS - not
98 readable by class)
99 'CLASS' (save as FITS readable by CLASS)
100 overwrite: If the file should be overwritten if it exists.
101 The default (False) is to return with a warning
102 without writing the output. USE WITH CARE.
103 Example:
104 scan.save('myscan.asap')
105 scan.save('myscan.sdfits', 'SDFITS')
106 """
107 from os import path
108 format = format or rcParams['scantable.save']
109 suffix = '.'+format.lower()
110 if name is None or name == "":
111 name = 'scantable'+suffix
112 msg = "No filename given. Using default name %s..." % name
113 asaplog.push(msg)
114 name = path.expandvars(name)
115 if path.isfile(name) or path.isdir(name):
116 if not overwrite:
117 msg = "File %s exists." % name
118 if rcParams['verbose']:
119 print msg
120 return
121 else:
122 raise IOError(msg)
123 format2 = format.upper()
124 if format2 == 'ASAP':
125 self._save(name)
126 else:
127 from asap._asap import stwriter as stw
128 writer = stw(format2)
129 writer.write(self, name)
130 return
131
132 def copy(self):
133 """
134 Return a copy of this scantable.
135 Note:
136 This makes a full (deep) copy. scan2 = scan1 makes a reference.
137 Parameters:
138 none
139 Example:
140 copiedscan = scan.copy()
141 """
142 sd = scantable(Scantable._copy(self))
143 return sd
144
145 def drop_scan(self, scanid=None):
146 """
147 Return a new scantable where the specified scan number(s) has(have)
148 been dropped.
149 Parameters:
150 scanid: a (list of) scan number(s)
151 """
152 from asap import _is_sequence_or_number as _is_valid
153 from asap import _to_list
154 from asap import unique
155 if not _is_valid(scanid):
156 if rcParams['verbose']:
157 print "Please specify a scanno to drop from the scantable"
158 return
159 else:
160 raise RuntimeError("No scan given")
161 try:
162 scanid = _to_list(scanid)
163 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
164 for sid in scanid: allscans.remove(sid)
165 if len(allscans) == 0:
166 raise ValueError("Can't remove all scans")
167 except ValueError:
168 if rcParams['verbose']:
169 print "Couldn't find any match."
170 return
171 else: raise
172 try:
173 sel = selector(scans=allscans)
174 return self._select_copy(sel)
175 except RuntimeError:
176 if rcParams['verbose']:
177 print "Couldn't find any match."
178 else:
179 raise
180
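# Illustrative usage sketch (added example, not in the original file; the scan
# numbers are arbitrary):
#   newscan = scan.drop_scan([2, 3])   # new scantable without scans 2 and 3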
181 def _select_copy(self, selection):
182 orig = self.get_selection()
183 self.set_selection(orig+selection)
184 cp = self.copy()
185 self.set_selection(orig)
186 return cp
187
188 def get_scan(self, scanid=None):
189 """
190 Return a specific scan (by scanno) or collection of scans (by
191 source name) in a new scantable.
192 Note:
193 See scantable.drop_scan() for the inverse operation.
194 Parameters:
195 scanid: a (list of) scanno or a source name, unix-style
196 patterns are accepted for source name matching, e.g.
197 '*_R' gets all 'ref' scans
198 Example:
199 # get all scans containing the source '323p459'
200 newscan = scan.get_scan('323p459')
201 # get all 'off' scans
202 refscans = scan.get_scan('*_R')
203 # get a subset of scans by scanno (as listed in scan.summary())
204 newscan = scan.get_scan([0, 2, 7, 10])
205 """
206 if scanid is None:
207 if rcParams['verbose']:
208 print "Please specify a scan no or name to " \
209 "retrieve from the scantable"
210 return
211 else:
212 raise RuntimeError("No scan given")
213
214 try:
215 bsel = self.get_selection()
216 sel = selector()
217 if type(scanid) is str:
218 sel.set_name(scanid)
219 return self._select_copy(sel)
220 elif type(scanid) is int:
221 sel.set_scans([scanid])
222 return self._select_copy(sel)
223 elif type(scanid) is list:
224 sel.set_scans(scanid)
225 return self._select_copy(sel)
226 else:
227 msg = "Illegal scanid type, use 'int' or 'list' of ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
232 except RuntimeError:
233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
235
236 def __str__(self):
237 return Scantable._summary(self, True)
238
239 def summary(self, filename=None):
240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
243 filename: the name of a file to write the output to
244 Default - no file output
245 verbose: print extra info such as the frequency table
246 The default (False) is taken from .asaprc
247 """
248 info = Scantable._summary(self, True)
249 if filename is not None:
250 if filename == "":
251 filename = 'scantable_summary.txt'
252 from os.path import expandvars, isdir
253 filename = expandvars(filename)
254 if not isdir(filename):
255 data = open(filename, 'w')
256 data.write(info)
257 data.close()
258 else:
259 msg = "Illegal file name '%s'." % (filename)
260 if rcParams['verbose']:
261 print msg
262 else:
263 raise IOError(msg)
264 if rcParams['verbose']:
265 try:
266 from IPython.genutils import page as pager
267 except ImportError:
268 from pydoc import pager
269 pager(info)
270 else:
271 return info
272
273 def get_spectrum(self, rowno):
274 """Return the spectrum for the current row in the scantable as a list.
275 Parameters:
276 rowno: the row number to retrieve the spectrum from
277 """
278 return self._getspectrum(rowno)
279
280 def get_mask(self, rowno):
281 """Return the mask for the current row in the scantable as a list.
282 Parameters:
283 rowno: the row number to retrieve the mask from
284 """
285 return self._getmask(rowno)
286
287 def set_spectrum(self, spec, rowno):
288 """Return the spectrum for the current row in the scantable as a list.
289 Parameters:
290 spec: the spectrum
291 rowno: the row number to set the spectrum for
292 """
293 assert(len(spec) == self.nchan())
294 return self._setspectrum(spec, rowno)
295
296 def get_selection(self):
297 """
298 Get the selection object currently set on this scantable.
299 Parameters:
300 none
301 Example:
302 sel = scan.get_selection()
303 sel.set_ifs(0) # select IF 0
304 scan.set_selection(sel) # apply modified selection
305 """
306 return selector(self._getselection())
307
308 def set_selection(self, selection=None, **kw):
309 """
310 Select a subset of the data. All following operations on this scantable
311 are only applied to this selection.
312 Parameters:
313 selection: a selector object (default unset the selection)
314 Examples:
315 sel = selector() # create a selection object
316 sel.set_scans([0, 3]) # select SCANNO 0 and 3
317 scan.set_selection(sel) # set the selection
318 scan.summary() # will only print summary of scanno 0 and 3
319 scan.set_selection() # unset the selection
320 """
321 if selection is None:
322 # reset
323 if len(kw) == 0:
324 selection = selector()
325 else:
326 # try keywords
327 for k in kw:
328 if k not in selector.fields:
329 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
330 selection = selector(**kw)
331 self._setselection(selection)
332
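# Illustrative usage of the keyword form above (added example; it assumes that
# 'scans' and 'ifs' are among the valid names in selector.fields):
#   scan.set_selection(scans=[0, 3], ifs=[0])   # build the selector from keywords
#   scan.set_selection()                        # unset the selection again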
333 def stats(self, stat='stddev', mask=None):
334 """
335 Determine the specified statistic of the current beam/if/pol
336 Takes a 'mask' as an optional parameter to specify which
337 channels should be excluded.
338 Parameters:
339 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
340 'var', 'stddev', 'avdev', 'rms', 'median'
341 mask: an optional mask specifying where the statistic
342 should be determined.
343 Example:
344 scan.set_unit('channel')
345 msk = scan.create_mask([100, 200], [500, 600])
346 scan.stats(stat='mean', mask=msk)
347 """
348 mask = mask or []
349 if not self._check_ifs():
350 raise ValueError("Cannot apply mask as the IFs have different "
351 "number of channels. Please use setselection() "
352 "to select individual IFs")
353
354 statvals = self._math._stats(self, mask, stat)
355 def cb(i):
356 return statvals[i]
357
358 return self._row_callback(cb, stat)
359
360 def stddev(self, mask=None):
361 """
362 Determine the standard deviation of the current beam/if/pol
363 Takes a 'mask' as an optional parameter to specify which
364 channels should be excluded.
365 Parameters:
366 mask: an optional mask specifying where the standard
367 deviation should be determined.
368
369 Example:
370 scan.set_unit('channel')
371 msk = scan.create_mask([100, 200], [500, 600])
372 scan.stddev(mask=msk)
373 """
374 return self.stats(stat='stddev', mask=mask)
375
376
377 def get_column_names(self):
378 """
379 Return a list of column names, which can be used for selection.
380 """
381 return list(Scantable.get_column_names(self))
382
383 def get_tsys(self):
384 """
385 Return the System temperatures.
386 Returns:
387 a list of Tsys values for the current selection
388 """
389
390 return self._row_callback(self._gettsys, "Tsys")
391
392 def _row_callback(self, callback, label):
393 out = ""
394 outvec = []
395 sep = '-'*50
396 for i in range(self.nrow()):
397 tm = self._gettime(i)
398 src = self._getsourcename(i)
399 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
400 out += 'Time[%s]:\n' % (tm)
401 if self.nbeam(-1) > 1:
402 out += ' Beam[%d] ' % (self.getbeam(i))
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += sep+'\n'
408 if rcParams['verbose']:
409 print sep
410 print " %s" % (label)
411 print sep
412 print out
413 return outvec
414
415 def _get_column(self, callback, row=-1):
416 """
417 """
418 if row == -1:
419 return [callback(i) for i in range(self.nrow())]
420 else:
421 if 0 <= row < self.nrow():
422 return callback(row)
423
424
425 def get_time(self, row=-1, asdatetime=False):
426 """
427 Get a list of time stamps for the observations.
428 Return a datetime object for each integration time stamp in the scantable.
429 Parameters:
430 row: row no of integration. Default -1 return all rows
431 asdatetime: return values as datetime objects rather than strings
432 Example:
433 none
434 """
435 from time import strptime
436 from datetime import datetime
437 times = self._get_column(self._gettime, row)
438 if not asdatetime:
439 return times
440 format = "%Y/%m/%d/%H:%M:%S"
441 if isinstance(times, list):
442 return [datetime(*strptime(i, format)[:6]) for i in times]
443 else:
444 return datetime(*strptime(times, format)[:6])
445
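# Illustrative usage (added example):
#   stamps = scan.get_time()                     # time strings for all rows
#   dt = scan.get_time(row=0, asdatetime=True)   # datetime object for row 0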
446
447 def get_inttime(self, row=-1):
448 """
449 Get a list of integration times for the observations.
450 Return a time in seconds for each integration in the scantable.
451 Parameters:
452 row: row no of integration. Default -1 return all rows.
453 Example:
454 none
455 """
456 return self._get_column(self._getinttime, row)
457
458
459 def get_sourcename(self, row=-1):
460 """
461 Get a list of source names for the observations.
462 Return a string for each integration in the scantable.
463 Parameters:
464 row: row no of integration. Default -1 return all rows.
465 Example:
466 none
467 """
468 return self._get_column(self._getsourcename, row)
469
470 def get_elevation(self, row=-1):
471 """
472 Get a list of elevations for the observations.
473 Return a float for each integration in the scantable.
474 Parameters:
475 row: row no of integration. Default -1 return all rows.
476 Example:
477 none
478 """
479 return self._get_column(self._getelevation, row)
480
481 def get_azimuth(self, row=-1):
482 """
483 Get a list of azimuths for the observations.
484 Return a float for each integration in the scantable.
485 Parameters:
486 row: row no of integration. Default -1 return all rows.
487 Example:
488 none
489 """
490 return self._get_column(self._getazimuth, row)
491
492 def get_parangle(self, row=-1):
493 """
494 Get a list of parallactic angles for the observations.
495 Return a float for each integration in the scantable.
496 Parameters:
497 row: row no of integration. Default -1 return all rows.
498 Example:
499 none
500 """
501 return self._get_column(self._getparangle, row)
502
503 def get_direction(self, row=-1):
504 """
505 Get a list of Positions on the sky (direction) for the observations.
506 Return a string for each integration in the scantable.
507 Parameters:
508 row: row no of integration. Default -1 return all rows
509 Example:
510 none
511 """
512 return self._get_column(self._getdirection, row)
513
514 def get_directionval(self, row=-1):
515 """
516 Get a list of Positions on the sky (direction) for the observations.
517 Return a float for each integration in the scantable.
518 Parameters:
519 row: row no of integration. Default -1 return all rows
520 Example:
521 none
522 """
523 return self._get_column(self._getdirectionvec, row)
524
525
526 def set_unit(self, unit='channel'):
527 """
528 Set the unit for all following operations on this scantable
529 Parameters:
530 unit: optional unit, default is 'channel'
531 one of '*Hz', 'km/s', 'channel', ''
532 """
533 varlist = vars()
534 if unit in ['', 'pixel', 'channel']:
535 unit = ''
536 inf = list(self._getcoordinfo())
537 inf[0] = unit
538 self._setcoordinfo(inf)
539 self._add_history("set_unit", varlist)
540
541 @print_log_dec
542 def set_instrument(self, instr):
543 """
544 Set the instrument for subsequent processing.
545 Parameters:
546 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
547 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
548 """
549 self._setInstrument(instr)
550 self._add_history("set_instrument", vars())
551
552 @print_log_dec
553 def set_feedtype(self, feedtype):
554 """
555 Overwrite the feed type, which might not be set correctly.
556 Parameters:
557 feedtype: 'linear' or 'circular'
558 """
559 self._setfeedtype(feedtype)
560 self._add_history("set_feedtype", vars())
561
562 @print_log_dec
563 def set_doppler(self, doppler='RADIO'):
564 """
565 Set the doppler for all following operations on this scantable.
566 Parameters:
567 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
568 """
569 varlist = vars()
570 inf = list(self._getcoordinfo())
571 inf[2] = doppler
572 self._setcoordinfo(inf)
573 self._add_history("set_doppler", vars())
574
575 @print_log_dec
576 def set_freqframe(self, frame=None):
577 """
578 Set the frame type of the Spectral Axis.
579 Parameters:
580 frame: an optional frame type, default 'LSRK'. Valid frames are:
581 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
582 'GEO', 'GALACTO', 'LGROUP', 'CMB'
583 Examples:
584 scan.set_freqframe('BARY')
585 """
586 frame = frame or rcParams['scantable.freqframe']
587 varlist = vars()
588 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
589 'GEO', 'GALACTO', 'LGROUP', 'CMB']
590
591 if frame in valid:
592 inf = list(self._getcoordinfo())
593 inf[1] = frame
594 self._setcoordinfo(inf)
595 self._add_history("set_freqframe", varlist)
596 else:
597 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
598 if rcParams['verbose']:
599 print msg
600 else:
601 raise TypeError(msg)
602
603 def set_dirframe(self, frame=""):
604 """
605 Set the frame type of the Direction on the sky.
606 Parameters:
607 frame: an optional frame type, default ''. Valid frames are:
608 'J2000', 'B1950', 'GALACTIC'
609 Examples:
610 scan.set_dirframe('GALACTIC')
611 """
612 varlist = vars()
613 try:
614 Scantable.set_dirframe(self, frame)
615 except RuntimeError, msg:
616 if rcParams['verbose']:
617 print msg
618 else:
619 raise
620 self._add_history("set_dirframe", varlist)
621
622 def get_unit(self):
623 """
624 Get the default unit set in this scantable
625 Returns:
626 A unit string
627 """
628 inf = self._getcoordinfo()
629 unit = inf[0]
630 if unit == '': unit = 'channel'
631 return unit
632
633 def get_abcissa(self, rowno=0):
634 """
635 Get the abscissa in the current coordinate setup for the currently
636 selected Beam/IF/Pol
637 Parameters:
638 rowno: an optional row number in the scantable. Default is the
639 first row, i.e. rowno=0
640 Returns:
641 The abscissa values and the format string (as a tuple)
642 """
643 abc = self._getabcissa(rowno)
644 lbl = self._getabcissalabel(rowno)
645 return abc, lbl
646
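# Illustrative usage (added example):
#   abc, lbl = scan.get_abcissa(rowno=0)   # abscissa values and axis label for row 0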
647 def flag(self, mask=None):
648 """
649 Flag the selected data using an optional channel mask.
650 Parameters:
651 mask: an optional channel mask, created with create_mask. Default
652 (no mask) is all channels.
653 """
654 varlist = vars()
655 mask = mask or []
656 try:
657 self._flag(mask)
658 except RuntimeError, msg:
659 if rcParams['verbose']:
660 print msg
661 return
662 else: raise
663 self._add_history("flag", varlist)
664
665 @print_log_dec
666 def lag_flag(self, start, end, unit="MHz", insitu=None):
667 """
668 Flag the data in 'lag' space by providing a frequency to remove.
669 Flagged data in the scantable gets interpolated over the region.
670 No taper is applied.
671 Parameters:
672 start: the start frequency (really a period within the
673 bandwidth) or period to remove
674 end: the end frequency or period to remove
675 unit: the frequency unit (default "MHz") or "" for
676 explicit lag channels
677 Notes:
678 It is recommended to flag edges of the band or strong
679 signals beforehand.
680 """
681 if insitu is None: insitu = rcParams['insitu']
682 self._math._setinsitu(insitu)
683 varlist = vars()
684 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
685 if not (unit == "" or base.has_key(unit)):
686 raise ValueError("%s is not a valid unit." % unit)
687 try:
688 if unit == "":
689 s = scantable(self._math._lag_flag(self, start, end, "lags"))
690 else:
691 s = scantable(self._math._lag_flag(self, start*base[unit],
692 end*base[unit], "frequency"))
693 except RuntimeError, msg:
694 if rcParams['verbose']:
695 print msg
696 return
697 else: raise
698 s._add_history("lag_flag", varlist)
699 if insitu:
700 self._assign(s)
701 else:
702 return s
703
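# Illustrative usage (added example; the 100-130 MHz window is an arbitrary value):
#   scan.lag_flag(100, 130, unit="MHz")                # remove a standing-wave ripple (insitu from .asaprc)
#   s = scan.lag_flag(10, 20, unit="", insitu=False)   # explicit lag channels, return a new scantable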
704 @print_log_dec
705 def create_mask(self, *args, **kwargs):
706 """
707 Compute and return a mask based on [min, max] windows.
708 The specified windows are to be INCLUDED, when the mask is
709 applied.
710 Parameters:
711 [min, max], [min2, max2], ...
712 Pairs of start/end points (inclusive) specifying the regions
713 to be masked
714 invert: optional argument. If specified as True,
715 return an inverted mask, i.e. the regions
716 specified are EXCLUDED
717 row: create the mask using the specified row for
718 unit conversions, default is row=0
719 only necessary if frequency varies over rows.
720 Example:
721 scan.set_unit('channel')
722 a)
723 msk = scan.create_mask([400, 500], [800, 900])
724 # masks everything outside 400 and 500
725 # and 800 and 900 in the unit 'channel'
726
727 b)
728 msk = scan.create_mask([400, 500], [800, 900], invert=True)
729 # masks the regions between 400 and 500
730 # and 800 and 900 in the unit 'channel'
731 c)
732 mask only channel 400
733 msk = scan.create_mask([400])
734 """
735 row = kwargs.get("row", 0)
736 data = self._getabcissa(row)
737 u = self._getcoordinfo()[0]
738 if rcParams['verbose']:
739 if u == "": u = "channel"
740 msg = "The current mask window unit is %s" % u
741 i = self._check_ifs()
742 if not i:
743 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
744 asaplog.push(msg)
745 n = self.nchan()
746 msk = _n_bools(n, False)
747 # test if args is a 'list' or a 'normal *args - UGLY!!!
748
749 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
750 and args or args[0]
751 for window in ws:
752 if len(window) == 1:
753 window = [window[0], window[0]]
754 if len(window) == 0 or len(window) > 2:
755 raise ValueError("A window needs to be defined as [start(, end)]")
756 if window[0] > window[1]:
757 tmp = window[0]
758 window[0] = window[1]
759 window[1] = tmp
760 for i in range(n):
761 if data[i] >= window[0] and data[i] <= window[1]:
762 msk[i] = True
763 if kwargs.has_key('invert'):
764 if kwargs.get('invert'):
765 msk = mask_not(msk)
766 return msk
767
768 def get_restfreqs(self):
769 """
770 Get the restfrequency(s) stored in this scantable.
771 The return value(s) are always of unit 'Hz'
772 Parameters:
773 none
774 Returns:
775 a list of doubles
776 """
777 return list(self._getrestfreqs())
778
779
780 def set_restfreqs(self, freqs=None, unit='Hz'):
781 """
782 Set or replace the restfrequency specified.
783 If the 'freqs' argument holds a scalar,
784 then that rest frequency will be applied to all the selected
785 data. If the 'freqs' argument holds
786 a vector, then it MUST be of equal or smaller length than
787 the number of IFs (and the available restfrequencies will be
788 replaced by this vector). In this case, *all* data have
789 the restfrequency set per IF according
790 to the corresponding value you give in the 'freqs' vector.
791 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
792 IF 1 gets restfreq 2e9.
793 You can also specify the frequencies via a linecatalog.
794
795 Parameters:
796 freqs: list of rest frequency values or string identifiers
797 unit: unit for rest frequency (default 'Hz')
798
799 Example:
800 # set the given restfrequency for the whole table
801 scan.set_restfreqs(freqs=1.4e9)
802 # If the number of IFs in the data is >= 2 IF0 gets the first
803 # value IF1 the second...
804 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
805 #set the given restfrequency for the whole table (by name)
806 scan.set_restfreqs(freqs="OH1667")
807
808 Note:
809 To do more sophisticated rest frequency setting, e.g. on a
810 source and IF basis, use scantable.set_selection() before using
811 this function.
812 # provided your scantable is called scan
813 selection = selector()
814 selection.set_name("ORION*")
815 selection.set_ifs([1])
816 scan.set_selection(selection)
817 scan.set_restfreqs(freqs=86.6e9)
818
819 """
820 varlist = vars()
821 from asap import linecatalog
822 # simple value
823 if isinstance(freqs, int) or isinstance(freqs, float):
824 self._setrestfreqs(freqs, "",unit)
825 # list of values
826 elif isinstance(freqs, list) or isinstance(freqs, tuple):
827 # list values are scalars
828 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
829 sel = selector()
830 savesel = self._getselection()
831 iflist = self.getifnos()
832 for i in xrange(len(freqs)):
833 sel.set_ifs(iflist[i])
834 self._setselection(sel)
835 self._setrestfreqs(freqs[i], "",unit)
836 self._setselection(savesel)
838 # list values are dicts, e.g. {'value': freq, 'name': name}
838 elif isinstance(freqs[-1], dict):
839 sel = selector()
840 savesel = self._getselection()
841 iflist = self.getifnos()
842 for i in xrange(len(freqs)):
843 sel.set_ifs(iflist[i])
844 self._setselection(sel)
845 self._setrestfreqs(freqs[i]["value"],
846 freqs[i]["name"], "MHz")
847 self._setselection(savesel)
848 # freqs are to be taken from a linecatalog
849 elif isinstance(freqs, linecatalog):
850 sel = selector()
851 savesel = self._getselection()
852 iflist = self.getifnos()
853 for i in xrange(freqs.nrow()):
854 sel.set_ifs(iflist[i])
855 self._setselection(sel)
856 self._setrestfreqs(freqs.get_frequency(i),
857 freqs.get_name(i), "MHz")
858 # ensure that we are not iterating past nIF
859 if i == self.nif()-1: break
860 self._setselection(savesel)
861 else:
862 return
863 self._add_history("set_restfreqs", varlist)
864
865 def shift_refpix(self, delta):
866 """
867 Shift the reference pixel of the Spectral Coordinate by an
868 integer amount.
869 Parameters:
870 delta: the amount to shift by
871 Note:
872 Be careful using this with broadband data.
873 """
874 Scantable.shift(self, delta)
875
876 def history(self, filename=None):
877 """
878 Print the history. Optionally to a file.
879 Parameters:
880 filename: The name of the file to save the history to.
881 """
882 hist = list(self._gethistory())
883 out = "-"*80
884 for h in hist:
885 if h.startswith("---"):
886 out += "\n"+h
887 else:
888 items = h.split("##")
889 date = items[0]
890 func = items[1]
891 items = items[2:]
892 out += "\n"+date+"\n"
893 out += "Function: %s\n Parameters:" % (func)
894 for i in items:
895 s = i.split("=")
896 out += "\n %s = %s" % (s[0], s[1])
897 out += "\n"+"-"*80
898 if filename is not None:
899 if filename == "":
900 filename = 'scantable_history.txt'
901 import os
902 filename = os.path.expandvars(os.path.expanduser(filename))
903 if not os.path.isdir(filename):
904 data = open(filename, 'w')
905 data.write(out)
906 data.close()
907 else:
908 msg = "Illegal file name '%s'." % (filename)
909 if rcParams['verbose']:
910 print msg
911 else:
912 raise IOError(msg)
913 if rcParams['verbose']:
914 try:
915 from IPython.genutils import page as pager
916 except ImportError:
917 from pydoc import pager
918 pager(out)
919 else:
920 return out
921 return
922 #
923 # Maths business
924 #
925 @print_log_dec
926 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
927 """
928 Return the (time) weighted average of a scan.
929 Note:
930 in channels only - align if necessary
931 Parameters:
932 mask: an optional mask (only used for 'var' and 'tsys'
933 weighting)
934 scanav: True averages each scan separately
935 False (default) averages all scans together,
936 weight: Weighting scheme.
937 'none' (mean, no weighting)
938 'var' (1/var(spec) weighted)
939 'tsys' (1/Tsys**2 weighted)
940 'tint' (integration time weighted)
941 'tintsys' (Tint/Tsys**2)
942 'median' (median averaging)
943 The default is 'tint'
944 align: align the spectra in velocity before averaging. It takes
945 the time of the first spectrum as reference time.
946 Example:
947 # time average the scantable without using a mask
948 newscan = scan.average_time()
949 """
950 varlist = vars()
951 weight = weight or 'TINT'
952 mask = mask or ()
953 scanav = (scanav and 'SCAN') or 'NONE'
954 scan = (self, )
955 try:
956 if align:
957 scan = (self.freq_align(insitu=False), )
958 s = None
959 if weight.upper() == 'MEDIAN':
960 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
961 scanav))
962 else:
963 s = scantable(self._math._average(scan, mask, weight.upper(),
964 scanav))
965 except RuntimeError, msg:
966 if rcParams['verbose']:
967 print msg
968 return
969 else: raise
970 s._add_history("average_time", varlist)
971 return s
972
973 @print_log_dec
974 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
975 """
976 Return a scan where all spectra are converted to either
977 Jansky or Kelvin depending upon the flux units of the scan table.
978 By default the function tries to look the values up internally.
979 If it can't find them (or if you want to over-ride), you must
980 specify EITHER jyperk OR eta (and D which it will try to look up
981 also if you don't set it). jyperk takes precedence if you set both.
982 Parameters:
983 jyperk: the Jy / K conversion factor
984 eta: the aperture efficiency
985 d: the geometric diameter (metres)
986 insitu: if False a new scantable is returned.
987 Otherwise, the scaling is done in-situ
988 The default is taken from .asaprc (False)
989 """
990 if insitu is None: insitu = rcParams['insitu']
991 self._math._setinsitu(insitu)
992 varlist = vars()
993 jyperk = jyperk or -1.0
994 d = d or -1.0
995 eta = eta or -1.0
996 s = scantable(self._math._convertflux(self, d, eta, jyperk))
997 s._add_history("convert_flux", varlist)
998 if insitu: self._assign(s)
999 else: return s
1000
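# Illustrative usage (added example; the efficiency and diameter values are arbitrary):
#   s = scan.convert_flux(eta=0.58, d=22.0, insitu=False)   # convert via eta and dish diameter
#   s = scan.convert_flux(jyperk=12.7, insitu=False)        # or give the Jy/K factor directly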
1001 @print_log_dec
1002 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1003 """
1004 Return a scan after applying a gain-elevation correction.
1005 The correction can be made via either a polynomial or a
1006 table-based interpolation (and extrapolation if necessary).
1007 You specify polynomial coefficients, an ascii table or neither.
1008 If you specify neither, then a polynomial correction will be made
1009 with built in coefficients known for certain telescopes (an error
1010 will occur if the instrument is not known).
1011 The data and Tsys are *divided* by the scaling factors.
1012 Parameters:
1013 poly: Polynomial coefficients (default None) to compute a
1014 gain-elevation correction as a function of
1015 elevation (in degrees).
1016 filename: The name of an ascii file holding correction factors.
1017 The first row of the ascii file must give the column
1018 names and these MUST include columns
1019 "ELEVATION" (degrees) and "FACTOR" (multiply data
1020 by this) somewhere.
1021 The second row must give the data type of the
1022 column. Use 'R' for Real and 'I' for Integer.
1023 An example file would be
1024 (actual factors are arbitrary) :
1025
1026 TIME ELEVATION FACTOR
1027 R R R
1028 0.1 0 0.8
1029 0.2 20 0.85
1030 0.3 40 0.9
1031 0.4 60 0.85
1032 0.5 80 0.8
1033 0.6 90 0.75
1034 method: Interpolation method when correcting from a table.
1035 Values are "nearest", "linear" (default), "cubic"
1036 and "spline"
1037 insitu: if False a new scantable is returned.
1038 Otherwise, the scaling is done in-situ
1039 The default is taken from .asaprc (False)
1040 """
1041
1042 if insitu is None: insitu = rcParams['insitu']
1043 self._math._setinsitu(insitu)
1044 varlist = vars()
1045 poly = poly or ()
1046 from os.path import expandvars
1047 filename = expandvars(filename)
1048 s = scantable(self._math._gainel(self, poly, filename, method))
1049 s._add_history("gain_el", varlist)
1050 if insitu:
1051 self._assign(s)
1052 else:
1053 return s
1054
1055 @print_log_dec
1056 def freq_align(self, reftime=None, method='cubic', insitu=None):
1057 """
1058 Return a scan where all rows have been aligned in frequency/velocity.
1059 The alignment frequency frame (e.g. LSRK) is that set by function
1060 set_freqframe.
1061 Parameters:
1062 reftime: reference time to align at. By default, the time of
1063 the first row of data is used.
1064 method: Interpolation method for regridding the spectra.
1065 Choose from "nearest", "linear", "cubic" (default)
1066 and "spline"
1067 insitu: if False a new scantable is returned.
1068 Otherwise, the scaling is done in-situ
1069 The default is taken from .asaprc (False)
1070 """
1071 if insitu is None: insitu = rcParams["insitu"]
1072 self._math._setinsitu(insitu)
1073 varlist = vars()
1074 reftime = reftime or ""
1075 s = scantable(self._math._freq_align(self, reftime, method))
1076 s._add_history("freq_align", varlist)
1077 if insitu: self._assign(s)
1078 else: return s
1079
1080 @print_log_dec
1081 def opacity(self, tau, insitu=None):
1082 """
1083 Apply an opacity correction. The data
1084 and Tsys are multiplied by the correction factor.
1085 Parameters:
1086 tau: Opacity from which the correction factor is
1087 exp(tau*ZD)
1088 where ZD is the zenith-distance
1089 insitu: if False a new scantable is returned.
1090 Otherwise, the scaling is done in-situ
1091 The default is taken from .asaprc (False)
1092 """
1093 if insitu is None: insitu = rcParams['insitu']
1094 self._math._setinsitu(insitu)
1095 varlist = vars()
1096 s = scantable(self._math._opacity(self, tau))
1097 s._add_history("opacity", varlist)
1098 if insitu: self._assign(s)
1099 else: return s
1100
1101 @print_log_dec
1102 def bin(self, width=5, insitu=None):
1103 """
1104 Return a scan where all spectra have been binned up.
1105 Parameters:
1106 width: The bin width (default=5) in pixels
1107 insitu: if False a new scantable is returned.
1108 Otherwise, the scaling is done in-situ
1109 The default is taken from .asaprc (False)
1110 """
1111 if insitu is None: insitu = rcParams['insitu']
1112 self._math._setinsitu(insitu)
1113 varlist = vars()
1114 s = scantable(self._math._bin(self, width))
1115 s._add_history("bin", varlist)
1116 if insitu:
1117 self._assign(s)
1118 else:
1119 return s
1120
1121 @print_log_dec
1122 def resample(self, width=5, method='cubic', insitu=None):
1123 """
1124 Return a scan where all spectra have been resampled (regridded).
1125
1126 Parameters:
1127 width: The bin width (default=5) in pixels
1128 method: Interpolation method for the resampling.
1129 Values are "nearest", "linear", "cubic" (default)
1130 and "spline"
1131 insitu: if False a new scantable is returned.
1132 Otherwise, the scaling is done in-situ
1133 The default is taken from .asaprc (False)
1134 """
1135 if insitu is None: insitu = rcParams['insitu']
1136 self._math._setinsitu(insitu)
1137 varlist = vars()
1138 s = scantable(self._math._resample(self, method, width))
1139 s._add_history("resample", varlist)
1140 if insitu: self._assign(s)
1141 else: return s
1142
1143 @print_log_dec
1144 def average_pol(self, mask=None, weight='none'):
1145 """
1146 Average the Polarisations together.
1147 Parameters:
1148 mask: An optional mask defining the region, where the
1149 averaging will be applied. The output will have all
1150 specified points masked.
1151 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1152 weighted), or 'tsys' (1/Tsys**2 weighted)
1153 """
1154 varlist = vars()
1155 mask = mask or ()
1156 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1157 s._add_history("average_pol", varlist)
1158 return s
1159
1160 @print_log_dec
1161 def average_beam(self, mask=None, weight='none'):
1162 """
1163 Average the Beams together.
1164 Parameters:
1165 mask: An optional mask defining the region, where the
1166 averaging will be applied. The output will have all
1167 specified points masked.
1168 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1169 weighted), or 'tsys' (1/Tsys**2 weighted)
1170 """
1171 varlist = vars()
1172 mask = mask or ()
1173 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1174 s._add_history("average_beam", varlist)
1175 return s
1176
1177 def parallactify(self, pflag):
1178 varlist = vars()
1179 self._parallactify(pflag)
1180 self._add_history("parallactify", varlist)
1181
1182 @print_log_dec
1183 def convert_pol(self, poltype=None):
1184 """
1185 Convert the data to a different polarisation type.
1186 Note that you will need cross-polarisation terms for most conversions.
1187 Parameters:
1188 poltype: The new polarisation type. Valid types are:
1189 "linear", "circular", "stokes" and "linpol"
1190 """
1191 varlist = vars()
1192 try:
1193 s = scantable(self._math._convertpol(self, poltype))
1194 except RuntimeError, msg:
1195 if rcParams['verbose']:
1196 print msg
1197 return
1198 else:
1199 raise
1200 s._add_history("convert_pol", varlist)
1201 return s
1202
1203 @print_log_dec
1204 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1205 """
1206 Smooth the spectrum by the specified kernel (conserving flux).
1207 Parameters:
1208 kernel: The type of smoothing kernel. Select from
1209 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1210 or 'poly'
1211 width: The width of the kernel in pixels. For hanning this is
1212 ignored, otherwise it defaults to 5 pixels.
1213 For 'gaussian' it is the Full Width Half
1214 Maximum. For 'boxcar' it is the full width.
1215 For 'rmedian' and 'poly' it is the half width.
1216 order: Optional parameter for 'poly' kernel (default is 2), to
1217 specify the order of the polynomial. Ignored by all other
1218 kernels.
1219 insitu: if False a new scantable is returned.
1220 Otherwise, the scaling is done in-situ
1221 The default is taken from .asaprc (False)
1222 Example:
1223 none
1224 """
1225 if insitu is None: insitu = rcParams['insitu']
1226 self._math._setinsitu(insitu)
1227 varlist = vars()
1228 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1229 s._add_history("smooth", varlist)
1230 if insitu: self._assign(s)
1231 else: return s
1232
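# Illustrative usage (added example):
#   s = scan.smooth(kernel='boxcar', width=10, insitu=False)     # 10-pixel boxcar
#   s = scan.smooth(kernel='gaussian', width=5.0, insitu=False)  # gaussian with FWHM of 5 pixels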
1233 @print_log_dec
1234 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1235 """
1236 Return a scan which has been baselined (all rows) by a polynomial.
1237 Parameters:
1238 mask: an optional mask
1239 order: the order of the polynomial (default is 0)
1240 plot: plot the fit and the residual. In this case each
1241 individual fit has to be approved by typing 'y'
1242 or 'n'
1243 uselin: use linear polynomial fit
1244 insitu: if False a new scantable is returned.
1245 Otherwise, the scaling is done in-situ
1246 The default is taken from .asaprc (False)
1247 Example:
1248 # return a scan baselined by a third order polynomial,
1249 # not using a mask
1250 bscan = scan.poly_baseline(order=3)
1251 """
1252 if insitu is None: insitu = rcParams['insitu']
1253 varlist = vars()
1254 if mask is None:
1255 mask = [True for i in xrange(self.nchan(-1))]
1256 from asap.asapfitter import fitter
1257 try:
1258 f = fitter()
1259 f.set_scan(self, mask)
1260 #f.set_function(poly=order)
1261 if uselin:
1262 f.set_function(lpoly=order)
1263 else:
1264 f.set_function(poly=order)
1265 s = f.auto_fit(insitu, plot=plot)
1266 s._add_history("poly_baseline", varlist)
1267 if insitu: self._assign(s)
1268 else: return s
1269 except RuntimeError:
1270 msg = "The fit failed, possibly because it didn't converge."
1271 if rcParams['verbose']:
1272 print msg
1273 return
1274 else:
1275 raise RuntimeError(msg)
1276
1277 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1278 threshold=3, chan_avg_limit=1, plot=False,
1279 insitu=None):
1280 """
1281 Return a scan which has been baselined (all rows) by a polynomial.
1282 Spectral lines are detected first using linefinder and masked out
1283 to avoid them affecting the baseline solution.
1284
1285 Parameters:
1286 mask: an optional mask retrieved from scantable
1287 edge: an optional number of channels to drop at
1288 the edge of spectrum. If only one value is
1289 specified, the same number will be dropped from
1290 both sides of the spectrum. Default is to keep
1291 all channels. Nested tuples represent individual
1292 edge selection for different IFs (a number of spectral
1293 channels can be different)
1294 order: the order of the polynomial (default is 0)
1295 threshold: the threshold used by line finder. It is better to
1296 keep it large as only strong lines affect the
1297 baseline solution.
1298 chan_avg_limit:
1299 a maximum number of consecutive spectral channels to
1300 average during the search of weak and broad lines.
1301 The default is no averaging (and no search for weak
1302 lines). If such lines can affect the fitted baseline
1303 (e.g. a high order polynomial is fitted), increase this
1304 parameter (usually values up to 8 are reasonable). Most
1305 users of this method should find the default value
1306 sufficient.
1307 plot: plot the fit and the residual. In this case each
1308 individual fit has to be approved by typing 'y'
1309 or 'n'
1310 insitu: if False a new scantable is returned.
1311 Otherwise, the scaling is done in-situ
1312 The default is taken from .asaprc (False)
1313
1314 Example:
1315 scan2=scan.auto_poly_baseline(order=7)
1316 """
1317 if insitu is None: insitu = rcParams['insitu']
1318 varlist = vars()
1319 from asap.asapfitter import fitter
1320 from asap.asaplinefind import linefinder
1321 from asap import _is_sequence_or_number as _is_valid
1322
1323 # check whether edge is set up for each IF individually
1324 individualedge = False;
1325 if len(edge) > 1:
1326 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1327 individualedge = True;
1328
1329 if not _is_valid(edge, int) and not individualedge:
1330 raise ValueError, "Parameter 'edge' has to be an integer or a \
1331 pair of integers specified as a tuple. Nested tuples are allowed \
1332 to make individual selection for different IFs."
1333
1334 curedge = (0, 0)
1335 if individualedge:
1336 for edgepar in edge:
1337 if not _is_valid(edgepar, int):
1338 raise ValueError, "Each element of the 'edge' tuple has \
1339 to be a pair of integers or an integer."
1340 else:
1341 curedge = edge;
1342
1343 # setup fitter
1344 f = fitter()
1345 f.set_function(poly=order)
1346
1347 # setup line finder
1348 fl = linefinder()
1349 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1350
1351 if not insitu:
1352 workscan = self.copy()
1353 else:
1354 workscan = self
1355
1356 fl.set_scan(workscan)
1357
1358 rows = range(workscan.nrow())
1359 asaplog.push("Processing:")
1360 for r in rows:
1361 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1362 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1363 workscan.getpol(r), workscan.getcycle(r))
1364 asaplog.push(msg, False)
1365
1366 # figure out edge parameter
1367 if individualedge:
1368 if len(edge) <= workscan.getif(r):
1369 raise RuntimeError, "Number of edge elements appear to " \
1370 "be less than the number of IFs"
1371 curedge = edge[workscan.getif(r)]
1372
1373 # setup line finder
1374 fl.find_lines(r, mask, curedge)
1375 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1376 mask_and(workscan._getmask(r), fl.get_mask()))
1377 f.fit()
1378 x = f.get_parameters()
1379 if plot:
1380 f.plot(residual=True)
1381 x = raw_input("Accept fit ( [y]/n ): ")
1382 if x.upper() == 'N':
1383 continue
1384 workscan._setspectrum(f.fitter.getresidual(), r)
1385 if plot:
1386 f._p.unmap()
1387 f._p = None
1388 workscan._add_history("auto_poly_baseline", varlist)
1389 if insitu:
1390 self._assign(workscan)
1391 else:
1392 return workscan
1393
1394 @print_log_dec
1395 def rotate_linpolphase(self, angle):
1396 """
1397 Rotate the phase of the complex polarization O=Q+iU correlation.
1398 This is always done in situ in the raw data. So if you call this
1399 function more than once then each call rotates the phase further.
1400 Parameters:
1401 angle: The angle (degrees) to rotate (add) by.
1402 Examples:
1403 scan.rotate_linpolphase(2.3)
1404 """
1405 varlist = vars()
1406 self._math._rotate_linpolphase(self, angle)
1407 self._add_history("rotate_linpolphase", varlist)
1408 return
1409
1410 @print_log_dec
1411 def rotate_xyphase(self, angle):
1412 """
1413 Rotate the phase of the XY correlation. This is always done in situ
1414 in the data. So if you call this function more than once
1415 then each call rotates the phase further.
1416 Parameters:
1417 angle: The angle (degrees) to rotate (add) by.
1418 Examples:
1419 scan.rotate_xyphase(2.3)
1420 """
1421 varlist = vars()
1422 self._math._rotate_xyphase(self, angle)
1423 self._add_history("rotate_xyphase", varlist)
1424 return
1425
1426 @print_log_dec
1427 def swap_linears(self):
1428 """
1429 Swap the linear polarisations XX and YY, or better the first two
1430 polarisations as this also works for circulars.
1431 """
1432 varlist = vars()
1433 self._math._swap_linears(self)
1434 self._add_history("swap_linears", varlist)
1435 return
1436
1437 @print_log_dec
1438 def invert_phase(self):
1439 """
1440 Invert the phase of the complex polarisation
1441 """
1442 varlist = vars()
1443 self._math._invert_phase(self)
1444 self._add_history("invert_phase", varlist)
1445 return
1446
1447 @print_log_dec
1448 def add(self, offset, insitu=None):
1449 """
1450 Return a scan where all spectra have the offset added
1451 Parameters:
1452 offset: the offset
1453 insitu: if False a new scantable is returned.
1454 Otherwise, the scaling is done in-situ
1455 The default is taken from .asaprc (False)
1456 """
1457 if insitu is None: insitu = rcParams['insitu']
1458 self._math._setinsitu(insitu)
1459 varlist = vars()
1460 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1461 s._add_history("add", varlist)
1462 if insitu:
1463 self._assign(s)
1464 else:
1465 return s
1466
1467 @print_log_dec
1468 def scale(self, factor, tsys=True, insitu=None):
1469 """
1470 Return a scan where all spectra are scaled by the given 'factor'
1471 Parameters:
1472 factor: the scaling factor
1473 insitu: if False a new scantable is returned.
1474 Otherwise, the scaling is done in-situ
1475 The default is taken from .asaprc (False)
1476 tsys: if True (default) then apply the operation to Tsys
1477 as well as the data
1478 """
1479 if insitu is None: insitu = rcParams['insitu']
1480 self._math._setinsitu(insitu)
1481 varlist = vars()
1482 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1483 s._add_history("scale", varlist)
1484 if insitu:
1485 self._assign(s)
1486 else:
1487 return s
1488
1489 def set_sourcetype(self, match, matchtype="pattern",
1490 sourcetype="reference"):
1491 """
1492 Set the type of the source to be a source or reference scan
1493 using the provided pattern:
1494 Parameters:
1495 match: a Unix style pattern, regular expression or selector
1496 matchtype: 'pattern' (default) UNIX style pattern or
1497 'regex' regular expression
1498 sourcetype: the type of the source to use (source/reference)
1499 """
1500 varlist = vars()
1501 basesel = self.get_selection()
1502 stype = -1
1503 if sourcetype.lower().startswith("r"):
1504 stype = 1
1505 elif sourcetype.lower().startswith("s"):
1506 stype = 0
1507 else:
1508 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1509 if matchtype.lower().startswith("p"):
1510 matchtype = "pattern"
1511 elif matchtype.lower().startswith("r"):
1512 matchtype = "regex"
1513 else:
1514 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1515 sel = selector()
1516 if isinstance(match, selector):
1517 sel = match
1518 else:
1519 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1520 self.set_selection(basesel+sel)
1521 self._setsourcetype(stype)
1522 self.set_selection(basesel)
1523 self._add_history("set_sourcetype", varlist)
1524
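# Illustrative usage (added example; it assumes reference scans carry a trailing '_R'):
#   scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")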
1525 @print_log_dec
1526 def auto_quotient(self, preserve=True, mode='paired'):
1527 """
1528 This function allows you to build quotients automatically.
1529 It assumes the observation to have the same number of
1530 "ons" and "offs"
1531 Parameters:
1532 preserve: you can preserve (default) the continuum or
1533 remove it. The equations used are
1534 preserve: Output = Toff * (on/off) - Toff
1535 remove: Output = Toff * (on/off) - Ton
1536 mode: the on/off detection mode
1537 'paired' (default)
1538 identifies 'off' scans by the
1539 trailing '_R' (Mopra/Parkes) or
1540 '_e'/'_w' (Tid) and matches
1541 on/off pairs from the observing pattern
1542 'time'
1543 finds the closest off in time
1544
1545 """
1546 modes = ["time", "paired"]
1547 if not mode in modes:
1548 msg = "please provide valid mode. Valid modes are %s" % (modes)
1549 raise ValueError(msg)
1550 varlist = vars()
1551 s = None
1552 if mode.lower() == "paired":
1553 basesel = self.get_selection()
1554 sel = selector()+basesel
1555 sel.set_query("SRCTYPE==1")
1556 self.set_selection(sel)
1557 offs = self.copy()
1558 sel.set_query("SRCTYPE==0")
1559 self.set_selection(sel)
1560 ons = self.copy()
1561 s = scantable(self._math._quotient(ons, offs, preserve))
1562 self.set_selection(basesel)
1563 elif mode.lower() == "time":
1564 s = scantable(self._math._auto_quotient(self, mode, preserve))
1565 s._add_history("auto_quotient", varlist)
1566 return s
1567
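# Illustrative usage (added example):
#   q = scan.auto_quotient()              # paired on/off detection, continuum preserved
#   q = scan.auto_quotient(mode='time')   # match each 'on' with the closest 'off' in time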
1568 @print_log_dec
1569 def mx_quotient(self, mask = None, weight='median', preserve=True):
1570 """
1571 Form a quotient using "off" beams when observing in "MX" mode.
1572 Parameters:
1573 mask: an optional mask to be used when weight == 'stddev'
1574 weight: How to average the off beams. Default is 'median'.
1575 preserve: you can preserve (default) the continuum or
1576 remove it. The equations used are
1577 preserve: Output = Toff * (on/off) - Toff
1578 remove: Output = Toff * (on/off) - Ton
1579 """
1580 mask = mask or ()
1581 varlist = vars()
1582 on = scantable(self._math._mx_extract(self, 'on'))
1583 preoff = scantable(self._math._mx_extract(self, 'off'))
1584 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1585 from asapmath import quotient
1586 q = quotient(on, off, preserve)
1587 q._add_history("mx_quotient", varlist)
1588 return q
1589
1590 @print_log_dec
1591 def freq_switch(self, insitu=None):
1592 """
1593 Apply frequency switching to the data.
1594 Parameters:
1595 insitu: if False a new scantable is returned.
1596 Otherwise, the switching is done in-situ
1597 The default is taken from .asaprc (False)
1598 Example:
1599 none
1600 """
1601 if insitu is None: insitu = rcParams['insitu']
1602 self._math._setinsitu(insitu)
1603 varlist = vars()
1604 s = scantable(self._math._freqswitch(self))
1605 s._add_history("freq_switch", varlist)
1606 if insitu: self._assign(s)
1607 else: return s
1608
1609 @print_log_dec
1610 def recalc_azel(self):
1611 """
1612 Recalculate the azimuth and elevation for each position.
1613 Parameters:
1614 none
1615 Example:
1616 """
1617 varlist = vars()
1618 self._recalcazel()
1619 self._add_history("recalc_azel", varlist)
1620 return
1621
1622 @print_log_dec
1623 def __add__(self, other):
1624 varlist = vars()
1625 s = None
1626 if isinstance(other, scantable):
1627 s = scantable(self._math._binaryop(self, other, "ADD"))
1628 elif isinstance(other, float):
1629 s = scantable(self._math._unaryop(self, other, "ADD", False))
1630 else:
1631 raise TypeError("Other input is not a scantable or float value")
1632 s._add_history("operator +", varlist)
1633 return s
1634
1635 @print_log_dec
1636 def __sub__(self, other):
1637 """
1638 implicit on all axes and on Tsys
1639 """
1640 varlist = vars()
1641 s = None
1642 if isinstance(other, scantable):
1643 s = scantable(self._math._binaryop(self, other, "SUB"))
1644 elif isinstance(other, float):
1645 s = scantable(self._math._unaryop(self, other, "SUB", False))
1646 else:
1647 raise TypeError("Other input is not a scantable or float value")
1648 s._add_history("operator -", varlist)
1649 return s
1650
1651 @print_log_dec
1652 def __mul__(self, other):
1653 """
1654 implicit on all axes and on Tsys
1655 """
1656 varlist = vars()
1657 s = None
1658 if isinstance(other, scantable):
1659 s = scantable(self._math._binaryop(self, other, "MUL"))
1660 elif isinstance(other, float):
1661 s = scantable(self._math._unaryop(self, other, "MUL", False))
1662 else:
1663 raise TypeError("Other input is not a scantable or float value")
1664 s._add_history("operator *", varlist)
1665 return s
1666
1667
1668 @print_log_dec
1669 def __div__(self, other):
1670 """
1671 implicit on all axes and on Tsys
1672 """
1673 varlist = vars()
1674 s = None
1675 if isinstance(other, scantable):
1676 s = scantable(self._math._binaryop(self, other, "DIV"))
1677 elif isinstance(other, float):
1678 if other == 0.0:
1679 raise ZeroDivisionError("Dividing by zero is not recommended")
1680 s = scantable(self._math._unaryop(self, other, "DIV", False))
1681 else:
1682 raise TypeError("Other input is not a scantable or float value")
1683 s._add_history("operator /", varlist)
1684 return s
1685
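# Illustrative usage of the arithmetic operators (added example; scalar operands
# must be floats, per the type checks above, and 'other_scan' stands for another
# scantable of matching layout):
#   calibrated = (scan - 1.25) * 2.0
#   ratio = scan / other_scan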
1686 def get_fit(self, row=0):
1687 """
1688 Print or return the stored fits for a row in the scantable
1689 Parameters:
1690 row: the row which the fit has been applied to.
1691 """
1692 if row > self.nrow():
1693 return
1694 from asap.asapfit import asapfit
1695 fit = asapfit(self._getfit(row))
1696 if rcParams['verbose']:
1697 print fit
1698 return
1699 else:
1700 return fit.as_dict()
1701
1702 def flag_nans(self):
1703 """
1704 Utility function to flag NaN values in the scantable.
1705 """
1706 import numpy
1707 basesel = self.get_selection()
1708 for i in range(self.nrow()):
1709 sel = self.get_row_selector(i)
1710 self.set_selection(basesel+sel)
1711 nans = numpy.isnan(self._getspectrum(0))
1712 if numpy.any(nans):
1713 bnans = [ bool(v) for v in nans]
1714 self.flag(bnans)
1715 self.set_selection(basesel)
1716
1717 def get_row_selector(self, rowno):
1718 return selector(beams=self.getbeam(rowno),
1719 ifs=self.getif(rowno),
1720 pols=self.getpol(rowno),
1721 scans=self.getscan(rowno),
1722 cycles=self.getcycle(rowno))
1723
1724 def _add_history(self, funcname, parameters):
1725 if not rcParams['scantable.history']:
1726 return
1727 # create date
1728 sep = "##"
1729 from datetime import datetime
1730 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1731 hist = dstr+sep
1732 hist += funcname+sep#cdate+sep
1733 if parameters.has_key('self'): del parameters['self']
1734 for k, v in parameters.iteritems():
1735 if type(v) is dict:
1736 for k2, v2 in v.iteritems():
1737 hist += k2
1738 hist += "="
1739 if isinstance(v2, scantable):
1740 hist += 'scantable'
1741 elif k2 == 'mask':
1742 if isinstance(v2, list) or isinstance(v2, tuple):
1743 hist += str(self._zip_mask(v2))
1744 else:
1745 hist += str(v2)
1746 else:
1747 hist += str(v2)
1748 else:
1749 hist += k
1750 hist += "="
1751 if isinstance(v, scantable):
1752 hist += 'scantable'
1753 elif k == 'mask':
1754 if isinstance(v, list) or isinstance(v, tuple):
1755 hist += str(self._zip_mask(v))
1756 else:
1757 hist += str(v)
1758 else:
1759 hist += str(v)
1760 hist += sep
1761 hist = hist[:-2] # remove trailing '##'
1762 self._addhistory(hist)
1763
1764
1765 def _zip_mask(self, mask):
1766 mask = list(mask)
1767 i = 0
1768 segments = []
1769 while mask[i:].count(1):
1770 i += mask[i:].index(1)
1771 if mask[i:].count(0):
1772 j = i + mask[i:].index(0)
1773 else:
1774 j = len(mask)
1775 segments.append([i, j])
1776 i = j
1777 return segments
1778
1779 def _get_ordinate_label(self):
1780 fu = "("+self.get_fluxunit()+")"
1781 import re
1782 lbl = "Intensity"
1783 if re.match(".K.", fu):
1784 lbl = "Brightness Temperature "+ fu
1785 elif re.match(".Jy.", fu):
1786 lbl = "Flux density "+ fu
1787 return lbl
1788
1789 def _check_ifs(self):
1790 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1791 nchans = filter(lambda t: t > 0, nchans)
1792 return (sum(nchans)/len(nchans) == nchans[0])
1793
1794 def _fill(self, names, unit, average):
1795 import os
1796 from asap._asap import stfiller
1797 first = True
1798 fullnames = []
1799 for name in names:
1800 name = os.path.expandvars(name)
1801 name = os.path.expanduser(name)
1802 if not os.path.exists(name):
1803 msg = "File '%s' does not exist" % (name)
1804 if rcParams['verbose']:
1805 asaplog.push(msg)
1806 print asaplog.pop().strip()
1807 return
1808 raise IOError(msg)
1809 fullnames.append(name)
1810 if average:
1811 asaplog.push('Auto averaging integrations')
1812 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1813 for name in fullnames:
1814 tbl = Scantable(stype)
1815 r = stfiller(tbl)
1816 rx = rcParams['scantable.reference']
1817 r._setreferenceexpr(rx)
1818 msg = "Importing %s..." % (name)
1819 asaplog.push(msg, False)
1820 print_log()
1821 r._open(name, -1, -1)
1822 r._read()
1823 if average:
1824 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1825 if not first:
1826 tbl = self._math._merge([self, tbl])
1827 Scantable.__init__(self, tbl)
1828 r._close()
1829 del r, tbl
1830 first = False
1831 if unit is not None:
1832 self.set_fluxunit(unit)
1833 self.set_freqframe(rcParams['scantable.freqframe'])
1834
1835 def __getitem__(self, key):
1836 if key < 0:
1837 key += self.nrow()
1838 if key >= self.nrow():
1839 raise IndexError("Row index out of range.")
1840 return self._getspectrum(key)
1841
1842 def __setitem__(self, key, value):
1843 if key < 0:
1844 key += self.nrow()
1845 if key >= self.nrow():
1846 raise IndexError("Row index out of range.")
1847 if not hasattr(value, "__len__") or \
1848 len(value) > self.nchan(self.getif(key)):
1849 raise ValueError("Spectrum length doesn't match.")
1850 return self._setspectrum(value, key)
1851
1852 def __len__(self):
1853 return self.nrow()
1854
1855 def __iter__(self):
1856 for i in range(len(self)):
1857 yield self[i]
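# Illustrative usage of the container protocol defined above (added example):
#   first = scan[0]        # spectrum of the first row
#   scan[-1] = first       # overwrite the last row's spectrum (lengths must match)
#   for spec in scan:      # iterate over all row spectra
#       nchan = len(spec)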