source: trunk/python/scantable.py@ 1200

Last change on this file since 1200 was 1200, checked in by mar637, 18 years ago

Merge from Release2.1.0b tag

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 61.8 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 #varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 print_log()
75
76 def save(self, name=None, format=None, overwrite=False):
77 """
78 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
79 Image FITS or MS2 format.
80 Parameters:
81 name: the name of the outputfile. For format "ASCII"
82 this is the root file name (data in 'name'.txt
83 and header in 'name'_header.txt)
84 format: an optional file format. Default is ASAP.
85 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
86 'SDFITS' (save as SDFITS file)
87 'ASCII' (saves as ascii text file)
88 'MS2' (saves as an aips++
89 MeasurementSet V2)
90 overwrite: If the file should be overwritten if it exists.
91 The default False is to return with warning
92 without writing the output. USE WITH CARE.
93 Example:
94 scan.save('myscan.asap')
95 scan.save('myscan.sdfits', 'SDFITS')
96 """
97 from os import path
98 if format is None: format = rcParams['scantable.save']
99 suffix = '.'+format.lower()
100 if name is None or name == "":
101 name = 'scantable'+suffix
102 msg = "No filename given. Using default name %s..." % name
103 asaplog.push(msg)
104 name = path.expandvars(name)
105 if path.isfile(name) or path.isdir(name):
106 if not overwrite:
107 msg = "File %s exists." % name
108 if rcParams['verbose']:
109 print msg
110 return
111 else:
112 raise IOError(msg)
113 format2 = format.upper()
114 if format2 == 'ASAP':
115 self._save(name)
116 else:
117 from asap._asap import stwriter as stw
118 writer = stw(format2)
119 writer.write(self, name)
120 print_log()
121 return
122
123 def copy(self):
124 """
125 Return a copy of this scantable.
126 Parameters:
127 none
128 Example:
129 copiedscan = scan.copy()
130 """
131 sd = scantable(Scantable._copy(self))
132 return sd
133
134 def drop_scan(self, scanid=None):
135 """
136 Return a new scantable where the specified scan number(s) has(have)
137 been dropped.
138 Parameters:
139 scanid: a (list of) scan number(s)
140 """
141 from asap import _is_sequence_or_number as _is_valid
142 from asap import _to_list
143 from asap import unique
144 if not _is_valid(scanid):
145 if rcParams['verbose']:
146 print "Please specify a scanno to drop from the scantable"
147 return
148 else:
149 raise RuntimeError("No scan given")
150 try:
151 scanid = _to_list(scanid)
152 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153 for sid in scanid: allscans.remove(sid)
154 if len(allscans) == 0:
155 raise ValueError("Can't remove all scans")
156 except ValueError:
157 if rcParams['verbose']:
158 print "Couldn't find any match."
159 return
160 else: raise
161 try:
162 bsel = self.get_selection()
163 sel = selector()
164 sel.set_scans(allscans)
165 self.set_selection(bsel+sel)
166 scopy = self._copy()
167 self.set_selection(bsel)
168 return scantable(scopy)
169 except RuntimeError:
170 if rcParams['verbose']:
171 print "Couldn't find any match."
172 else:
173 raise
174
175
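# Usage sketch (editor's addition, not part of the original file): assuming
# 'scan' is an existing scantable, drop_scan could be used like
#     cleaned = scan.drop_scan(1)         # drop scan number 1
#     cleaned = scan.drop_scan([0, 2])    # or drop several scans at once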
176 def get_scan(self, scanid=None):
177 """
178 Return a specific scan (by scanno) or collection of scans (by
179 source name) in a new scantable.
180 Parameters:
181 scanid: a (list of) scanno or a source name, unix-style
182 patterns are accepted for source name matching, e.g.
183 '*_R' gets all 'ref' scans
184 Example:
185 # get all scans containing the source '323p459'
186 newscan = scan.get_scan('323p459')
187 # get all 'off' scans
188 refscans = scan.get_scan('*_R')
189 # get a subset of scans by scanno (as listed in scan.summary())
190 newscan = scan.get_scan([0, 2, 7, 10])
191 """
192 if scanid is None:
193 if rcParams['verbose']:
194 print "Please specify a scan no or name to " \
195 "retrieve from the scantable"
196 return
197 else:
198 raise RuntimeError("No scan given")
199
200 try:
201 bsel = self.get_selection()
202 sel = selector()
203 if type(scanid) is str:
204 sel.set_name(scanid)
205 self.set_selection(bsel+sel)
206 scopy = self._copy()
207 self.set_selection(bsel)
208 return scantable(scopy)
209 elif type(scanid) is int:
210 sel.set_scans([scanid])
211 self.set_selection(bsel+sel)
212 scopy = self._copy()
213 self.set_selection(bsel)
214 return scantable(scopy)
215 elif type(scanid) is list:
216 sel.set_scans(scanid)
217 self.set_selection(bsel+sel)
218 scopy = self._copy()
219 self.set_selection(bsel)
220 return scantable(scopy)
221 else:
222 msg = "Illegal scanid type, use 'int' or 'list' of ints."
223 if rcParams['verbose']:
224 print msg
225 else:
226 raise TypeError(msg)
227 except RuntimeError:
228 if rcParams['verbose']: print "Couldn't find any match."
229 else: raise
230
231 def __str__(self):
232 return Scantable._summary(self, True)
233
234 def summary(self, filename=None):
235 """
236 Print a summary of the contents of this scantable.
237 Parameters:
238 filename: the name of a file to write the output to
239 Default - no file output
240 (Note: a 'verbose' option, which printed extra info such as
241 the frequency table, is currently disabled.)
242 """
243 info = Scantable._summary(self, True)
244 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
245 if filename is not None:
246 if filename == "":
247 filename = 'scantable_summary.txt'
248 from os.path import expandvars, isdir
249 filename = expandvars(filename)
250 if not isdir(filename):
251 data = open(filename, 'w')
252 data.write(info)
253 data.close()
254 else:
255 msg = "Illegal file name '%s'." % (filename)
256 if rcParams['verbose']:
257 print msg
258 else:
259 raise IOError(msg)
260 if rcParams['verbose']:
261 try:
262 from IPython.genutils import page as pager
263 except ImportError:
264 from pydoc import pager
265 pager(info)
266 else:
267 return info
268
269
270 def get_selection(self):
271 """
272 Get the selection object currently set on this scantable.
273 Parameters:
274 none
275 Example:
276 sel = scan.get_selection()
277 sel.set_ifs(0) # select IF 0
278 scan.set_selection(sel) # apply modified selection
279 """
280 return selector(self._getselection())
281
282 def set_selection(self, selection=selector()):
283 """
284 Select a subset of the data. All following operations on this scantable
285 are only applied to this selection.
286 Parameters:
287 selection: a selector object (default unset the selection)
288 Examples:
289 sel = selector() # create a selection object
290 sel.set_scans([0, 3]) # select SCANNO 0 and 3
291 scan.set_selection(sel) # set the selection
292 scan.summary() # will only print summary of scanno 0 and 3
293 scan.set_selection() # unset the selection
294 """
295 self._setselection(selection)
296
297 def stats(self, stat='stddev', mask=None):
298 """
299 Determine the specified statistic of the current beam/if/pol
300 Takes a 'mask' as an optional parameter to specify which
301 channels should be excluded.
302 Parameters:
303 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
304 'var', 'stddev', 'avdev', 'rms', 'median'
305 mask: an optional mask specifying where the statistic
306 should be determined.
307 Example:
308 scan.set_unit('channel')
309 msk = scan.create_mask([100, 200], [500, 600])
310 scan.stats(stat='mean', mask=msk)
311 """
312 if mask is None:
313 mask = []
314 axes = ['Beam', 'IF', 'Pol', 'Time']
315 if not self._check_ifs():
316 raise ValueError("Cannot apply mask as the IFs have different "
317 "number of channels. Please use setselection() "
318 "to select individual IFs")
319
320 statvals = self._math._stats(self, mask, stat)
321 out = ''
322 axes = []
323 for i in range(self.nrow()):
324 axis = []
325 axis.append(self.getscan(i))
326 axis.append(self.getbeam(i))
327 axis.append(self.getif(i))
328 axis.append(self.getpol(i))
329 axis.append(self.getcycle(i))
330 axes.append(axis)
331 tm = self._gettime(i)
332 src = self._getsourcename(i)
333 out += 'Scan[%d] (%s) ' % (axis[0], src)
334 out += 'Time[%s]:\n' % (tm)
335 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
336 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
337 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
338 out += '= %3.3f\n' % (statvals[i])
339 out += "--------------------------------------------------\n"
340
341 if rcParams['verbose']:
342 print "--------------------------------------------------"
343 print " ", stat
344 print "--------------------------------------------------"
345 print out
346 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
347 'axes' : axes,
348 'data': statvals}
349 return retval
350
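# Usage sketch (editor's addition): unpacking the dictionary returned by
# stats(); 'scan' is an assumed scantable instance.
#     res = scan.stats(stat='rms')
#     for ax, val in zip(res['axes'], res['data']):
#         print ax, val   # [scanno, beamno, ifno, polno, cycleno], statistic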
351 def stddev(self, mask=None):
352 """
353 Determine the standard deviation of the current beam/if/pol
354 Takes a 'mask' as an optional parameter to specify which
355 channels should be excluded.
356 Parameters:
357 mask: an optional mask specifying where the standard
358 deviation should be determined.
359
360 Example:
361 scan.set_unit('channel')
362 msk = scan.create_mask([100, 200], [500, 600])
363 scan.stddev(mask=msk)
364 """
365 return self.stats(stat='stddev', mask=mask)
366
367
368 def column_names(self):
369 """
370 Return a list of column names, which can be used for selection.
371 """
372 return list(Scantable.column_names(self))
373
374 def get_tsys(self):
375 """
376 Return the System temperatures.
377 Parameters:
378
379 Returns:
380 a list of Tsys values for the current selection
381 """
382
383 return self._row_callback(self._gettsys, "Tsys")
384
385 def _row_callback(self, callback, label):
386 axes = []
387 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
388 out = ""
389 outvec = []
390 for i in range(self.nrow()):
391 axis = []
392 axis.append(self.getscan(i))
393 axis.append(self.getbeam(i))
394 axis.append(self.getif(i))
395 axis.append(self.getpol(i))
396 axis.append(self.getcycle(i))
397 axes.append(axis)
398 tm = self._gettime(i)
399 src = self._getsourcename(i)
400 out += 'Scan[%d] (%s) ' % (axis[0], src)
401 out += 'Time[%s]:\n' % (tm)
402 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += "--------------------------------------------------\n"
408 if rcParams['verbose']:
409 print "--------------------------------------------------"
410 print " %s" % (label)
411 print "--------------------------------------------------"
412 print out
413 # disabled because the vector seems more useful
414 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
415 return outvec
416
417 def _get_column(self, callback, row=-1):
418 """
419 """
420 if row == -1:
421 return [callback(i) for i in range(self.nrow())]
422 else:
423 if 0 <= row < self.nrow():
424 return callback(row)
425
426
427 def get_time(self, row=-1):
428 """
429 Get a list of time stamps for the observations.
430 Return a string for each integration in the scantable.
431 Parameters:
432 row: row number of integration. Default -1 returns all rows
433 Example:
434 none
435 """
436 from time import strptime
437 from datetime import datetime
438 times = self._get_column(self._gettime, row)
439 format = "%Y/%m/%d/%H:%M:%S"
440 if isinstance(times, list):
441 return [datetime(*strptime(i, format)[:6]) for i in times]
442 else:
443 return datetime(*strptime(times, format)[:6])
444
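# Usage sketch (editor's addition): get_time() returns datetime objects, so
# ordinary datetime arithmetic applies; 'scan' is an assumed scantable.
#     times = scan.get_time()          # list of datetimes, one per row
#     print times[-1] - times[0]       # elapsed time of the observation
#     t0 = scan.get_time(0)            # datetime of a single row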
445 def get_sourcename(self, row=-1):
446 """
447 Get a list of source names for the observations.
448 Return a string for each integration in the scantable.
449 Parameters:
450 row: row number of integration. Default -1 returns all rows
451 Example:
452 none
453 """
454 return self._get_column(self._getsourcename, row)
455
456 def get_elevation(self, row=-1):
457 """
458 Get a list of elevations for the observations.
459 Return a float for each integration in the scantable.
460 Parameters:
461 row: row number of integration. Default -1 returns all rows
462 Example:
463 none
464 """
465 return self._get_column(self._getelevation, row)
466
467 def get_azimuth(self, row=-1):
468 """
469 Get a list of azimuths for the observations.
470 Return a float for each integration in the scantable.
471 Parameters:
472 row: row number of integration. Default -1 returns all rows
473 Example:
474 none
475 """
476 return self._get_column(self._getazimuth, row)
477
478 def get_parangle(self, row=-1):
479 """
480 Get a list of parallactic angles for the observations.
481 Return a float for each integration in the scantable.
482 Parameters:
483 row: row number of integration. Default -1 returns all rows
484 Example:
485 none
486 """
487 return self._get_column(self._getparangle, row)
488
489 def get_direction(self, row=-1):
490 """
491 Get a list of Positions on the sky (direction) for the observations.
492 Return a float for each integration in the scantable.
493 Parameters:
494 row: row number of integration. Default -1 returns all rows
495 Example:
496 none
497 """
498 return self._get_column(self._getdirection, row)
499
500 def set_unit(self, unit='channel'):
501 """
502 Set the unit for all following operations on this scantable
503 Parameters:
504 unit: optional unit, default is 'channel'
505 one of '*Hz', 'km/s', 'channel', ''
506 """
507 varlist = vars()
508 if unit in ['', 'pixel', 'channel']:
509 unit = ''
510 inf = list(self._getcoordinfo())
511 inf[0] = unit
512 self._setcoordinfo(inf)
513 self._add_history("set_unit", varlist)
514
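# Usage sketch (editor's addition): switching the abcissa unit before
# building a mask; 'scan' is an assumed scantable and the window values
# are placeholders.
#     scan.set_unit('km/s')                           # work in velocity
#     msk = scan.create_mask([-100, -50], [50, 100])  # windows in km/s
#     scan.set_unit('channel')                        # back to channels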
515 def set_instrument(self, instr):
516 """
517 Set the instrument for subsequent processing
518 Parameters:
519 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
520 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
521 """
522 self._setInstrument(instr)
523 self._add_history("set_instrument", vars())
524 print_log()
525
526 def set_feedtype(self, feedtype):
527 """
528 Overwrite the feed type, which might not be set correctly.
529 Parameters:
530 feedtype: 'linear' or 'circular'
531 """
532 self._setfeedtype(feedtype)
533 self._add_history("set_feedtype", vars())
534 print_log()
535
536 def set_doppler(self, doppler='RADIO'):
537 """
538 Set the doppler for all following operations on this scantable.
539 Parameters:
540 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
541 """
542 varlist = vars()
543 inf = list(self._getcoordinfo())
544 inf[2] = doppler
545 self._setcoordinfo(inf)
546 self._add_history("set_doppler", varlist)
547 print_log()
548
549 def set_freqframe(self, frame=None):
550 """
551 Set the frame type of the Spectral Axis.
552 Parameters:
553 frame: an optional frame type, default 'LSRK'. Valid frames are:
554 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
555 'GEO', 'GALACTO', 'LGROUP', 'CMB'
556 Examples:
557 scan.set_freqframe('BARY')
558 """
559 if frame is None: frame = rcParams['scantable.freqframe']
560 varlist = vars()
561 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
562 'GEO', 'GALACTO', 'LGROUP', 'CMB']
563
564 if frame in valid:
565 inf = list(self._getcoordinfo())
566 inf[1] = frame
567 self._setcoordinfo(inf)
568 self._add_history("set_freqframe", varlist)
569 else:
570 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
571 if rcParams['verbose']:
572 print msg
573 else:
574 raise TypeError(msg)
575 print_log()
576
577 def set_dirframe(self, frame=""):
578 """
579 Set the frame type of the Direction on the sky.
580 Parameters:
581 frame: an optional frame type, default ''. Valid frames are:
582 'J2000', 'B1950', 'GALACTIC'
583 Examples:
584 scan.set_dirframe('GALACTIC')
585 """
586 varlist = vars()
587 try:
588 Scantable.set_dirframe(self, frame)
589 except RuntimeError, msg:
590 if rcParams['verbose']:
591 print msg
592 else:
593 raise
594 self._add_history("set_dirframe", varlist)
595
596 def get_unit(self):
597 """
598 Get the default unit set in this scantable
599 Parameters:
600 Returns:
601 A unit string
602 """
603 inf = self._getcoordinfo()
604 unit = inf[0]
605 if unit == '': unit = 'channel'
606 return unit
607
608 def get_abcissa(self, rowno=0):
609 """
610 Get the abcissa in the current coordinate setup for the currently
611 selected Beam/IF/Pol
612 Parameters:
613 rowno: an optional row number in the scantable. Default is the
614 first row, i.e. rowno=0
615 Returns:
616 The abcissa values and its format string (as a dictionary)
617 """
618 abc = self._getabcissa(rowno)
619 lbl = self._getabcissalabel(rowno)
620 print_log()
621 return abc, lbl
622
623 def flag(self, mask=None):
624 """
625 Flag the selected data using an optional channel mask.
626 Parameters:
627 mask: an optional channel mask, created with create_mask. Default
628 (no mask) is all channels.
629 """
630 varlist = vars()
631 if mask is None:
632 mask = []
633 try:
634 self._flag(mask)
635 except RuntimeError, msg:
636 if rcParams['verbose']:
637 print msg
638 return
639 else: raise
640 self._add_history("flag", varlist)
641
642 def lag_flag(self, frequency, unit="GHz", width=0.0, insitu=None):
643 """
644 Flag the data in 'lag' space by providing a frequency to remove.
645 Flagged data in the scantable gets set to 0.0 before the fft.
646 No taper is applied.
647 Parameters:
648 frequency: the frequency to remove
649 unit: the frequency unit (default "GHz")
650 width: the width of the frequency to remove
651 """
652 if insitu is None: insitu = rcParams['insitu']
653 self._math._setinsitu(insitu)
654 varlist = vars()
655 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
656 if not base.has_key(unit):
657 raise ValueError("%s is not a valid unit." % unit)
658 try:
659 s = scantable(self._math._lag_flag(self, frequency*base[unit],
660 width*base[unit]))
661 except RuntimeError, msg:
662 if rcParams['verbose']:
663 print msg
664 return
665 else: raise
666 s._add_history("lag_flag", varlist)
667 print_log()
668 if insitu:
669 self._assign(s)
670 else:
671 return s
672
673
674 def create_mask(self, *args, **kwargs):
675 """
676 Compute and return a mask based on [min, max] windows.
677 The specified windows are to be INCLUDED, when the mask is
678 applied.
679 Parameters:
680 [min, max], [min2, max2], ...
681 Pairs of start/end points (inclusive) specifying the regions
682 to be masked
683 invert: optional argument. If specified as True,
684 return an inverted mask, i.e. the regions
685 specified are EXCLUDED
686 row: create the mask using the specified row for
687 unit conversions, default is row=0
688 only necessary if frequency varies over rows.
689 Example:
690 scan.set_unit('channel')
691 a)
692 msk = scan.create_mask([400, 500], [800, 900])
693 # masks everything outside 400 and 500
694 # and 800 and 900 in the unit 'channel'
695
696 b)
697 msk = scan.create_mask([400, 500], [800, 900], invert=True)
698 # masks the regions between 400 and 500
699 # and 800 and 900 in the unit 'channel'
700 c)
701 mask only channel 400
702 msk = scan.create_mask([400, 400])
703 """
704 row = 0
705 if kwargs.has_key("row"):
706 row = kwargs.get("row")
707 data = self._getabcissa(row)
708 u = self._getcoordinfo()[0]
709 if rcParams['verbose']:
710 if u == "": u = "channel"
711 msg = "The current mask window unit is %s" % u
712 i = self._check_ifs()
713 if not i:
714 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
715 asaplog.push(msg)
716 n = self.nchan()
717 msk = NUM.zeros(n)
718 # test if args is a 'list' or 'normal' *args - UGLY!!!
719
720 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
721 and args or args[0]
722 for window in ws:
723 if (len(window) != 2 or window[0] > window[1] ):
724 raise TypeError("A window needs to be defined as [min, max]")
725 for i in range(n):
726 if data[i] >= window[0] and data[i] <= window[1]:
727 msk[i] = 1
728 if kwargs.has_key('invert'):
729 if kwargs.get('invert'):
730 msk = NUM.logical_not(msk)
731 print_log()
732 return msk
733
734 def get_restfreqs(self):
735 """
736 Get the restfrequency(s) stored in this scantable.
737 The return value(s) are always of unit 'Hz'
738 Parameters:
739 none
740 Returns:
741 a list of doubles
742 """
743 return list(self._getrestfreqs())
744
745
746 def set_restfreqs(self, freqs=None, unit='Hz'):
747 """
748 Set or replace the restfrequency(s) of this scantable.
749 If the 'freqs' argument holds a scalar,
750 then that rest frequency will be applied to all the selected
751 data. If the 'freqs' argument holds
752 a vector, then it MUST be of equal or smaller length than
753 the number of IFs (and the available restfrequencies will be
754 replaced by this vector). In this case, *all* data have
755 the restfrequency set per IF according
756 to the corresponding value you give in the 'freqs' vector.
757 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
758 IF 1 gets restfreq 2e9.
759 You can also specify the frequencies via a linecatalog.
760
761 Parameters:
762 freqs: list of rest frequency values or string identifiers
763 unit: unit for rest frequency (default 'Hz')
764
765 Example:
766 # set the given restfrequency for the whole table
767 scan.set_restfreqs(freqs=1.4e9)
768 # If the number of IFs in the data is >= 2 the IF0 gets the first
769 # value IF1 the second...
770 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
771 #set the given restfrequency for the whole table (by name)
772 scan.set_restfreqs(freqs="OH1667")
773
774 Note:
775 To do more sophisticated restfrequency setting, e.g. on a
776 source and IF basis, use scantable.set_selection() before using
777 this function.
778 # provided your scantable is called scan
779 selection = selector()
780 selection.set_name("ORION*")
781 selection.set_ifs([1])
782 scan.set_selection(selection)
783 scan.set_restfreqs(freqs=86.6e9)
784
785 """
786 varlist = vars()
787 from asap import linecatalog
788 # simple value
789 if isinstance(freqs, int) or isinstance(freqs, float):
790 self._setrestfreqs(freqs, "",unit)
791 # list of values
792 elif isinstance(freqs, list) or isinstance(freqs, tuple):
793 # list values are scalars
794 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
795 sel = selector()
796 savesel = self._getselection()
797 for i in xrange(len(freqs)):
798 sel.set_ifs([i])
799 self._setselection(sel)
800 self._setrestfreqs(freqs[i], "",unit)
801 self._setselection(savesel)
802 # list values are dicts with 'value' and 'name' keys
803 elif isinstance(freqs[-1], dict):
804 sel = selector()
805 savesel = self._getselection()
806 for i in xrange(len(freqs)):
807 sel.set_ifs([i])
808 self._setselection(sel)
809 self._setrestfreqs(freqs[i]["value"],
810 freqs[i]["name"], "MHz")
811 self._setselection(savesel)
812 # freqs are to be taken from a linecatalog
813 elif isinstance(freqs, linecatalog):
814 sel = selector()
815 savesel = self._getselection()
816 for i in xrange(freqs.nrow()):
817 sel.set_ifs([i])
818 self._setselection(sel)
819 self._setrestfreqs(freqs.get_frequency(i),
820 freqs.get_name(i), "MHz")
821 # ensure that we are not iterating past nIF
822 if i == self.nif()-1: break
823 self._setselection(savesel)
824 else:
825 return
826 self._add_history("set_restfreqs", varlist)
827
828
829
830 def history(self):
831 hist = list(self._gethistory())
832 out = "-"*80
833 for h in hist:
834 if h.startswith("---"):
835 out += "\n"+h
836 else:
837 items = h.split("##")
838 date = items[0]
839 func = items[1]
840 items = items[2:]
841 out += "\n"+date+"\n"
842 out += "Function: %s\n Parameters:" % (func)
843 for i in items:
844 s = i.split("=")
845 out += "\n %s = %s" % (s[0], s[1])
846 out += "\n"+"-"*80
847 try:
848 from IPython.genutils import page as pager
849 except ImportError:
850 from pydoc import pager
851 pager(out)
852 return
853
854 #
855 # Maths business
856 #
857
858 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
859 """
860 Return the (time) weighted average of a scan.
861 Note:
862 in channels only - align if necessary
863 Parameters:
864 mask: an optional mask (only used for 'var' and 'tsys'
865 weighting)
866 scanav: True averages each scan separately
867 False (default) averages all scans together,
868 weight: Weighting scheme.
869 'none' (mean no weight)
870 'var' (1/var(spec) weighted)
871 'tsys' (1/Tsys**2 weighted)
872 'tint' (integration time weighted)
873 'tintsys' (Tint/Tsys**2)
874 'median' ( median averaging)
875 The default is 'tint'
876 align: align the spectra in velocity before averaging. It takes
877 the time of the first spectrum as reference time.
878 Example:
879 # time average the scantable without using a mask
880 newscan = scan.average_time()
881 """
882 varlist = vars()
883 if weight is None: weight = 'TINT'
884 if mask is None: mask = ()
885 if scanav: scanav = "SCAN"
886 else: scanav = "NONE"
887 scan = (self, )
888 try:
889 if align:
890 scan = (self.freq_align(insitu=False), )
891 s = None
892 if weight.upper() == 'MEDIAN':
893 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
894 scanav))
895 else:
896 s = scantable(self._math._average(scan, mask, weight.upper(),
897 scanav))
898 except RuntimeError, msg:
899 if rcParams['verbose']:
900 print msg
901 return
902 else: raise
903 s._add_history("average_time", varlist)
904 print_log()
905 return s
906
907 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
908 """
909 Return a scan where all spectra are converted to either
910 Jansky or Kelvin depending upon the flux units of the scan table.
911 By default the function tries to look the values up internally.
912 If it can't find them (or if you want to over-ride), you must
913 specify EITHER jyperk OR eta (and D which it will try to look up
914 also if you don't set it). jyperk takes precedence if you set both.
915 Parameters:
916 jyperk: the Jy / K conversion factor
917 eta: the aperture efficiency
918 d: the geometric diameter (metres)
919 insitu: if False a new scantable is returned.
920 Otherwise, the scaling is done in-situ
921 The default is taken from .asaprc (False)
922 (The conversion applies to the data currently selected on
923 this scantable; see set_selection().)
925 """
926 if insitu is None: insitu = rcParams['insitu']
927 self._math._setinsitu(insitu)
928 varlist = vars()
929 if jyperk is None: jyperk = -1.0
930 if d is None: d = -1.0
931 if eta is None: eta = -1.0
932 s = scantable(self._math._convertflux(self, d, eta, jyperk))
933 s._add_history("convert_flux", varlist)
934 print_log()
935 if insitu: self._assign(s)
936 else: return s
937
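# Usage sketch (editor's addition): converting between K and Jy; 'scan' is an
# assumed scantable and the Jy/K factor is a placeholder value.
#     converted = scan.convert_flux(jyperk=12.5, insitu=False)
#     # or rely on the internally known telescope parameters:
#     converted = scan.convert_flux(insitu=False)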
938 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
939 """
940 Return a scan after applying a gain-elevation correction.
941 The correction can be made via either a polynomial or a
942 table-based interpolation (and extrapolation if necessary).
943 You specify polynomial coefficients, an ascii table or neither.
944 If you specify neither, then a polynomial correction will be made
945 with built in coefficients known for certain telescopes (an error
946 will occur if the instrument is not known).
947 The data and Tsys are *divided* by the scaling factors.
948 Parameters:
949 poly: Polynomial coefficients (default None) to compute a
950 gain-elevation correction as a function of
951 elevation (in degrees).
952 filename: The name of an ascii file holding correction factors.
953 The first row of the ascii file must give the column
954 names and these MUST include columns
955 "ELEVATION" (degrees) and "FACTOR" (multiply data
956 by this) somewhere.
957 The second row must give the data type of the
958 column. Use 'R' for Real and 'I' for Integer.
959 An example file would be
960 (actual factors are arbitrary) :
961
962 TIME ELEVATION FACTOR
963 R R R
964 0.1 0 0.8
965 0.2 20 0.85
966 0.3 40 0.9
967 0.4 60 0.85
968 0.5 80 0.8
969 0.6 90 0.75
970 method: Interpolation method when correcting from a table.
971 Values are "nearest", "linear" (default), "cubic"
972 and "spline"
973 insitu: if False a new scantable is returned.
974 Otherwise, the scaling is done in-situ
975 The default is taken from .asaprc (False)
976 """
977
978 if insitu is None: insitu = rcParams['insitu']
979 self._math._setinsitu(insitu)
980 varlist = vars()
981 if poly is None:
982 poly = ()
983 from os.path import expandvars
984 filename = expandvars(filename)
985 s = scantable(self._math._gainel(self, poly, filename, method))
986 s._add_history("gain_el", varlist)
987 print_log()
988 if insitu: self._assign(s)
989 else: return s
990
991 def freq_align(self, reftime=None, method='cubic', insitu=None):
992 """
993 Return a scan where all rows have been aligned in frequency/velocity.
994 The alignment frequency frame (e.g. LSRK) is that set by function
995 set_freqframe.
996 Parameters:
997 reftime: reference time to align at. By default, the time of
998 the first row of data is used.
999 method: Interpolation method for regridding the spectra.
1000 Choose from "nearest", "linear", "cubic" (default)
1001 and "spline"
1002 insitu: if False a new scantable is returned.
1003 Otherwise, the scaling is done in-situ
1004 The default is taken from .asaprc (False)
1005 """
1006 if insitu is None: insitu = rcParams["insitu"]
1007 self._math._setinsitu(insitu)
1008 varlist = vars()
1009 if reftime is None: reftime = ""
1010 s = scantable(self._math._freq_align(self, reftime, method))
1011 s._add_history("freq_align", varlist)
1012 print_log()
1013 if insitu: self._assign(s)
1014 else: return s
1015
1016 def opacity(self, tau, insitu=None):
1017 """
1018 Apply an opacity correction. The data
1019 and Tsys are multiplied by the correction factor.
1020 Parameters:
1021 tau: Opacity from which the correction factor is
1022 exp(tau*ZD)
1023 where ZD is the zenith-distance
1024 insitu: if False a new scantable is returned.
1025 Otherwise, the scaling is done in-situ
1026 The default is taken from .asaprc (False)
1027 """
1028 if insitu is None: insitu = rcParams['insitu']
1029 self._math._setinsitu(insitu)
1030 varlist = vars()
1031 s = scantable(self._math._opacity(self, tau))
1032 s._add_history("opacity", varlist)
1033 print_log()
1034 if insitu: self._assign(s)
1035 else: return s
1036
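# Usage sketch (editor's addition): applying an opacity correction with a
# placeholder zenith opacity; 'scan' is an assumed scantable instance.
#     corrected = scan.opacity(tau=0.05, insitu=False)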
1037 def bin(self, width=5, insitu=None):
1038 """
1039 Return a scan where all spectra have been binned up.
1040 width: The bin width (default=5) in pixels
1041 insitu: if False a new scantable is returned.
1042 Otherwise, the scaling is done in-situ
1043 The default is taken from .asaprc (False)
1044 """
1045 if insitu is None: insitu = rcParams['insitu']
1046 self._math._setinsitu(insitu)
1047 varlist = vars()
1048 s = scantable(self._math._bin(self, width))
1049 s._add_history("bin", varlist)
1050 print_log()
1051 if insitu: self._assign(s)
1052 else: return s
1053
1054
1055 def resample(self, width=5, method='cubic', insitu=None):
1056 """
1057 Return a scan where all spectra have been resampled.
1058 width: The bin width (default=5) in pixels
1059 method: Interpolation method to use for the resampling.
1060 Values are "nearest", "linear", "cubic" (default)
1061 and "spline"
1062 insitu: if False a new scantable is returned.
1063 Otherwise, the scaling is done in-situ
1064 The default is taken from .asaprc (False)
1065 """
1066 if insitu is None: insitu = rcParams['insitu']
1067 self._math._setinsitu(insitu)
1068 varlist = vars()
1069 s = scantable(self._math._resample(self, method, width))
1070 s._add_history("resample", varlist)
1071 print_log()
1072 if insitu: self._assign(s)
1073 else: return s
1074
1075
1076 def average_pol(self, mask=None, weight='none'):
1077 """
1078 Average the Polarisations together.
1079 Parameters:
1080 mask: An optional mask defining the region, where the
1081 averaging will be applied. The output will have all
1082 specified points masked.
1083 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1084 weighted), or 'tsys' (1/Tsys**2 weighted)
1085 """
1086 varlist = vars()
1087 if mask is None:
1088 mask = ()
1089 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1090 s._add_history("average_pol", varlist)
1091 print_log()
1092 return s
1093
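# Usage sketch (editor's addition): averaging the polarisations with Tsys
# weighting; 'scan' is an assumed scantable instance.
#     polav = scan.average_pol(weight='tsys')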
1094 def average_beam(self, mask=None, weight='none'):
1095 """
1096 Average the Beams together.
1097 Parameters:
1098 mask: An optional mask defining the region, where the
1099 averaging will be applied. The output will have all
1100 specified points masked.
1101 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1102 weighted), or 'tsys' (1/Tsys**2 weighted)
1103 """
1104 varlist = vars()
1105 if mask is None:
1106 mask = ()
1107 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1108 s._add_history("average_beam", varlist)
1109 print_log()
1110 return s
1111
1112 def convert_pol(self, poltype=None):
1113 """
1114 Convert the data to a different polarisation type.
1115 Parameters:
1116 poltype: The new polarisation type. Valid types are:
1117 "linear", "stokes" and "circular"
1118 """
1119 varlist = vars()
1120 try:
1121 s = scantable(self._math._convertpol(self, poltype))
1122 except RuntimeError, msg:
1123 if rcParams['verbose']:
1124 print msg
1125 return
1126 else:
1127 raise
1128 s._add_history("convert_pol", varlist)
1129 print_log()
1130 return s
1131
1132 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1133 """
1134 Smooth the spectrum by the specified kernel (conserving flux).
1135 Parameters:
1136 scan: The input scan
1137 kernel: The type of smoothing kernel. Select from
1138 'hanning' (default), 'gaussian' and 'boxcar'.
1139 The first three characters are sufficient.
1140 width: The width of the kernel in pixels. For 'hanning' this is
1141 ignored; otherwise it defaults to 5 pixels.
1142 For 'gaussian' it is the Full Width Half
1143 Maximum. For 'boxcar' it is the full width.
1144 insitu: if False a new scantable is returned.
1145 Otherwise, the scaling is done in-situ
1146 The default is taken from .asaprc (False)
1147 Example:
1148 none
1149 """
1150 if insitu is None: insitu = rcParams['insitu']
1151 self._math._setinsitu(insitu)
1152 varlist = vars()
1153 s = scantable(self._math._smooth(self, kernel.lower(), width))
1154 s._add_history("smooth", varlist)
1155 print_log()
1156 if insitu: self._assign(s)
1157 else: return s
1158
1159
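# Usage sketch (editor's addition): smoothing with the different kernels;
# 'scan' is an assumed scantable and the widths are placeholder values.
#     sm1 = scan.smooth()                               # hanning (default)
#     sm2 = scan.smooth(kernel='boxcar', width=3)       # 3-pixel boxcar
#     sm3 = scan.smooth(kernel='gaussian', width=10.0)  # FWHM of 10 pixels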
1160 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1161 """
1162 Return a scan which has been baselined (all rows) by a polynomial.
1163 Parameters:
1164 scan: a scantable
1165 mask: an optional mask
1166 order: the order of the polynomial (default is 0)
1167 plot: plot the fit and the residual. In this mode each
1168 individual fit has to be approved by typing 'y'
1169 or 'n'
1170 insitu: if False a new scantable is returned.
1171 Otherwise, the scaling is done in-situ
1172 The default is taken from .asaprc (False)
1173 Example:
1174 # return a scan baselined by a third order polynomial,
1175 # not using a mask
1176 bscan = scan.poly_baseline(order=3)
1177 """
1178 if insitu is None: insitu = rcParams['insitu']
1179 varlist = vars()
1180 if mask is None:
1181 mask = list(NUM.ones(self.nchan(-1)))
1182 from asap.asapfitter import fitter
1183 f = fitter()
1184 f.set_scan(self, mask)
1185 f.set_function(poly=order)
1186 s = f.auto_fit(insitu, plot=plot)
1187 s._add_history("poly_baseline", varlist)
1188 print_log()
1189 if insitu: self._assign(s)
1190 else: return s
1191
1192 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1193 threshold=3, plot=False, insitu=None):
1194 """
1195 Return a scan which has been baselined (all rows) by a polynomial.
1196 Spectral lines are detected first using linefinder and masked out
1197 to avoid them affecting the baseline solution.
1198
1199 Parameters:
1200 mask: an optional mask retrieved from scantable
1201 edge: an optional number of channels to drop at
1202 the edge of spectrum. If only one value is
1203 specified, the same number will be dropped from
1204 both sides of the spectrum. Default is to keep
1205 all channels. Nested tuples represent individual
1206 edge selection for different IFs (the number of spectral
1207 channels can differ between IFs)
1208 order: the order of the polynomial (default is 0)
1209 threshold: the threshold used by line finder. It is better to
1210 keep it large as only strong lines affect the
1211 baseline solution.
1212 plot: plot the fit and the residual. In this mode each
1213 individual fit has to be approved by typing 'y'
1214 or 'n'
1215 insitu: if False a new scantable is returned.
1216 Otherwise, the scaling is done in-situ
1217 The default is taken from .asaprc (False)
1218
1219 Example:
1220 scan2=scan.auto_poly_baseline(order=7)
1221 """
1222 if insitu is None: insitu = rcParams['insitu']
1223 varlist = vars()
1224 from asap.asapfitter import fitter
1225 from asap.asaplinefind import linefinder
1226 from asap import _is_sequence_or_number as _is_valid
1227
1228 # check whether edge is set up for each IF individually
1229 individualedge = False;
1230 if len(edge) > 1:
1231 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1232 individualedge = True;
1233
1234 if not _is_valid(edge, int) and not individualedge:
1235 raise ValueError, "Parameter 'edge' has to be an integer or a \
1236 pair of integers specified as a tuple. Nested tuples are allowed \
1237 to make individual selection for different IFs."
1238
1239 curedge = (0, 0)
1240 if individualedge:
1241 for edgepar in edge:
1242 if not _is_valid(edgepar, int):
1243 raise ValueError, "Each element of the 'edge' tuple has \
1244 to be a pair of integers or an integer."
1245 else:
1246 curedge = edge;
1247
1248 # setup fitter
1249 f = fitter()
1250 f.set_function(poly=order)
1251
1252 # setup line finder
1253 fl = linefinder()
1254 fl.set_options(threshold=threshold)
1255
1256 if not insitu:
1257 workscan = self.copy()
1258 else:
1259 workscan = self
1260
1261 fl.set_scan(workscan)
1262
1263 rows = range(workscan.nrow())
1264 asaplog.push("Processing:")
1265 for r in rows:
1266 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1267 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1268 workscan.getpol(r), workscan.getcycle(r))
1269 asaplog.push(msg, False)
1270
1271 # figure out edge parameter
1272 if individualedge:
1273 if len(edge) <= workscan.getif(r):
1274 raise RuntimeError, "Number of edge elements appears to " \
1275 "be less than the number of IFs"
1276 curedge = edge[workscan.getif(r)]
1277
1278 # setup line finder
1279 fl.find_lines(r, mask, curedge)
1280 f.set_scan(workscan, fl.get_mask())
1281 f.x = workscan._getabcissa(r)
1282 f.y = workscan._getspectrum(r)
1283 f.data = None
1284 f.fit()
1285 x = f.get_parameters()
1286 if plot:
1287 f.plot(residual=True)
1288 x = raw_input("Accept fit ( [y]/n ): ")
1289 if x.upper() == 'N':
1290 continue
1291 workscan._setspectrum(f.fitter.getresidual(), r)
1292 if plot:
1293 f._p.unmap()
1294 f._p = None
1295 workscan._add_history("auto_poly_baseline", varlist)
1296 if insitu:
1297 self._assign(workscan)
1298 else:
1299 return workscan
1300
1301 def rotate_linpolphase(self, angle):
1302 """
1303 Rotate the phase of the complex polarization O=Q+iU correlation.
1304 This is always done in situ in the raw data. So if you call this
1305 function more than once then each call rotates the phase further.
1306 Parameters:
1307 angle: The angle (degrees) to rotate (add) by.
1308 Examples:
1309 scan.rotate_linpolphase(2.3)
1310 """
1311 varlist = vars()
1312 self._math._rotate_linpolphase(self, angle)
1313 self._add_history("rotate_linpolphase", varlist)
1314 print_log()
1315 return
1316
1317
1318 def rotate_xyphase(self, angle):
1319 """
1320 Rotate the phase of the XY correlation. This is always done in situ
1321 in the data. So if you call this function more than once
1322 then each call rotates the phase further.
1323 Parameters:
1324 angle: The angle (degrees) to rotate (add) by.
1325 Examples:
1326 scan.rotate_xyphase(2.3)
1327 """
1328 varlist = vars()
1329 self._math._rotate_xyphase(self, angle)
1330 self._add_history("rotate_xyphase", varlist)
1331 print_log()
1332 return
1333
1334 def swap_linears(self):
1335 """
1336 Swap the linear polarisations XX and YY
1337 """
1338 varlist = vars()
1339 self._math._swap_linears(self)
1340 self._add_history("swap_linears", varlist)
1341 print_log()
1342 return
1343
1344 def invert_phase(self):
1345 """
1346 Invert the phase of the complex polarisation
1347 """
1348 varlist = vars()
1349 self._math._invert_phase(self)
1350 self._add_history("invert_phase", varlist)
1351 print_log()
1352 return
1353
1354 def add(self, offset, insitu=None):
1355 """
1356 Return a scan where all spectra have the offset added
1357 Parameters:
1358 offset: the offset
1359 insitu: if False a new scantable is returned.
1360 Otherwise, the scaling is done in-situ
1361 The default is taken from .asaprc (False)
1362 """
1363 if insitu is None: insitu = rcParams['insitu']
1364 self._math._setinsitu(insitu)
1365 varlist = vars()
1366 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1367 s._add_history("add", varlist)
1368 print_log()
1369 if insitu:
1370 self._assign(s)
1371 else:
1372 return s
1373
1374 def scale(self, factor, tsys=True, insitu=None, ):
1375 """
1376 Return a scan where all spectra are scaled by the given 'factor'
1377 Parameters:
1378 factor: the scaling factor
1379 insitu: if False a new scantable is returned.
1380 Otherwise, the scaling is done in-situ
1381 The default is taken from .asaprc (False)
1382 tsys: if True (default) then apply the operation to Tsys
1383 as well as the data
1384 """
1385 if insitu is None: insitu = rcParams['insitu']
1386 self._math._setinsitu(insitu)
1387 varlist = vars()
1388 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1389 s._add_history("scale", varlist)
1390 print_log()
1391 if insitu:
1392 self._assign(s)
1393 else:
1394 return s
1395
1396 def auto_quotient(self, mode='time', preserve=True):
1397 """
1398 This function allows you to build quotients automatically.
1399 It assumes the observation to have the same number of
1400 "ons" and "offs"
1401 It will support "closest off in time" in the future
1402 Parameters:
1403 mode: the on/off detection mode; only 'time' is currently
1404 accepted (and is the default). 'suffix' detection, which
1405 identifies 'off' scans by the trailing '_R' (Mopra/Parkes) or
1406 '_e'/'_w' (Tid), is not available in this version.
1407 preserve: you can preserve (default) the continuum or
1408 remove it. The equations used are
1409 preserve: Output = Toff * (on/off) - Toff
1410 remove: Output = Toff * (on/off) - Ton
1411 """
1412 modes = ["time"]
1413 if not mode in modes:
1414 msg = "please provide valid mode. Valid modes are %s" % (modes)
1415 raise ValueError(msg)
1416 varlist = vars()
1417 s = scantable(self._math._auto_quotient(self, mode, preserve))
1418 s._add_history("auto_quotient", varlist)
1419 print_log()
1420 return s
1421
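# Usage sketch (editor's addition): forming quotient spectra from paired
# on/off scans; 'scan' is an assumed scantable containing matching 'on'
# and 'off' (e.g. '*_R') scans.
#     q = scan.auto_quotient()                 # keep the continuum (default)
#     q = scan.auto_quotient(preserve=False)   # remove the continuum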
1422 def mx_quotient(self, mask = None, weight='median', preserve=True):
1423 """
1424 Form a quotient using "off" beams when observing in "MX" mode.
1425 Parameters:
1426 mask: an optional mask to be used when weight == 'stddev'
1427 weight: How to average the off beams. Default is 'median'.
1428 preserve: you can preserve (default) the continuum or
1429 remove it. The equations used are
1430 preserve: Output = Toff * (on/off) - Toff
1431 remove: Output = Toff * (on/off) - Ton
1432 """
1433 if mask is None: mask = ()
1434 varlist = vars()
1435 on = scantable(self._math._mx_extract(self, 'on'))
1436 preoff = scantable(self._math._mx_extract(self, 'off'))
1437 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1438 from asapmath import quotient
1439 q = quotient(on, off, preserve)
1440 q._add_history("mx_quotient", varlist)
1441 print_log()
1442 return q
1443
1444 def freq_switch(self, insitu=None):
1445 """
1446 Apply frequency switching to the data.
1447 Parameters:
1448 insitu: if False a new scantable is returned.
1449 Otherwise, the switching is done in-situ
1450 The default is taken from .asaprc (False)
1451 Example:
1452 none
1453 """
1454 if insitu is None: insitu = rcParams['insitu']
1455 self._math._setinsitu(insitu)
1456 varlist = vars()
1457 s = scantable(self._math._freqswitch(self))
1458 s._add_history("freq_switch", varlist)
1459 print_log()
1460 if insitu: self._assign(s)
1461 else: return s
1462
1463 def recalc_azel(self):
1464 """
1465 Recalculate the azimuth and elevation for each position.
1466 Parameters:
1467 none
1468 Example:
1469 """
1470 varlist = vars()
1471 self._recalcazel()
1472 self._add_history("recalc_azel", varlist)
1473 print_log()
1474 return
1475
1476 def __add__(self, other):
1477 varlist = vars()
1478 s = None
1479 if isinstance(other, scantable):
1480 print "scantable + scantable NYI"
1481 return
1482 elif isinstance(other, float):
1483 s = scantable(self._math._unaryop(self, other, "ADD", False))
1484 else:
1485 raise TypeError("Other input is not a scantable or float value")
1486 s._add_history("operator +", varlist)
1487 print_log()
1488 return s
1489
1490 def __sub__(self, other):
1491 """
1492 implicit on all axes and on Tsys
1493 """
1494 varlist = vars()
1495 s = None
1496 if isinstance(other, scantable):
1497 print "scantable - scantable NYI"
1498 return
1499 elif isinstance(other, float):
1500 s = scantable(self._math._unaryop(self, other, "SUB", False))
1501 else:
1502 raise TypeError("Other input is not a scantable or float value")
1503 s._add_history("operator -", varlist)
1504 print_log()
1505 return s
1506
1507 def __mul__(self, other):
1508 """
1509 implicit on all axes and on Tsys
1510 """
1511 varlist = vars()
1512 s = None
1513 if isinstance(other, scantable):
1514 print "scantable * scantable NYI"
1515 return
1516 elif isinstance(other, float):
1517 s = scantable(self._math._unaryop(self, other, "MUL", False))
1518 else:
1519 raise TypeError("Other input is not a scantable or float value")
1520 s._add_history("operator *", varlist)
1521 print_log()
1522 return s
1523
1524
1525 def __div__(self, other):
1526 """
1527 implicit on all axes and on Tsys
1528 """
1529 varlist = vars()
1530 s = None
1531 if isinstance(other, scantable):
1532 print "scantable / scantable NYI"
1533 return
1534 elif isinstance(other, float):
1535 if other == 0.0:
1536 raise ZeroDivisionError("Dividing by zero is not recommended")
1537 s = scantable(self._math._unaryop(self, other, "DIV", False))
1538 else:
1539 raise TypeError("Other input is not a scantable or float value")
1540 s._add_history("operator /", varlist)
1541 print_log()
1542 return s
1543
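# Usage sketch (editor's addition): the operators above accept a float on
# the right-hand side; 'scan' is an assumed scantable instance.
#     shifted = scan + 1.5    # add an offset to every spectrum
#     scaled  = scan * 2.0    # scale every spectrum
#     halved  = scan / 2.0    # divide every spectrum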
1544 def get_fit(self, row=0):
1545 """
1546 Print or return the stored fits for a row in the scantable
1547 Parameters:
1548 row: the row which the fit has been applied to.
1549 """
1550 if row > self.nrow():
1551 return
1552 from asap.asapfit import asapfit
1553 fit = asapfit(self._getfit(row))
1554 if rcParams['verbose']:
1555 print fit
1556 return
1557 else:
1558 return fit.as_dict()
1559
1560 def _add_history(self, funcname, parameters):
1561 # create date
1562 sep = "##"
1563 from datetime import datetime
1564 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1565 hist = dstr+sep
1566 hist += funcname+sep#cdate+sep
1567 if parameters.has_key('self'): del parameters['self']
1568 for k, v in parameters.iteritems():
1569 if type(v) is dict:
1570 for k2, v2 in v.iteritems():
1571 hist += k2
1572 hist += "="
1573 if isinstance(v2, scantable):
1574 hist += 'scantable'
1575 elif k2 == 'mask':
1576 if isinstance(v2, list) or isinstance(v2, tuple):
1577 hist += str(self._zip_mask(v2))
1578 else:
1579 hist += str(v2)
1580 else:
1581 hist += str(v2)
1582 else:
1583 hist += k
1584 hist += "="
1585 if isinstance(v, scantable):
1586 hist += 'scantable'
1587 elif k == 'mask':
1588 if isinstance(v, list) or isinstance(v, tuple):
1589 hist += str(self._zip_mask(v))
1590 else:
1591 hist += str(v)
1592 else:
1593 hist += str(v)
1594 hist += sep
1595 hist = hist[:-2] # remove trailing '##'
1596 self._addhistory(hist)
1597
1598
1599 def _zip_mask(self, mask):
1600 mask = list(mask)
1601 i = 0
1602 segments = []
1603 while mask[i:].count(1):
1604 i += mask[i:].index(1)
1605 if mask[i:].count(0):
1606 j = i + mask[i:].index(0)
1607 else:
1608 j = len(mask)
1609 segments.append([i, j])
1610 i = j
1611 return segments
1612
1613 def _get_ordinate_label(self):
1614 fu = "("+self.get_fluxunit()+")"
1615 import re
1616 lbl = "Intensity"
1617 if re.match(".K.", fu):
1618 lbl = "Brightness Temperature "+ fu
1619 elif re.match(".Jy.", fu):
1620 lbl = "Flux density "+ fu
1621 return lbl
1622
1623 def _check_ifs(self):
1624 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1625 nchans = filter(lambda t: t > 0, nchans)
1626 return (sum(nchans)/len(nchans) == nchans[0])
1627
1628 def _fill(self, names, unit, average):
1629 import os
1630 from asap._asap import stfiller
1631 first = True
1632 fullnames = []
1633 for name in names:
1634 name = os.path.expandvars(name)
1635 name = os.path.expanduser(name)
1636 if not os.path.exists(name):
1637 msg = "File '%s' does not exist" % (name)
1638 if rcParams['verbose']:
1639 asaplog.push(msg)
1640 print asaplog.pop().strip()
1641 return
1642 raise IOError(msg)
1643 fullnames.append(name)
1644 if average:
1645 asaplog.push('Auto averaging integrations')
1646 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1647 for name in fullnames:
1648 tbl = Scantable(stype)
1649 r = stfiller(tbl)
1650 msg = "Importing %s..." % (name)
1651 asaplog.push(msg, False)
1652 print_log()
1653 r._open(name, -1, -1)
1654 r._read()
1655 #tbl = r._getdata()
1656 if average:
1657 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1658 #tbl = tbl2
1659 if not first:
1660 tbl = self._math._merge([self, tbl])
1661 #tbl = tbl2
1662 Scantable.__init__(self, tbl)
1663 r._close()
1664 del r, tbl
1665 first = False
1666 if unit is not None:
1667 self.set_fluxunit(unit)
1668 self.set_freqframe(rcParams['scantable.freqframe'])
1669 #self._add_history("scantable", varlist)
1670