source: trunk/python/scantable.py@ 1209

Last change on this file since 1209 was 1203, checked in by mar637, 18 years ago

Using FFTServer::fft0 now, don't know what the difference is. Added better docs to explain the fact that the frequency to remove is really a period within the bandwidth.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 62.0 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Overrides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 #varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 print_log()
75
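    # Usage sketch (illustrative only): constructing a scantable from a file
    # on disk. The file name below is hypothetical; any rpfits/sdfits/ms file
    # or saved asap table will do, per the constructor docstring above.
    #
    #     from asap import scantable
    #     s = scantable('mydata.rpf', average=True, unit='K')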
76 def save(self, name=None, format=None, overwrite=False):
77 """
78 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
79 Image FITS or MS2 format.
80 Parameters:
81 name: the name of the output file. For format "ASCII"
82 this is the root file name (data in 'name'.txt
83 and header in 'name'_header.txt)
84 format: an optional file format. Default is ASAP.
85 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
86 'SDFITS' (save as SDFITS file)
87 'ASCII' (saves as ascii text file)
88 'MS2' (saves as an aips++
89 MeasurementSet V2)
90 overwrite: If the file should be overwritten if it exists.
91 The default False is to return with warning
92 without writing the output. USE WITH CARE.
93 Example:
94 scan.save('myscan.asap')
95 scan.save('myscan.sdfits', 'SDFITS')
96 """
97 from os import path
98 if format is None: format = rcParams['scantable.save']
99 suffix = '.'+format.lower()
100 if name is None or name == "":
101 name = 'scantable'+suffix
102 msg = "No filename given. Using default name %s..." % name
103 asaplog.push(msg)
104 name = path.expandvars(name)
105 if path.isfile(name) or path.isdir(name):
106 if not overwrite:
107 msg = "File %s exists." % name
108 if rcParams['verbose']:
109 print msg
110 return
111 else:
112 raise IOError(msg)
113 format2 = format.upper()
114 if format2 == 'ASAP':
115 self._save(name)
116 else:
117 from asap._asap import stwriter as stw
118 writer = stw(format2)
119 writer.write(self, name)
120 print_log()
121 return
122
123 def copy(self):
124 """
125 Return a copy of this scantable.
126 Parameters:
127 none
128 Example:
129 copiedscan = scan.copy()
130 """
131 sd = scantable(Scantable._copy(self))
132 return sd
133
134 def drop_scan(self, scanid=None):
135 """
136 Return a new scantable where the specified scan number(s) have
137 been dropped.
138 Parameters:
139 scanid: a (list of) scan number(s)
140 """
141 from asap import _is_sequence_or_number as _is_valid
142 from asap import _to_list
143 from asap import unique
144 if not _is_valid(scanid):
145 if rcParams['verbose']:
146 print "Please specify a scanno to drop from the scantable"
147 return
148 else:
149 raise RuntimeError("No scan given")
150 try:
151 scanid = _to_list(scanid)
152 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153 for sid in scanid: allscans.remove(sid)
154 if len(allscans) == 0:
155 raise ValueError("Can't remove all scans")
156 except ValueError:
157 if rcParams['verbose']:
158 print "Couldn't find any match."
159 return
160 else: raise
161 try:
162 bsel = self.get_selection()
163 sel = selector()
164 sel.set_scans(allscans)
165 self.set_selection(bsel+sel)
166 scopy = self._copy()
167 self.set_selection(bsel)
168 return scantable(scopy)
169 except RuntimeError:
170 if rcParams['verbose']:
171 print "Couldn't find any match."
172 else:
173 raise
174
175
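    # Usage sketch (illustrative; the scan numbers are made up):
    #
    #     newscan = scan.drop_scan(2)           # drop a single scan
    #     newscan = scan.drop_scan([2, 5, 7])   # drop several scans at once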
176 def get_scan(self, scanid=None):
177 """
178 Return a specific scan (by scanno) or collection of scans (by
179 source name) in a new scantable.
180 Parameters:
181 scanid: a (list of) scanno or a source name, unix-style
182 patterns are accepted for source name matching, e.g.
183 '*_R' gets all 'ref' scans
184 Example:
185 # get all scans containing the source '323p459'
186 newscan = scan.get_scan('323p459')
187 # get all 'off' scans
188 refscans = scan.get_scan('*_R')
189 # get a subset of scans by scanno (as listed in scan.summary())
190 newscan = scan.get_scan([0, 2, 7, 10])
191 """
192 if scanid is None:
193 if rcParams['verbose']:
194 print "Please specify a scan no or name to " \
195 "retrieve from the scantable"
196 return
197 else:
198 raise RuntimeError("No scan given")
199
200 try:
201 bsel = self.get_selection()
202 sel = selector()
203 if type(scanid) is str:
204 sel.set_name(scanid)
205 self.set_selection(bsel+sel)
206 scopy = self._copy()
207 self.set_selection(bsel)
208 return scantable(scopy)
209 elif type(scanid) is int:
210 sel.set_scans([scanid])
211 self.set_selection(bsel+sel)
212 scopy = self._copy()
213 self.set_selection(bsel)
214 return scantable(scopy)
215 elif type(scanid) is list:
216 sel.set_scans(scanid)
217 self.set_selection(sel)
218 scopy = self._copy()
219 self.set_selection(bsel)
220 return scantable(scopy)
221 else:
222 msg = "Illegal scanid type, use 'int' or 'list' if ints."
223 if rcParams['verbose']:
224 print msg
225 else:
226 raise TypeError(msg)
227 except RuntimeError:
228 if rcParams['verbose']: print "Couldn't find any match."
229 else: raise
230
231 def __str__(self):
232 return Scantable._summary(self, True)
233
234 def summary(self, filename=None):
235 """
236 Print a summary of the contents of this scantable.
237 Parameters:
238 filename: the name of a file to write the output to
239 Default - no file output
240 verbose: print extra info such as the frequency table
241 The default (False) is taken from .asaprc
242 """
243 info = Scantable._summary(self, True)
244 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
245 if filename is not None:
246 if filename == "":
247 filename = 'scantable_summary.txt'
248 from os.path import expandvars, isdir
249 filename = expandvars(filename)
250 if not isdir(filename):
251 data = open(filename, 'w')
252 data.write(info)
253 data.close()
254 else:
255 msg = "Illegal file name '%s'." % (filename)
256 if rcParams['verbose']:
257 print msg
258 else:
259 raise IOError(msg)
260 if rcParams['verbose']:
261 try:
262 from IPython.genutils import page as pager
263 except ImportError:
264 from pydoc import pager
265 pager(info)
266 else:
267 return info
268
269
270 def get_selection(self):
271 """
272 Get the selection object currently set on this scantable.
273 Parameters:
274 none
275 Example:
276 sel = scan.get_selection()
277 sel.set_ifs(0) # select IF 0
278 scan.set_selection(sel) # apply modified selection
279 """
280 return selector(self._getselection())
281
282 def set_selection(self, selection=selector()):
283 """
284 Select a subset of the data. All following operations on this scantable
285 are only applied to this selection.
286 Parameters:
287 selection: a selector object (default unset the selection)
288 Examples:
289 sel = selector() # create a selection object
290 sel.set_scans([0, 3]) # select SCANNO 0 and 3
291 scan.set_selection(sel) # set the selection
292 scan.summary() # will only print summary of scanno 0 and 3
293 scan.set_selection() # unset the selection
294 """
295 self._setselection(selection)
296
297 def stats(self, stat='stddev', mask=None):
298 """
299 Determine the specified statistic of the current beam/if/pol
300 Takes a 'mask' as an optional parameter to specify which
301 channels should be excluded.
302 Parameters:
303 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
304 'var', 'stddev', 'avdev', 'rms', 'median'
305 mask: an optional mask specifying where the statistic
306 should be determined.
307 Example:
308 scan.set_unit('channel')
309 msk = scan.create_mask([100, 200], [500, 600])
310 scan.stats(stat='mean', mask=msk)
311 """
312 if mask is None:
313 mask = []
314 axes = ['Beam', 'IF', 'Pol', 'Time']
315 if not self._check_ifs():
316 raise ValueError("Cannot apply mask as the IFs have different "
317 "number of channels. Please use setselection() "
318 "to select individual IFs")
319
320 statvals = self._math._stats(self, mask, stat)
321 out = ''
322 axes = []
323 for i in range(self.nrow()):
324 axis = []
325 axis.append(self.getscan(i))
326 axis.append(self.getbeam(i))
327 axis.append(self.getif(i))
328 axis.append(self.getpol(i))
329 axis.append(self.getcycle(i))
330 axes.append(axis)
331 tm = self._gettime(i)
332 src = self._getsourcename(i)
333 out += 'Scan[%d] (%s) ' % (axis[0], src)
334 out += 'Time[%s]:\n' % (tm)
335 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
336 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
337 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
338 out += '= %3.3f\n' % (statvals[i])
339 out += "--------------------------------------------------\n"
340
341 if rcParams['verbose']:
342 print "--------------------------------------------------"
343 print " ", stat
344 print "--------------------------------------------------"
345 print out
346 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
347 'axes' : axes,
348 'data': statvals}
349 return retval
350
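    # Usage sketch (illustrative; the channel windows are made up):
    #
    #     scan.set_unit('channel')
    #     msk = scan.create_mask([100, 200], [500, 600])
    #     result = scan.stats(stat='rms', mask=msk)
    #     values = result['data']               # one value per row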
351 def stddev(self, mask=None):
352 """
353 Determine the standard deviation of the current beam/if/pol
354 Takes a 'mask' as an optional parameter to specify which
355 channels should be excluded.
356 Parameters:
357 mask: an optional mask specifying where the standard
358 deviation should be determined.
359
360 Example:
361 scan.set_unit('channel')
362 msk = scan.create_mask([100, 200], [500, 600])
363 scan.stddev(mask=msk)
364 """
365 return self.stats(stat='stddev', mask=mask)
366
367
368 def column_names(self):
369 """
370 Return a list of column names, which can be used for selection.
371 """
372 return list(Scantable.column_names(self))
373
374 def get_tsys(self):
375 """
376 Return the System temperatures.
377 Parameters:
378
379 Returns:
380 a list of Tsys values for the current selection
381 """
382
383 return self._row_callback(self._gettsys, "Tsys")
384
385 def _row_callback(self, callback, label):
386 axes = []
387 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
388 out = ""
389 outvec = []
390 for i in range(self.nrow()):
391 axis = []
392 axis.append(self.getscan(i))
393 axis.append(self.getbeam(i))
394 axis.append(self.getif(i))
395 axis.append(self.getpol(i))
396 axis.append(self.getcycle(i))
397 axes.append(axis)
398 tm = self._gettime(i)
399 src = self._getsourcename(i)
400 out += 'Scan[%d] (%s) ' % (axis[0], src)
401 out += 'Time[%s]:\n' % (tm)
402 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += "--------------------------------------------------\n"
408 if rcParams['verbose']:
409 print "--------------------------------------------------"
410 print " %s" % (label)
411 print "--------------------------------------------------"
412 print out
413 # disabled because the vector seems more useful
414 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
415 return outvec
416
417 def _get_column(self, callback, row=-1):
418 """
419 """
420 if row == -1:
421 return [callback(i) for i in range(self.nrow())]
422 else:
423 if 0 <= row < self.nrow():
424 return callback(row)
425
426
427 def get_time(self, row=-1):
428 """
429 Get a list of time stamps for the observations.
430 Return a string for each integration in the scantable.
431 Parameters:
432 row: row no of integration. Default -1 returns all rows
433 Example:
434 none
435 """
436 from time import strptime
437 from datetime import datetime
438 times = self._get_column(self._gettime, row)
439 format = "%Y/%m/%d/%H:%M:%S"
440 if isinstance(times, list):
441 return [datetime(*strptime(i, format)[:6]) for i in times]
442 else:
443 return datetime(*strptime(times, format)[:6])
444
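    # Usage sketch (illustrative):
    #
    #     t0 = scan.get_time(0)     # datetime object for the first row
    #     times = scan.get_time()   # list of datetime objects, one per row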
445 def get_sourcename(self, row=-1):
446 """
447 Get a list of source names for the observations.
448 Return a string for each integration in the scantable.
449 Parameters:
450 row: row no of integration. Default -1 returns all rows
451 Example:
452 none
453 """
454 return self._get_column(self._getsourcename, row)
455
456 def get_elevation(self, row=-1):
457 """
458 Get a list of elevations for the observations.
459 Return a float for each integration in the scantable.
460 Parameters:
461 row: row no of integration. Default -1 returns all rows
462 Example:
463 none
464 """
465 return self._get_column(self._getelevation, row)
466
467 def get_azimuth(self, row=-1):
468 """
469 Get a list of azimuths for the observations.
470 Return a float for each integration in the scantable.
471 Parameters:
472 row: row no of integration. Default -1 returns all rows
473 Example:
474 none
475 """
476 return self._get_column(self._getazimuth, row)
477
478 def get_parangle(self, row=-1):
479 """
480 Get a list of parallactic angles for the observations.
481 Return a float for each integration in the scantable.
482 Parameters:
483 row: row no of integration. Default -1 returns all rows
484 Example:
485 none
486 """
487 return self._get_column(self._getparangle, row)
488
489 def get_direction(self, row=-1):
490 """
491 Get a list of Positions on the sky (direction) for the observations.
492 Return a float for each integration in the scantable.
493 Parameters:
494 row: row no of integration. Default -1 returns all rows
495 Example:
496 none
497 """
498 return self._get_column(self._getdirection, row)
499
500 def set_unit(self, unit='channel'):
501 """
502 Set the unit for all following operations on this scantable
503 Parameters:
504 unit: optional unit, default is 'channel'
505 one of '*Hz', 'km/s', 'channel', ''
506 """
507 varlist = vars()
508 if unit in ['', 'pixel', 'channel']:
509 unit = ''
510 inf = list(self._getcoordinfo())
511 inf[0] = unit
512 self._setcoordinfo(inf)
513 self._add_history("set_unit", varlist)
514
515 def set_instrument(self, instr):
516 """
517 Set the instrument for subsequent processing
518 Parameters:
519 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
520 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
521 """
522 self._setInstrument(instr)
523 self._add_history("set_instument", vars())
524 print_log()
525
526 def set_feedtype(self, feedtype):
527 """
528 Overwrite the feed type, which might not be set correctly.
529 Parameters:
530 feedtype: 'linear' or 'circular'
531 """
532 self._setfeedtype(feedtype)
533 self._add_history("set_feedtype", vars())
534 print_log()
535
536 def set_doppler(self, doppler='RADIO'):
537 """
538 Set the doppler for all following operations on this scantable.
539 Parameters:
540 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
541 """
542 varlist = vars()
543 inf = list(self._getcoordinfo())
544 inf[2] = doppler
545 self._setcoordinfo(inf)
546 self._add_history("set_doppler", vars())
547 print_log()
548
549 def set_freqframe(self, frame=None):
550 """
551 Set the frame type of the Spectral Axis.
552 Parameters:
553 frame: an optional frame type, default 'LSRK'. Valid frames are:
554 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
555 'GEO', 'GALACTO', 'LGROUP', 'CMB'
556 Examples:
557 scan.set_freqframe('BARY')
558 """
559 if frame is None: frame = rcParams['scantable.freqframe']
560 varlist = vars()
561 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
562 'GEO', 'GALACTO', 'LGROUP', 'CMB']
563
564 if frame in valid:
565 inf = list(self._getcoordinfo())
566 inf[1] = frame
567 self._setcoordinfo(inf)
568 self._add_history("set_freqframe", varlist)
569 else:
570 msg = "Please specify a valid freq type. Valid types are:\n", valid
571 if rcParams['verbose']:
572 print msg
573 else:
574 raise TypeError(msg)
575 print_log()
576
577 def set_dirframe(self, frame=""):
578 """
579 Set the frame type of the Direction on the sky.
580 Parameters:
581 frame: an optional frame type, default ''. Valid frames are:
582 'J2000', 'B1950', 'GALACTIC'
583 Examples:
584 scan.set_dirframe('GALACTIC')
585 """
586 varlist = vars()
587 try:
588 Scantable.set_dirframe(self, frame)
589 except RuntimeError, msg:
590 if rcParams['verbose']:
591 print msg
592 else:
593 raise
594 self._add_history("set_dirframe", varlist)
595
596 def get_unit(self):
597 """
598 Get the default unit set in this scantable
599 Parameters:
600 Returns:
601 A unit string
602 """
603 inf = self._getcoordinfo()
604 unit = inf[0]
605 if unit == '': unit = 'channel'
606 return unit
607
608 def get_abcissa(self, rowno=0):
609 """
610 Get the abscissa in the current coordinate setup for the currently
611 selected Beam/IF/Pol
612 Parameters:
613 rowno: an optional row number in the scantable. Default is the
614 first row, i.e. rowno=0
615 Returns:
616 The abscissa values and its format string (as a dictionary)
617 """
618 abc = self._getabcissa(rowno)
619 lbl = self._getabcissalabel(rowno)
620 print_log()
621 return abc, lbl
622
623 def flag(self, mask=None):
624 """
625 Flag the selected data using an optional channel mask.
626 Parameters:
627 mask: an optional channel mask, created with create_mask. Default
628 (no mask) is all channels.
629 """
630 varlist = vars()
631 if mask is None:
632 mask = []
633 try:
634 self._flag(mask)
635 except RuntimeError, msg:
636 if rcParams['verbose']:
637 print msg
638 return
639 else: raise
640 self._add_history("flag", varlist)
641
642 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
643 """
644 Flag the data in 'lag' space by providing a frequency to remove.
645 Flagged data in the scantable gets set to 0.0 before the fft.
646 No taper is applied.
647 Parameters:
648 frequency: the frequency (really a period within the bandwidth) to remove
649 width: the width of the frequency to remove, to remove a range
650 of frequencies around the centre.
651 unit: the frequency unit (default "GHz")
652 Notes:
653 It is recommended to flag edges of the band or strong signals beforehand.
654 """
655 if insitu is None: insitu = rcParams['insitu']
656 self._math._setinsitu(insitu)
657 varlist = vars()
658 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
659 if not base.has_key(unit):
660 raise ValueError("%s is not a valid unit." % unit)
661 try:
662 s = scantable(self._math._lag_flag(self, frequency*base[unit],
663 width*base[unit]))
664 except RuntimeError, msg:
665 if rcParams['verbose']:
666 print msg
667 return
668 else: raise
669 s._add_history("lag_flag", varlist)
670 print_log()
671 if insitu:
672 self._assign(s)
673 else:
674 return s
675
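    # Usage sketch (illustrative; the numbers are made up). Remember that
    # 'frequency' here is really a period within the bandwidth, and that band
    # edges/strong signals should be flagged beforehand (see Notes above).
    #
    #     scan2 = scan.lag_flag(frequency=6.0, width=0.5, unit="MHz",
    #                           insitu=False)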
676
677 def create_mask(self, *args, **kwargs):
678 """
679 Compute and return a mask based on [min, max] windows.
680 The specified windows are to be INCLUDED, when the mask is
681 applied.
682 Parameters:
683 [min, max], [min2, max2], ...
684 Pairs of start/end points (inclusive) specifying the regions
685 to be masked
686 invert: optional argument. If specified as True,
687 return an inverted mask, i.e. the regions
688 specified are EXCLUDED
689 row: create the mask using the specified row for
690 unit conversions, default is row=0
691 only necessary if frequency varies over rows.
692 Example:
693 scan.set_unit('channel')
694 a)
695 msk = scan.create_mask([400, 500], [800, 900])
696 # masks everything outside 400 and 500
697 # and 800 and 900 in the unit 'channel'
698
699 b)
700 msk = scan.create_mask([400, 500], [800, 900], invert=True)
701 # masks the regions between 400 and 500
702 # and 800 and 900 in the unit 'channel'
703 c)
704 mask only channel 400
705 msk = scan.create_mask([400, 400])
706 """
707 row = 0
708 if kwargs.has_key("row"):
709 row = kwargs.get("row")
710 data = self._getabcissa(row)
711 u = self._getcoordinfo()[0]
712 if rcParams['verbose']:
713 if u == "": u = "channel"
714 msg = "The current mask window unit is %s" % u
715 i = self._check_ifs()
716 if not i:
717 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
718 asaplog.push(msg)
719 n = self.nchan()
720 msk = NUM.zeros(n)
721 # test if args is a 'list' or a 'normal *args - UGLY!!!
722
723 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
724 and args or args[0]
725 for window in ws:
726 if (len(window) != 2 or window[0] > window[1] ):
727 raise TypeError("A window needs to be defined as [min, max]")
728 for i in range(n):
729 if data[i] >= window[0] and data[i] <= window[1]:
730 msk[i] = 1
731 if kwargs.has_key('invert'):
732 if kwargs.get('invert'):
733 msk = NUM.logical_not(msk)
734 print_log()
735 return msk
736
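    # Usage sketch (illustrative; the velocity window is made up): masks can
    # also be built in the current unit, e.g. km/s, not just channels.
    #
    #     scan.set_unit('km/s')
    #     linemsk = scan.create_mask([-30., 30.])               # the line
    #     basemsk = scan.create_mask([-30., 30.], invert=True)  # line-free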
737 def get_restfreqs(self):
738 """
739 Get the restfrequency(s) stored in this scantable.
740 The return value(s) are always of unit 'Hz'
741 Parameters:
742 none
743 Returns:
744 a list of doubles
745 """
746 return list(self._getrestfreqs())
747
748
749 def set_restfreqs(self, freqs=None, unit='Hz'):
750 """
751 Set or replace the restfrequency(s).
752 If the 'freqs' argument holds a scalar,
753 then that rest frequency will be applied to all the selected
754 data. If the 'freqs' argument holds
755 a vector, then it MUST be of equal or smaller length than
756 the number of IFs (and the available restfrequencies will be
757 replaced by this vector). In this case, *all* data have
758 the restfrequency set per IF according
759 to the corresponding value you give in the 'freqs' vector.
760 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
761 IF 1 gets restfreq 2e9.
762 You can also specify the frequencies via a linecatalog.
763
764 Parameters:
765 freqs: list of rest frequency values or string identifiers
766 unit: unit for rest frequency (default 'Hz')
767
768 Example:
769 # set the given restfrequency for the whole table
770 scan.set_restfreqs(freqs=1.4e9)
771 # If the number of IFs in the data is >= 2, IF0 gets the first
772 # value IF1 the second...
773 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
774 #set the given restfrequency for the whole table (by name)
775 scan.set_restfreqs(freqs="OH1667")
776
777 Note:
778 To do more sophisticated rest frequency setting, e.g. on a
779 source and IF basis, use scantable.set_selection() before using
780 this function.
781 # provided your scantable is called scan
782 selection = selector()
783 selection.set_name("ORION*")
784 selection.set_ifs([1])
785 scan.set_selection(selection)
786 scan.set_restfreqs(freqs=86.6e9)
787
788 """
789 varlist = vars()
790 from asap import linecatalog
791 # simple value
792 if isinstance(freqs, int) or isinstance(freqs, float):
793 self._setrestfreqs(freqs, "",unit)
794 # list of values
795 elif isinstance(freqs, list) or isinstance(freqs, tuple):
796 # list values are scalars
797 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
798 sel = selector()
799 savesel = self._getselection()
800 for i in xrange(len(freqs)):
801 sel.set_ifs([i])
802 self._setselection(sel)
803 self._setrestfreqs(freqs[i], "",unit)
804 self._setselection(savesel)
805 # list values are dicts, e.g. {'value': ..., 'name': ...}
806 elif isinstance(freqs[-1], dict):
807 sel = selector()
808 savesel = self._getselection()
809 for i in xrange(len(freqs)):
810 sel.set_ifs([i])
811 self._setselection(sel)
812 self._setrestfreqs(freqs[i]["value"],
813 freqs[i]["name"], "MHz")
814 self._setselection(savesel)
815 # freqs are to be taken from a linecatalog
816 elif isinstance(freqs, linecatalog):
817 sel = selector()
818 savesel = self._getselection()
819 for i in xrange(freqs.nrow()):
820 sel.set_ifs([i])
821 self._setselection(sel)
822 self._setrestfreqs(freqs.get_frequency(i),
823 freqs.get_name(i), "MHz")
824 # ensure that we are not iterating past nIF
825 if i == self.nif()-1: break
826 self._setselection(savesel)
827 else:
828 return
829 self._add_history("set_restfreqs", varlist)
830
831
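    # Usage sketch (illustrative; the line names/values are examples): the
    # dict form sets one (value, name) pair per IF; per the code above the
    # dict values are interpreted in MHz.
    #
    #     scan.set_restfreqs(freqs=[{'name': 'OH1665', 'value': 1665.4018},
    #                               {'name': 'OH1667', 'value': 1667.359}])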
832
833 def history(self):
834 hist = list(self._gethistory())
835 out = "-"*80
836 for h in hist:
837 if h.startswith("---"):
838 out += "\n"+h
839 else:
840 items = h.split("##")
841 date = items[0]
842 func = items[1]
843 items = items[2:]
844 out += "\n"+date+"\n"
845 out += "Function: %s\n Parameters:" % (func)
846 for i in items:
847 s = i.split("=")
848 out += "\n %s = %s" % (s[0], s[1])
849 out += "\n"+"-"*80
850 try:
851 from IPython.genutils import page as pager
852 except ImportError:
853 from pydoc import pager
854 pager(out)
855 return
856
857 #
858 # Maths business
859 #
860
861 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
862 """
863 Return the (time) weighted average of a scan.
864 Note:
865 in channels only - align if necessary
866 Parameters:
867 mask: an optional mask (only used for 'var' and 'tsys'
868 weighting)
869 scanav: True averages each scan separately
870 False (default) averages all scans together,
871 weight: Weighting scheme.
872 'none' (mean, no weight)
873 'var' (1/var(spec) weighted)
874 'tsys' (1/Tsys**2 weighted)
875 'tint' (integration time weighted)
876 'tintsys' (Tint/Tsys**2)
877 'median' (median averaging)
878 The default is 'tint'
879 align: align the spectra in velocity before averaging. It takes
880 the time of the first spectrum as reference time.
881 Example:
882 # time average the scantable without using a mask
883 newscan = scan.average_time()
884 """
885 varlist = vars()
886 if weight is None: weight = 'TINT'
887 if mask is None: mask = ()
888 if scanav: scanav = "SCAN"
889 else: scanav = "NONE"
890 scan = (self, )
891 try:
892 if align:
893 scan = (self.freq_align(insitu=False), )
894 s = None
895 if weight.upper() == 'MEDIAN':
896 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
897 scanav))
898 else:
899 s = scantable(self._math._average(scan, mask, weight.upper(),
900 scanav))
901 except RuntimeError, msg:
902 if rcParams['verbose']:
903 print msg
904 return
905 else: raise
906 s._add_history("average_time", varlist)
907 print_log()
908 return s
909
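    # Usage sketch (illustrative):
    #
    #     av = scan.average_time(scanav=True)   # one average per scan
    #     av = scan.average_time(align=True)    # align in velocity first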
910 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
911 """
912 Return a scan where all spectra are converted to either
913 Jansky or Kelvin depending upon the flux units of the scan table.
914 By default the function tries to look the values up internally.
915 If it can't find them (or if you want to over-ride), you must
916 specify EITHER jyperk OR eta (and D which it will try to look up
917 also if you don't set it). jyperk takes precedence if you set both.
918 Parameters:
919 jyperk: the Jy / K conversion factor
920 eta: the aperture efficiency
921 d: the geometric diameter (metres)
922 insitu: if False a new scantable is returned.
923 Otherwise, the scaling is done in-situ
924 The default is taken from .asaprc (False)
925 allaxes: if True apply to all spectra. Otherwise
926 apply only to the selected (beam/pol/if)spectra only
927 The default is taken from .asaprc (True if none)
928 """
929 if insitu is None: insitu = rcParams['insitu']
930 self._math._setinsitu(insitu)
931 varlist = vars()
932 if jyperk is None: jyperk = -1.0
933 if d is None: d = -1.0
934 if eta is None: eta = -1.0
935 s = scantable(self._math._convertflux(self, d, eta, jyperk))
936 s._add_history("convert_flux", varlist)
937 print_log()
938 if insitu: self._assign(s)
939 else: return s
940
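    # Usage sketch (illustrative; the numbers are made up):
    #
    #     jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
    #     # or via aperture efficiency and dish diameter
    #     jyscan = scan.convert_flux(eta=0.58, d=64.0, insitu=False)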
941 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
942 """
943 Return a scan after applying a gain-elevation correction.
944 The correction can be made via either a polynomial or a
945 table-based interpolation (and extrapolation if necessary).
946 You specify polynomial coefficients, an ascii table or neither.
947 If you specify neither, then a polynomial correction will be made
948 with built in coefficients known for certain telescopes (an error
949 will occur if the instrument is not known).
950 The data and Tsys are *divided* by the scaling factors.
951 Parameters:
952 poly: Polynomial coefficients (default None) to compute a
953 gain-elevation correction as a function of
954 elevation (in degrees).
955 filename: The name of an ascii file holding correction factors.
956 The first row of the ascii file must give the column
957 names and these MUST include columns
958 "ELEVATION" (degrees) and "FACTOR" (multiply data
959 by this) somewhere.
960 The second row must give the data type of the
961 column. Use 'R' for Real and 'I' for Integer.
962 An example file would be
963 (actual factors are arbitrary) :
964
965 TIME ELEVATION FACTOR
966 R R R
967 0.1 0 0.8
968 0.2 20 0.85
969 0.3 40 0.9
970 0.4 60 0.85
971 0.5 80 0.8
972 0.6 90 0.75
973 method: Interpolation method when correcting from a table.
974 Values are "nearest", "linear" (default), "cubic"
975 and "spline"
976 insitu: if False a new scantable is returned.
977 Otherwise, the scaling is done in-situ
978 The default is taken from .asaprc (False)
979 """
980
981 if insitu is None: insitu = rcParams['insitu']
982 self._math._setinsitu(insitu)
983 varlist = vars()
984 if poly is None:
985 poly = ()
986 from os.path import expandvars
987 filename = expandvars(filename)
988 s = scantable(self._math._gainel(self, poly, filename, method))
989 s._add_history("gain_el", varlist)
990 print_log()
991 if insitu: self._assign(s)
992 else: return s
993
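    # Usage sketch (illustrative; coefficients and file name are made up):
    #
    #     gscan = scan.gain_el(poly=[0.72, 0.01, -1.0e-4], insitu=False)
    #     # or interpolate factors from an ascii table laid out as above
    #     gscan = scan.gain_el(filename='gainel.txt', method='linear',
    #                          insitu=False)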
994 def freq_align(self, reftime=None, method='cubic', insitu=None):
995 """
996 Return a scan where all rows have been aligned in frequency/velocity.
997 The alignment frequency frame (e.g. LSRK) is that set by function
998 set_freqframe.
999 Parameters:
1000 reftime: reference time to align at. By default, the time of
1001 the first row of data is used.
1002 method: Interpolation method for regridding the spectra.
1003 Choose from "nearest", "linear", "cubic" (default)
1004 and "spline"
1005 insitu: if False a new scantable is returned.
1006 Otherwise, the scaling is done in-situ
1007 The default is taken from .asaprc (False)
1008 """
1009 if insitu is None: insitu = rcParams["insitu"]
1010 self._math._setinsitu(insitu)
1011 varlist = vars()
1012 if reftime is None: reftime = ""
1013 s = scantable(self._math._freq_align(self, reftime, method))
1014 s._add_history("freq_align", varlist)
1015 print_log()
1016 if insitu: self._assign(s)
1017 else: return s
1018
1019 def opacity(self, tau, insitu=None):
1020 """
1021 Apply an opacity correction. The data
1022 and Tsys are multiplied by the correction factor.
1023 Parameters:
1024 tau: Opacity from which the correction factor is
1025 exp(tau*ZD)
1026 where ZD is the zenith-distance
1027 insitu: if False a new scantable is returned.
1028 Otherwise, the scaling is done in-situ
1029 The default is taken from .asaprc (False)
1030 """
1031 if insitu is None: insitu = rcParams['insitu']
1032 self._math._setinsitu(insitu)
1033 varlist = vars()
1034 s = scantable(self._math._opacity(self, tau))
1035 s._add_history("opacity", varlist)
1036 print_log()
1037 if insitu: self._assign(s)
1038 else: return s
1039
1040 def bin(self, width=5, insitu=None):
1041 """
1042 Return a scan where all spectra have been binned up.
1043 width: The bin width (default=5) in pixels
1044 insitu: if False a new scantable is returned.
1045 Otherwise, the scaling is done in-situ
1046 The default is taken from .asaprc (False)
1047 """
1048 if insitu is None: insitu = rcParams['insitu']
1049 self._math._setinsitu(insitu)
1050 varlist = vars()
1051 s = scantable(self._math._bin(self, width))
1052 s._add_history("bin", varlist)
1053 print_log()
1054 if insitu: self._assign(s)
1055 else: return s
1056
1057
1058 def resample(self, width=5, method='cubic', insitu=None):
1059 """
1060 Return a scan where all spectra have been resampled onto a new channel grid.
1061 width: The bin width (default=5) in pixels
1062 method: Interpolation method for the resampling.
1063 Values are "nearest", "linear", "cubic" (default)
1064 and "spline"
1065 insitu: if False a new scantable is returned.
1066 Otherwise, the scaling is done in-situ
1067 The default is taken from .asaprc (False)
1068 """
1069 if insitu is None: insitu = rcParams['insitu']
1070 self._math._setinsitu(insitu)
1071 varlist = vars()
1072 s = scantable(self._math._resample(self, method, width))
1073 s._add_history("resample", varlist)
1074 print_log()
1075 if insitu: self._assign(s)
1076 else: return s
1077
1078
1079 def average_pol(self, mask=None, weight='none'):
1080 """
1081 Average the Polarisations together.
1082 Parameters:
1083 mask: An optional mask defining the region, where the
1084 averaging will be applied. The output will have all
1085 specified points masked.
1086 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1087 weighted), or 'tsys' (1/Tsys**2 weighted)
1088 """
1089 varlist = vars()
1090 if mask is None:
1091 mask = ()
1092 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1093 s._add_history("average_pol", varlist)
1094 print_log()
1095 return s
1096
1097 def average_beam(self, mask=None, weight='none'):
1098 """
1099 Average the Beams together.
1100 Parameters:
1101 mask: An optional mask defining the region, where the
1102 averaging will be applied. The output will have all
1103 specified points masked.
1104 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1105 weighted), or 'tsys' (1/Tsys**2 weighted)
1106 """
1107 varlist = vars()
1108 if mask is None:
1109 mask = ()
1110 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1111 s._add_history("average_beam", varlist)
1112 print_log()
1113 return s
1114
1115 def convert_pol(self, poltype=None):
1116 """
1117 Convert the data to a different polarisation type.
1118 Parameters:
1119 poltype: The new polarisation type. Valid types are:
1120 "linear", "stokes" and "circular"
1121 """
1122 varlist = vars()
1123 try:
1124 s = scantable(self._math._convertpol(self, poltype))
1125 except RuntimeError, msg:
1126 if rcParams['verbose']:
1127 print msg
1128 return
1129 else:
1130 raise
1131 s._add_history("convert_pol", varlist)
1132 print_log()
1133 return s
1134
1135 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1136 """
1137 Smooth the spectrum by the specified kernel (conserving flux).
1138 Parameters:
1139 scan: The input scan
1140 kernel: The type of smoothing kernel. Select from
1141 'hanning' (default), 'gaussian' and 'boxcar'.
1142 The first three characters are sufficient.
1143 width: The width of the kernel in pixels. For hanning this is
1144 ignored; otherwise it defaults to 5 pixels.
1145 For 'gaussian' it is the Full Width Half
1146 Maximum. For 'boxcar' it is the full width.
1147 insitu: if False a new scantable is returned.
1148 Otherwise, the scaling is done in-situ
1149 The default is taken from .asaprc (False)
1150 Example:
1151 none
1152 """
1153 if insitu is None: insitu = rcParams['insitu']
1154 self._math._setinsitu(insitu)
1155 varlist = vars()
1156 s = scantable(self._math._smooth(self, kernel.lower(), width))
1157 s._add_history("smooth", varlist)
1158 print_log()
1159 if insitu: self._assign(s)
1160 else: return s
1161
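    # Usage sketch (illustrative):
    #
    #     sm = scan.smooth(kernel='boxcar', width=3, insitu=False)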
1162
1163 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1164 """
1165 Return a scan which has been baselined (all rows) by a polynomial.
1166 Parameters:
1167 scan: a scantable
1168 mask: an optional mask
1169 order: the order of the polynomial (default is 0)
1170 plot: plot the fit and the residual. In this mode each
1171 individual fit has to be approved by typing 'y'
1172 or 'n'
1173 insitu: if False a new scantable is returned.
1174 Otherwise, the scaling is done in-situ
1175 The default is taken from .asaprc (False)
1176 Example:
1177 # return a scan baselined by a third order polynomial,
1178 # not using a mask
1179 bscan = scan.poly_baseline(order=3)
1180 """
1181 if insitu is None: insitu = rcParams['insitu']
1182 varlist = vars()
1183 if mask is None:
1184 mask = list(NUM.ones(self.nchan(-1)))
1185 from asap.asapfitter import fitter
1186 f = fitter()
1187 f.set_scan(self, mask)
1188 f.set_function(poly=order)
1189 s = f.auto_fit(insitu, plot=plot)
1190 s._add_history("poly_baseline", varlist)
1191 print_log()
1192 if insitu: self._assign(s)
1193 else: return s
1194
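    # Usage sketch (illustrative; the line-free windows are made up):
    #
    #     scan.set_unit('channel')
    #     msk = scan.create_mask([100, 400], [600, 900])  # line-free regions
    #     bscan = scan.poly_baseline(mask=msk, order=2)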
1195 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1196 threshold=3, plot=False, insitu=None):
1197 """
1198 Return a scan which has been baselined (all rows) by a polynomial.
1199 Spectral lines are detected first using linefinder and masked out
1200 to avoid them affecting the baseline solution.
1201
1202 Parameters:
1203 mask: an optional mask retrieved from scantable
1204 edge: an optional number of channels to drop at
1205 the edge of the spectrum. If only one value is
1206 specified, the same number will be dropped from
1207 both sides of the spectrum. Default is to keep
1208 all channels. Nested tuples represent individual
1209 edge selections for different IFs (the number of spectral
1210 channels can differ between IFs)
1211 order: the order of the polynomial (default is 0)
1212 threshold: the threshold used by line finder. It is better to
1213 keep it large as only strong lines affect the
1214 baseline solution.
1215 plot: plot the fit and the residual. In this mode each
1216 individual fit has to be approved by typing 'y'
1217 or 'n'
1218 insitu: if False a new scantable is returned.
1219 Otherwise, the scaling is done in-situ
1220 The default is taken from .asaprc (False)
1221
1222 Example:
1223 scan2=scan.auto_poly_baseline(order=7)
1224 """
1225 if insitu is None: insitu = rcParams['insitu']
1226 varlist = vars()
1227 from asap.asapfitter import fitter
1228 from asap.asaplinefind import linefinder
1229 from asap import _is_sequence_or_number as _is_valid
1230
1231 # check whether edge is set up for each IF individually
1232 individualedge = False;
1233 if len(edge) > 1:
1234 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1235 individualedge = True;
1236
1237 if not _is_valid(edge, int) and not individualedge:
1238 raise ValueError, "Parameter 'edge' has to be an integer or a \
1239 pair of integers specified as a tuple. Nested tuples are allowed \
1240 to make individual selection for different IFs."
1241
1242 curedge = (0, 0)
1243 if individualedge:
1244 for edgepar in edge:
1245 if not _is_valid(edgepar, int):
1246 raise ValueError, "Each element of the 'edge' tuple has \
1247 to be a pair of integers or an integer."
1248 else:
1249 curedge = edge;
1250
1251 # setup fitter
1252 f = fitter()
1253 f.set_function(poly=order)
1254
1255 # setup line finder
1256 fl = linefinder()
1257 fl.set_options(threshold=threshold)
1258
1259 if not insitu:
1260 workscan = self.copy()
1261 else:
1262 workscan = self
1263
1264 fl.set_scan(workscan)
1265
1266 rows = range(workscan.nrow())
1267 asaplog.push("Processing:")
1268 for r in rows:
1269 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1270 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1271 workscan.getpol(r), workscan.getcycle(r))
1272 asaplog.push(msg, False)
1273
1274 # figure out edge parameter
1275 if individualedge:
1276 if len(edge) <= workscan.getif(r):
1277 raise RuntimeError, "Number of edge elements appears to " \
1278 "be less than the number of IFs"
1279 curedge = edge[workscan.getif(r)]
1280
1281 # setup line finder
1282 fl.find_lines(r, mask, curedge)
1283 f.set_scan(workscan, fl.get_mask())
1284 f.x = workscan._getabcissa(r)
1285 f.y = workscan._getspectrum(r)
1286 f.data = None
1287 f.fit()
1288 x = f.get_parameters()
1289 if plot:
1290 f.plot(residual=True)
1291 x = raw_input("Accept fit ( [y]/n ): ")
1292 if x.upper() == 'N':
1293 continue
1294 workscan._setspectrum(f.fitter.getresidual(), r)
1295 if plot:
1296 f._p.unmap()
1297 f._p = None
1298 workscan._add_history("auto_poly_baseline", varlist)
1299 if insitu:
1300 self._assign(workscan)
1301 else:
1302 return workscan
1303
1304 def rotate_linpolphase(self, angle):
1305 """
1306 Rotate the phase of the complex polarization O=Q+iU correlation.
1307 This is always done in situ in the raw data. So if you call this
1308 function more than once then each call rotates the phase further.
1309 Parameters:
1310 angle: The angle (degrees) to rotate (add) by.
1311 Examples:
1312 scan.rotate_linpolphase(2.3)
1313 """
1314 varlist = vars()
1315 self._math._rotate_linpolphase(self, angle)
1316 self._add_history("rotate_linpolphase", varlist)
1317 print_log()
1318 return
1319
1320
1321 def rotate_xyphase(self, angle):
1322 """
1323 Rotate the phase of the XY correlation. This is always done in situ
1324 in the data. So if you call this function more than once
1325 then each call rotates the phase further.
1326 Parameters:
1327 angle: The angle (degrees) to rotate (add) by.
1328 Examples:
1329 scan.rotate_xyphase(2.3)
1330 """
1331 varlist = vars()
1332 self._math._rotate_xyphase(self, angle)
1333 self._add_history("rotate_xyphase", varlist)
1334 print_log()
1335 return
1336
1337 def swap_linears(self):
1338 """
1339 Swap the linear polarisations XX and YY
1340 """
1341 varlist = vars()
1342 self._math._swap_linears(self)
1343 self._add_history("swap_linears", varlist)
1344 print_log()
1345 return
1346
1347 def invert_phase(self):
1348 """
1349 Invert the phase of the complex polarisation
1350 """
1351 varlist = vars()
1352 self._math._invert_phase(self)
1353 self._add_history("invert_phase", varlist)
1354 print_log()
1355 return
1356
1357 def add(self, offset, insitu=None):
1358 """
1359 Return a scan where all spectra have the offset added
1360 Parameters:
1361 offset: the offset
1362 insitu: if False a new scantable is returned.
1363 Otherwise, the scaling is done in-situ
1364 The default is taken from .asaprc (False)
1365 """
1366 if insitu is None: insitu = rcParams['insitu']
1367 self._math._setinsitu(insitu)
1368 varlist = vars()
1369 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1370 s._add_history("add", varlist)
1371 print_log()
1372 if insitu:
1373 self._assign(s)
1374 else:
1375 return s
1376
1377 def scale(self, factor, tsys=True, insitu=None, ):
1378 """
1379 Return a scan where all spectra are scaled by the given 'factor'
1380 Parameters:
1381 factor: the scaling factor
1382 insitu: if False a new scantable is returned.
1383 Otherwise, the scaling is done in-situ
1384 The default is taken from .asaprc (False)
1385 tsys: if True (default) then apply the operation to Tsys
1386 as well as the data
1387 """
1388 if insitu is None: insitu = rcParams['insitu']
1389 self._math._setinsitu(insitu)
1390 varlist = vars()
1391 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1392 s._add_history("scale", varlist)
1393 print_log()
1394 if insitu:
1395 self._assign(s)
1396 else:
1397 return s
1398
1399 def auto_quotient(self, mode='time', preserve=True):
1400 """
1401 This function allows you to build quotients automatically.
1402 It assumes the observation has the same number of
1403 "ons" and "offs".
1404 It will support "closest off in time" in the future.
1405 Parameters:
1406 mode: the on/off detection mode; only 'time' (the default)
1407 is currently accepted. The 'suffix' scheme identifies
1408 'off' scans by the trailing '_R' (Mopra/Parkes) or
1409 '_e'/'_w' (Tid)
1410 preserve: you can preserve (default) the continuum or
1411 remove it. The equations used are
1412 preserve: Output = Toff * (on/off) - Toff
1413 remove: Output = Toff * (on/off) - Ton
1414 """
1415 modes = ["time"]
1416 if not mode in modes:
1417 msg = "please provide valid mode. Valid modes are %s" % (modes)
1418 raise ValueError(msg)
1419 varlist = vars()
1420 s = scantable(self._math._auto_quotient(self, mode, preserve))
1421 s._add_history("auto_quotient", varlist)
1422 print_log()
1423 return s
1424
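    # Usage sketch (illustrative):
    #
    #     q = scan.auto_quotient()                 # keep the continuum
    #     q = scan.auto_quotient(preserve=False)   # remove the continuum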
1425 def mx_quotient(self, mask = None, weight='median', preserve=True):
1426 """
1427 Form a quotient using "off" beams when observing in "MX" mode.
1428 Parameters:
1429 mask: an optional mask to be used when weight == 'stddev'
1430 weight: How to average the off beams. Default is 'median'.
1431 preserve: you can preserve (default) the continuum or
1432 remove it. The equations used are
1433 preserve: Output = Toff * (on/off) - Toff
1434 remove: Output = Toff * (on/off) - Ton
1435 """
1436 if mask is None: mask = ()
1437 varlist = vars()
1438 on = scantable(self._math._mx_extract(self, 'on'))
1439 preoff = scantable(self._math._mx_extract(self, 'off'))
1440 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1441 from asapmath import quotient
1442 q = quotient(on, off, preserve)
1443 q._add_history("mx_quotient", varlist)
1444 print_log()
1445 return q
1446
1447 def freq_switch(self, insitu=None):
1448 """
1449 Apply frequency switching to the data.
1450 Parameters:
1451 insitu: if False a new scantable is returned.
1452 Otherwise, the switching is done in-situ
1453 The default is taken from .asaprc (False)
1454 Example:
1455 none
1456 """
1457 if insitu is None: insitu = rcParams['insitu']
1458 self._math._setinsitu(insitu)
1459 varlist = vars()
1460 s = scantable(self._math._freqswitch(self))
1461 s._add_history("freq_switch", varlist)
1462 print_log()
1463 if insitu: self._assign(s)
1464 else: return s
1465
1466 def recalc_azel(self):
1467 """
1468 Recalculate the azimuth and elevation for each position.
1469 Parameters:
1470 none
1471 Example:
1472 """
1473 varlist = vars()
1474 self._recalcazel()
1475 self._add_history("recalc_azel", varlist)
1476 print_log()
1477 return
1478
1479 def __add__(self, other):
1480 varlist = vars()
1481 s = None
1482 if isinstance(other, scantable):
1483 print "scantable + scantable NYI"
1484 return
1485 elif isinstance(other, float):
1486 s = scantable(self._math._unaryop(self, other, "ADD", False))
1487 else:
1488 raise TypeError("Other input is not a scantable or float value")
1489 s._add_history("operator +", varlist)
1490 print_log()
1491 return s
1492
1493 def __sub__(self, other):
1494 """
1495 implicit on all axes and on Tsys
1496 """
1497 varlist = vars()
1498 s = None
1499 if isinstance(other, scantable):
1500 print "scantable - scantable NYI"
1501 return
1502 elif isinstance(other, float):
1503 s = scantable(self._math._unaryop(self, other, "SUB", False))
1504 else:
1505 raise TypeError("Other input is not a scantable or float value")
1506 s._add_history("operator -", varlist)
1507 print_log()
1508 return s
1509
1510 def __mul__(self, other):
1511 """
1512 implicit on all axes and on Tsys
1513 """
1514 varlist = vars()
1515 s = None
1516 if isinstance(other, scantable):
1517 print "scantable * scantable NYI"
1518 return
1519 elif isinstance(other, float):
1520 s = scantable(self._math._unaryop(self, other, "MUL", False))
1521 else:
1522 raise TypeError("Other input is not a scantable or float value")
1523 s._add_history("operator *", varlist)
1524 print_log()
1525 return s
1526
1527
1528 def __div__(self, other):
1529 """
1530 implicit on all axes and on Tsys
1531 """
1532 varlist = vars()
1533 s = None
1534 if isinstance(other, scantable):
1535 print "scantable / scantable NYI"
1536 return
1537 elif isinstance(other, float):
1538 if other == 0.0:
1539 raise ZeroDivisionError("Dividing by zero is not recommended")
1540 s = scantable(self._math._unaryop(self, other, "DIV", False))
1541 else:
1542 raise TypeError("Other input is not a scantable or float value")
1543 s._add_history("operator /", varlist)
1544 print_log()
1545 return s
1546
1547 def get_fit(self, row=0):
1548 """
1549 Print or return the stored fits for a row in the scantable
1550 Parameters:
1551 row: the row which the fit has been applied to.
1552 """
1553 if row >= self.nrow():
1554 return
1555 from asap.asapfit import asapfit
1556 fit = asapfit(self._getfit(row))
1557 if rcParams['verbose']:
1558 print fit
1559 return
1560 else:
1561 return fit.as_dict()
1562
1563 def _add_history(self, funcname, parameters):
1564 # create date
1565 sep = "##"
1566 from datetime import datetime
1567 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1568 hist = dstr+sep
1569 hist += funcname+sep#cdate+sep
1570 if parameters.has_key('self'): del parameters['self']
1571 for k, v in parameters.iteritems():
1572 if type(v) is dict:
1573 for k2, v2 in v.iteritems():
1574 hist += k2
1575 hist += "="
1576 if isinstance(v2, scantable):
1577 hist += 'scantable'
1578 elif k2 == 'mask':
1579 if isinstance(v2, list) or isinstance(v2, tuple):
1580 hist += str(self._zip_mask(v2))
1581 else:
1582 hist += str(v2)
1583 else:
1584 hist += str(v2)
1585 else:
1586 hist += k
1587 hist += "="
1588 if isinstance(v, scantable):
1589 hist += 'scantable'
1590 elif k == 'mask':
1591 if isinstance(v, list) or isinstance(v, tuple):
1592 hist += str(self._zip_mask(v))
1593 else:
1594 hist += str(v)
1595 else:
1596 hist += str(v)
1597 hist += sep
1598 hist = hist[:-2] # remove trailing '##'
1599 self._addhistory(hist)
1600
1601
1602 def _zip_mask(self, mask):
1603 mask = list(mask)
1604 i = 0
1605 segments = []
1606 while mask[i:].count(1):
1607 i += mask[i:].index(1)
1608 if mask[i:].count(0):
1609 j = i + mask[i:].index(0)
1610 else:
1611 j = len(mask)
1612 segments.append([i, j])
1613 i = j
1614 return segments
1615
1616 def _get_ordinate_label(self):
1617 fu = "("+self.get_fluxunit()+")"
1618 import re
1619 lbl = "Intensity"
1620 if re.match(".K.", fu):
1621 lbl = "Brightness Temperature "+ fu
1622 elif re.match(".Jy.", fu):
1623 lbl = "Flux density "+ fu
1624 return lbl
1625
1626 def _check_ifs(self):
1627 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1628 nchans = filter(lambda t: t > 0, nchans)
1629 return (sum(nchans)/len(nchans) == nchans[0])
1630
1631 def _fill(self, names, unit, average):
1632 import os
1633 from asap._asap import stfiller
1634 first = True
1635 fullnames = []
1636 for name in names:
1637 name = os.path.expandvars(name)
1638 name = os.path.expanduser(name)
1639 if not os.path.exists(name):
1640 msg = "File '%s' does not exists" % (name)
1641 if rcParams['verbose']:
1642 asaplog.push(msg)
1643 print asaplog.pop().strip()
1644 return
1645 raise IOError(msg)
1646 fullnames.append(name)
1647 if average:
1648 asaplog.push('Auto averaging integrations')
1649 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1650 for name in fullnames:
1651 tbl = Scantable(stype)
1652 r = stfiller(tbl)
1653 msg = "Importing %s..." % (name)
1654 asaplog.push(msg, False)
1655 print_log()
1656 r._open(name, -1, -1)
1657 r._read()
1658 #tbl = r._getdata()
1659 if average:
1660 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1661 #tbl = tbl2
1662 if not first:
1663 tbl = self._math._merge([self, tbl])
1664 #tbl = tbl2
1665 Scantable.__init__(self, tbl)
1666 r._close()
1667 del r, tbl
1668 first = False
1669 if unit is not None:
1670 self.set_fluxunit(unit)
1671 self.set_freqframe(rcParams['scantable.freqframe'])
1672 #self._add_history("scantable", varlist)
1673