source: trunk/python/scantable.py@1178

Last change on this file since 1178 was 1175, checked in by mar637, 18 years ago

various changes to support the pylab plotter 'xyplotter'

[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1134]6from asap import NUM
[1153]7from asap import linecatalog
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26         average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1118]35 #varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62                    msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[714]74 print_log()
[102]75
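    # Illustrative usage sketch (not part of the repository file); the file name
    # 'mydata.rpf' is a placeholder for any rpfits/sdfits/ms file or asap table:
    #   from asap import scantable
    #   scan = scantable('mydata.rpf', average=True, unit='K')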
[876]76 def save(self, name=None, format=None, overwrite=False):
[116]77 """
[710]78 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
[280]79 Image FITS or MS2 format.
[116]80 Parameters:
[1093]81            name:        the name of the output file. For format "ASCII"
82 this is the root file name (data in 'name'.txt
[497]83 and header in 'name'_header.txt)
[116]84 format: an optional file format. Default is ASAP.
[280]85 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]86 'SDFITS' (save as SDFITS file)
[200]87 'ASCII' (saves as ascii text file)
[226]88 'MS2' (saves as an aips++
89 MeasurementSet V2)
[411]90            overwrite:   whether to overwrite the file if it already exists.
[256]91                         The default (False) is to return with a warning
[411]92 without writing the output. USE WITH CARE.
[116]93 Example:
94 scan.save('myscan.asap')
[1118]95 scan.save('myscan.sdfits', 'SDFITS')
[116]96 """
[411]97 from os import path
[226]98 if format is None: format = rcParams['scantable.save']
[256]99 suffix = '.'+format.lower()
[1118]100 if name is None or name == "":
[256]101 name = 'scantable'+suffix
[718]102 msg = "No filename given. Using default name %s..." % name
103 asaplog.push(msg)
[411]104 name = path.expandvars(name)
[256]105 if path.isfile(name) or path.isdir(name):
106 if not overwrite:
[718]107 msg = "File %s exists." % name
108 if rcParams['verbose']:
109 print msg
110 return
111 else:
112 raise IOError(msg)
[451]113 format2 = format.upper()
114 if format2 == 'ASAP':
[116]115 self._save(name)
116 else:
[989]117 from asap._asap import stwriter as stw
[1118]118 writer = stw(format2)
119 writer.write(self, name)
[718]120 print_log()
[116]121 return
122
[102]123 def copy(self):
124 """
125 Return a copy of this scantable.
126 Parameters:
[113]127 none
[102]128 Example:
129 copiedscan = scan.copy()
130 """
[876]131 sd = scantable(Scantable._copy(self))
[113]132 return sd
133
[1093]134 def drop_scan(self, scanid=None):
135 """
136 Return a new scantable where the specified scan number(s) has(have)
137 been dropped.
138 Parameters:
139 scanid: a (list of) scan number(s)
140 """
141 from asap import _is_sequence_or_number as _is_valid
142 from asap import _to_list
143 from asap import unique
144 if not _is_valid(scanid):
145 if rcParams['verbose']:
146 print "Please specify a scanno to drop from the scantable"
147 return
148 else:
149 raise RuntimeError("No scan given")
150 try:
151 scanid = _to_list(scanid)
152 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153 for sid in scanid: allscans.remove(sid)
[1118]154 if len(allscans) == 0:
155 raise ValueError("Can't remove all scans")
[1093]156 except ValueError:
157 if rcParams['verbose']:
158 print "Couldn't find any match."
159 return
160 else: raise
161 try:
162 bsel = self.get_selection()
163 sel = selector()
164 sel.set_scans(allscans)
165 self.set_selection(bsel+sel)
166 scopy = self._copy()
167 self.set_selection(bsel)
168 return scantable(scopy)
169 except RuntimeError:
[1118]170 if rcParams['verbose']:
171 print "Couldn't find any match."
172 else:
173 raise
[1093]174
175
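    # Illustrative sketch (scan numbers are placeholders): drop scans 1 and 3
    # and continue working on the returned copy:
    #   trimmed = scan.drop_scan([1, 3])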
[102]176 def get_scan(self, scanid=None):
177 """
178 Return a specific scan (by scanno) or collection of scans (by
179 source name) in a new scantable.
180 Parameters:
[513]181 scanid: a (list of) scanno or a source name, unix-style
182 patterns are accepted for source name matching, e.g.
183 '*_R' gets all 'ref scans
[102]184 Example:
[513]185 # get all scans containing the source '323p459'
186 newscan = scan.get_scan('323p459')
187 # get all 'off' scans
188 refscans = scan.get_scan('*_R')
 189            # get a subset of scans by scanno (as listed in scan.summary())
[1118]190 newscan = scan.get_scan([0, 2, 7, 10])
[102]191 """
192 if scanid is None:
[718]193 if rcParams['verbose']:
[1118]194 print "Please specify a scan no or name to " \
195 "retrieve from the scantable"
[718]196 return
197 else:
198 raise RuntimeError("No scan given")
199
[102]200 try:
[946]201 bsel = self.get_selection()
202 sel = selector()
[102]203 if type(scanid) is str:
[946]204 sel.set_name(scanid)
205 self.set_selection(bsel+sel)
[876]206 scopy = self._copy()
[946]207 self.set_selection(bsel)
[876]208 return scantable(scopy)
[102]209 elif type(scanid) is int:
[946]210 sel.set_scans([scanid])
211 self.set_selection(bsel+sel)
[876]212 scopy = self._copy()
[946]213 self.set_selection(bsel)
[876]214 return scantable(scopy)
[381]215 elif type(scanid) is list:
[946]216 sel.set_scans(scanid)
217 self.set_selection(sel)
[876]218 scopy = self._copy()
[946]219 self.set_selection(bsel)
[876]220 return scantable(scopy)
[381]221 else:
[718]222                msg = "Illegal scanid type, use 'int' or 'list' of ints."
223 if rcParams['verbose']:
224 print msg
225 else:
226 raise TypeError(msg)
[102]227 except RuntimeError:
[718]228 if rcParams['verbose']: print "Couldn't find any match."
229 else: raise
[102]230
231 def __str__(self):
[1118]232 return Scantable._summary(self, True)
[102]233
[976]234 def summary(self, filename=None):
[102]235 """
236 Print a summary of the contents of this scantable.
237 Parameters:
 238            filename:    the name of a file to write the output to
239 Default - no file output
[381]240 verbose: print extra info such as the frequency table
241 The default (False) is taken from .asaprc
[102]242 """
[976]243 info = Scantable._summary(self, True)
244 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]245 if filename is not None:
[256]246            if filename == "":
247 filename = 'scantable_summary.txt'
[415]248 from os.path import expandvars, isdir
[411]249 filename = expandvars(filename)
[415]250 if not isdir(filename):
[413]251 data = open(filename, 'w')
252 data.write(info)
253 data.close()
254 else:
[718]255 msg = "Illegal file name '%s'." % (filename)
256 if rcParams['verbose']:
257 print msg
258 else:
259 raise IOError(msg)
260 if rcParams['verbose']:
[794]261 try:
262 from IPython.genutils import page as pager
263 except ImportError:
264 from pydoc import pager
265 pager(info)
[718]266 else:
267 return info
[710]268
[946]269
270 def get_selection(self):
271 """
[1005]272 Get the selection object currently set on this scantable.
273 Parameters:
274 none
275 Example:
276 sel = scan.get_selection()
277 sel.set_ifs(0) # select IF 0
278 scan.set_selection(sel) # apply modified selection
[946]279 """
280 return selector(self._getselection())
281
[1005]282 def set_selection(self, selection=selector()):
[946]283 """
[1005]284 Select a subset of the data. All following operations on this scantable
 285        are only applied to this selection.
286 Parameters:
287 selection: a selector object (default unset the selection)
288 Examples:
289 sel = selector() # create a selection object
[1118]290           sel.set_scans([0, 3])    # select SCANNO 0 and 3
[1005]291 scan.set_selection(sel) # set the selection
 292            scan.summary()           # will only print summary of scanno 0 and 3
293 scan.set_selection() # unset the selection
[946]294 """
295 self._setselection(selection)
296
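    # Illustrative sketch of combining an existing selection with a new one,
    # following the pattern used internally by get_scan(); the IF number is a
    # placeholder:
    #   base = scan.get_selection()
    #   sel = selector()
    #   sel.set_ifs([0])
    #   scan.set_selection(base + sel)   # selector objects can be added
    #   scan.set_selection(base)         # restore the previous selection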
[876]297 def stats(self, stat='stddev', mask=None):
[102]298 """
[135]299 Determine the specified statistic of the current beam/if/pol
[102]300 Takes a 'mask' as an optional parameter to specify which
301 channels should be excluded.
302 Parameters:
[135]303 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
304 'var', 'stddev', 'avdev', 'rms', 'median'
305 mask: an optional mask specifying where the statistic
[102]306 should be determined.
307 Example:
[113]308 scan.set_unit('channel')
[1118]309 msk = scan.create_mask([100, 200], [500, 600])
[135]310            scan.stats(stat='mean', mask=msk)
[102]311 """
 312        if mask is None:
[876]313 mask = []
[1118]314 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]315 if not self._check_ifs():
[1118]316 raise ValueError("Cannot apply mask as the IFs have different "
 317                             "number of channels. Please use set_selection() "
318 "to select individual IFs")
[256]319
[876]320 statvals = self._math._stats(self, mask, stat)
321 out = ''
322 axes = []
323 for i in range(self.nrow()):
324 axis = []
325 axis.append(self.getscan(i))
326 axis.append(self.getbeam(i))
327 axis.append(self.getif(i))
328 axis.append(self.getpol(i))
329 axis.append(self.getcycle(i))
330 axes.append(axis)
331 tm = self._gettime(i)
332 src = self._getsourcename(i)
333 out += 'Scan[%d] (%s) ' % (axis[0], src)
334 out += 'Time[%s]:\n' % (tm)
335 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
336 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
337 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
338 out += '= %3.3f\n' % (statvals[i])
339 out += "--------------------------------------------------\n"
[256]340
[876]341 if rcParams['verbose']:
342 print "--------------------------------------------------"
[1118]343 print " ", stat
[876]344 print "--------------------------------------------------"
345 print out
[1118]346 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
[876]347 'axes' : axes,
348 'data': statvals}
349 return retval
[102]350
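    # Illustrative sketch of working with the dictionary returned by stats();
    # 'rms' is just one of the supported statistics:
    #   res = scan.stats(stat='rms')
    #   for ax, val in zip(res['axes'], res['data']):
    #       print ax, val   # [scanno, beamno, ifno, polno, cycleno], statistic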
[1118]351 def stddev(self, mask=None):
[135]352 """
353 Determine the standard deviation of the current beam/if/pol
354 Takes a 'mask' as an optional parameter to specify which
355 channels should be excluded.
356 Parameters:
357 mask: an optional mask specifying where the standard
358 deviation should be determined.
359
360 Example:
361 scan.set_unit('channel')
[1118]362 msk = scan.create_mask([100, 200], [500, 600])
[135]363            scan.stddev(mask=msk)
364 """
[1118]365       return self.stats(stat='stddev', mask=mask)
[135]366
[1003]367
368 def column_names(self):
369 """
370 Return a list of column names, which can be used for selection.
371 """
372 return list(Scantable.column_names(self))
373
[876]374 def get_tsys(self):
[113]375 """
376 Return the System temperatures.
377 Parameters:
[876]378
[113]379 Returns:
[876]380 a list of Tsys values for the current selection
[113]381 """
[256]382
[876]383 return self._row_callback(self._gettsys, "Tsys")
[256]384
[876]385 def _row_callback(self, callback, label):
386 axes = []
[1118]387 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]388 out = ""
[1118]389 outvec = []
[876]390 for i in range(self.nrow()):
391 axis = []
392 axis.append(self.getscan(i))
393 axis.append(self.getbeam(i))
394 axis.append(self.getif(i))
395 axis.append(self.getpol(i))
396 axis.append(self.getcycle(i))
397 axes.append(axis)
398 tm = self._gettime(i)
399 src = self._getsourcename(i)
400 out += 'Scan[%d] (%s) ' % (axis[0], src)
401 out += 'Time[%s]:\n' % (tm)
402 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += "--------------------------------------------------\n"
408 if rcParams['verbose']:
409 print "--------------------------------------------------"
410 print " %s" % (label)
411 print "--------------------------------------------------"
412 print out
[1175]413 # disabled because the vector seems more useful
414 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
415 return outvec
[256]416
[1070]417 def _get_column(self, callback, row=-1):
418 """
419 """
420 if row == -1:
421 return [callback(i) for i in range(self.nrow())]
422 else:
423 if 0 <= row < self.nrow():
424 return callback(row)
[256]425
[1070]426
[407]427 def get_time(self, row=-1):
[113]428 """
429 Get a list of time stamps for the observations.
[181]430 Return a string for each integration in the scantable.
[113]431 Parameters:
[407]432 row: row no of integration. Default -1 return all rows
[113]433 Example:
434 none
435 """
[1175]436 from time import strptime
437 from datetime import datetime
438 times = self._get_column(self._gettime, row)
439 format = "%Y/%m/%d/%H:%M:%S"
440 if isinstance(times, list):
441 return [datetime(*strptime(i, format)[:6]) for i in times]
442 else:
443 return datetime(*strptime(times, format)[:6])
[102]444
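    # Illustrative sketch: get_time() returns datetime objects, so the usual
    # datetime attributes and arithmetic apply:
    #   times = scan.get_time()
    #   print times[0].strftime("%Y-%m-%d %H:%M:%S")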
[714]445 def get_sourcename(self, row=-1):
446 """
[794]447        Get a list of source names for the observations.
[714]448 Return a string for each integration in the scantable.
449 Parameters:
450 row: row no of integration. Default -1 return all rows
451 Example:
452 none
453 """
[1070]454 return self._get_column(self._getsourcename, row)
[714]455
[794]456 def get_elevation(self, row=-1):
457 """
458 Get a list of elevations for the observations.
459 Return a float for each integration in the scantable.
460 Parameters:
461 row: row no of integration. Default -1 return all rows
462 Example:
463 none
464 """
[1070]465 return self._get_column(self._getelevation, row)
[794]466
467 def get_azimuth(self, row=-1):
468 """
469 Get a list of azimuths for the observations.
470 Return a float for each integration in the scantable.
471 Parameters:
472 row: row no of integration. Default -1 return all rows
473 Example:
474 none
475 """
[1070]476 return self._get_column(self._getazimuth, row)
[794]477
478 def get_parangle(self, row=-1):
479 """
480 Get a list of parallactic angles for the observations.
481 Return a float for each integration in the scantable.
482 Parameters:
483 row: row no of integration. Default -1 return all rows
484 Example:
485 none
486 """
[1070]487 return self._get_column(self._getparangle, row)
[794]488
[1070]489 def get_direction(self, row=-1):
490 """
 491        Get a list of positions on the sky (direction) for the observations.
 492        Return the direction for each integration in the scantable.
493 Parameters:
494 row: row no of integration. Default -1 return all rows
495 Example:
496 none
497 """
498 return self._get_column(self._getdirection, row)
499
[102]500 def set_unit(self, unit='channel'):
501 """
502 Set the unit for all following operations on this scantable
503 Parameters:
504 unit: optional unit, default is 'channel'
[1118]505 one of '*Hz', 'km/s', 'channel', ''
[102]506 """
[484]507 varlist = vars()
[1118]508 if unit in ['', 'pixel', 'channel']:
[113]509 unit = ''
510 inf = list(self._getcoordinfo())
511 inf[0] = unit
512 self._setcoordinfo(inf)
[1118]513 self._add_history("set_unit", varlist)
[113]514
[484]515 def set_instrument(self, instr):
[358]516 """
517 Set the instrument for subsequent processing
518 Parameters:
[710]519 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]520 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]521 """
522 self._setInstrument(instr)
[1118]523       self._add_history("set_instrument", vars())
[718]524 print_log()
[358]525
[276]526 def set_doppler(self, doppler='RADIO'):
527 """
528 Set the doppler for all following operations on this scantable.
529 Parameters:
530 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
531 """
[484]532 varlist = vars()
[276]533 inf = list(self._getcoordinfo())
534 inf[2] = doppler
535 self._setcoordinfo(inf)
[1118]536 self._add_history("set_doppler", vars())
[718]537 print_log()
[710]538
[226]539 def set_freqframe(self, frame=None):
[113]540 """
541 Set the frame type of the Spectral Axis.
542 Parameters:
[591]543 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]544 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
545 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]546 Examples:
547 scan.set_freqframe('BARY')
548 """
[484]549 if frame is None: frame = rcParams['scantable.freqframe']
550 varlist = vars()
[1118]551 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
552 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]553
[989]554 if frame in valid:
[113]555 inf = list(self._getcoordinfo())
556 inf[1] = frame
557 self._setcoordinfo(inf)
[1118]558 self._add_history("set_freqframe", varlist)
[102]559 else:
[1118]560           msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
[718]561 if rcParams['verbose']:
562 print msg
563 else:
564 raise TypeError(msg)
565 print_log()
[710]566
[989]567 def set_dirframe(self, frame=""):
568 """
569 Set the frame type of the Direction on the sky.
570 Parameters:
571 frame: an optional frame type, default ''. Valid frames are:
572 'J2000', 'B1950', 'GALACTIC'
573 Examples:
574 scan.set_dirframe('GALACTIC')
575 """
576 varlist = vars()
577 try:
578 Scantable.set_dirframe(self, frame)
[1118]579 except RuntimeError, msg:
[989]580 if rcParams['verbose']:
581 print msg
582 else:
583 raise
[1118]584 self._add_history("set_dirframe", varlist)
[989]585
[113]586 def get_unit(self):
587 """
588 Get the default unit set in this scantable
589 Parameters:
590 Returns:
591 A unit string
592 """
593 inf = self._getcoordinfo()
594 unit = inf[0]
595 if unit == '': unit = 'channel'
596 return unit
[102]597
[158]598 def get_abcissa(self, rowno=0):
[102]599 """
[158]600 Get the abcissa in the current coordinate setup for the currently
[113]601 selected Beam/IF/Pol
602 Parameters:
[226]603 rowno: an optional row number in the scantable. Default is the
604 first row, i.e. rowno=0
[113]605 Returns:
[407]606            The abcissa values and their format string (as a tuple)
[113]607 """
[256]608 abc = self._getabcissa(rowno)
[710]609 lbl = self._getabcissalabel(rowno)
[718]610 print_log()
[158]611 return abc, lbl
[113]612
[1118]613 def flag(self, mask=None):
[1001]614 """
615 Flag the selected data using an optional channel mask.
616 Parameters:
617 mask: an optional channel mask, created with create_mask. Default
618 (no mask) is all channels.
619 """
620 varlist = vars()
[1118]621 if mask is None:
622 mask = []
[1001]623 try:
624 self._flag(mask)
[1118]625 except RuntimeError, msg:
[1001]626 if rcParams['verbose']:
627 print msg
628 return
629 else: raise
630 self._add_history("flag", varlist)
631
632
[113]633 def create_mask(self, *args, **kwargs):
634 """
[1118]635 Compute and return a mask based on [min, max] windows.
[189]636 The specified windows are to be INCLUDED, when the mask is
[113]637 applied.
[102]638 Parameters:
[1118]639 [min, max], [min2, max2], ...
[1024]640               Pairs of start/end points (inclusive) specifying the regions
[102]641 to be masked
[189]642 invert: optional argument. If specified as True,
643 return an inverted mask, i.e. the regions
644 specified are EXCLUDED
[513]645 row: create the mask using the specified row for
646 unit conversions, default is row=0
647 only necessary if frequency varies over rows.
[102]648 Example:
[113]649 scan.set_unit('channel')
650 a)
[1118]651 msk = scan.create_mask([400, 500], [800, 900])
[189]652 # masks everything outside 400 and 500
[113]653 # and 800 and 900 in the unit 'channel'
654
655 b)
[1118]656 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]657 # masks the regions between 400 and 500
[113]658 # and 800 and 900 in the unit 'channel'
[1024]659 c)
660 mask only channel 400
[1118]661 msk = scan.create_mask([400, 400])
[102]662 """
[513]663 row = 0
664 if kwargs.has_key("row"):
665 row = kwargs.get("row")
666 data = self._getabcissa(row)
[113]667 u = self._getcoordinfo()[0]
[718]668 if rcParams['verbose']:
[113]669 if u == "": u = "channel"
[718]670 msg = "The current mask window unit is %s" % u
[1118]671 i = self._check_ifs()
672 if not i:
[876]673 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]674 asaplog.push(msg)
[102]675 n = self.nchan()
[1134]676 msk = NUM.zeros(n)
[710]677 # test if args is a 'list' or a 'normal *args - UGLY!!!
678
[1118]679 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
680 and args or args[0]
[710]681 for window in ws:
[102]682 if (len(window) != 2 or window[0] > window[1] ):
[1118]683 raise TypeError("A window needs to be defined as [min, max]")
[102]684 for i in range(n):
[1024]685 if data[i] >= window[0] and data[i] <= window[1]:
[189]686 msk[i] = 1
[113]687 if kwargs.has_key('invert'):
688 if kwargs.get('invert'):
[1134]689 msk = NUM.logical_not(msk)
[718]690 print_log()
[102]691 return msk
[710]692
[256]693 def get_restfreqs(self):
694 """
695 Get the restfrequency(s) stored in this scantable.
696 The return value(s) are always of unit 'Hz'
697 Parameters:
698 none
699 Returns:
700 a list of doubles
701 """
702 return list(self._getrestfreqs())
[102]703
[402]704
[931]705 def set_restfreqs(self, freqs=None, unit='Hz'):
706 """
 707        Set or replace the restfrequency(s) specified.
708 If the 'freqs' argument holds a scalar,
709 then that rest frequency will be applied to all the selected
710 data. If the 'freqs' argument holds
711 a vector, then it MUST be of equal or smaller length than
712 the number of IFs (and the available restfrequencies will be
713 replaced by this vector). In this case, *all* data have
714 the restfrequency set per IF according
715 to the corresponding value you give in the 'freqs' vector.
[1118]716 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]717 IF 1 gets restfreq 2e9.
[1153]718       You can also specify the frequencies via a linecatalog.
719
[931]720 Parameters:
 721            freqs:   list of rest frequency values or string identifiers
722 unit: unit for rest frequency (default 'Hz')
[402]723
[931]724 Example:
725 # set the given restfrequency for the whole table
726 scan.set_restfreqs(freqs=1.4e9)
 727            # If the number of IFs in the data is >= 2 then IF0 gets the first
728 # value IF1 the second...
[1118]729 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]730 #set the given restfrequency for the whole table (by name)
731 scan.set_restfreqs(freqs="OH1667")
[391]732
[931]733 Note:
 734            To do more sophisticated rest-frequency setting, e.g. on a
735 source and IF basis, use scantable.set_selection() before using
736 this function.
 737            # provided your scantable is called scan
738 selection = selector()
739 selection.set_name("ORION*")
740 selection.set_ifs([1])
741 scan.set_selection(selection)
742 scan.set_restfreqs(freqs=86.6e9)
743
744 """
745 varlist = vars()
[1157]746 from asap import linecatalog
747 # simple value
[1118]748 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]749 self._setrestfreqs(freqs, "",unit)
[1157]750 # list of values
[1118]751 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]752 # list values are scalars
[1118]753 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]754 sel = selector()
755 savesel = self._getselection()
756 for i in xrange(len(freqs)):
[946]757 sel.set_ifs([i])
[931]758 self._setselection(sel)
[1153]759 self._setrestfreqs(freqs[i], "",unit)
[931]760 self._setselection(savesel)
[1157]761           # list values are dicts with 'value' and 'name' keys
762 elif isinstance(freqs[-1], dict):
763 sel = selector()
764 savesel = self._getselection()
765 for i in xrange(len(freqs)):
766 sel.set_ifs([i])
767 self._setrestfreqs(freqs[i]["value"],
768 freqs[i]["name"], "MHz")
769 self._setselection(sel)
770 self._setselection(savesel)
771 # freqs are to be taken from a linecatalog
[1153]772 elif isinstance(freqs, linecatalog):
773 sel = selector()
774 savesel = self._getselection()
775 for i in xrange(freqs.nrow()):
776 sel.set_ifs([i])
777 self._setselection(sel)
778 self._setrestfreqs(freqs.get_frequency(i),
779 freqs.get_name(i), "MHz")
780 # ensure that we are not iterating past nIF
781 if i == self.nif()-1: break
782 self._setselection(savesel)
[931]783 else:
784 return
785 self._add_history("set_restfreqs", varlist)
786
787
788
[484]789 def history(self):
790 hist = list(self._gethistory())
[794]791 out = "-"*80
[484]792 for h in hist:
[489]793 if h.startswith("---"):
[794]794 out += "\n"+h
[489]795 else:
796 items = h.split("##")
797 date = items[0]
798 func = items[1]
799 items = items[2:]
[794]800 out += "\n"+date+"\n"
801 out += "Function: %s\n Parameters:" % (func)
[489]802 for i in items:
803 s = i.split("=")
[1118]804 out += "\n %s = %s" % (s[0], s[1])
[794]805 out += "\n"+"-"*80
806 try:
807 from IPython.genutils import page as pager
808 except ImportError:
809 from pydoc import pager
810 pager(out)
[484]811 return
812
[513]813 #
814 # Maths business
815 #
816
[931]817 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]818 """
[1070]819 Return the (time) weighted average of a scan.
[513]820 Note:
[1070]821 in channels only - align if necessary
[513]822 Parameters:
823 mask: an optional mask (only used for 'var' and 'tsys'
824 weighting)
[558]825 scanav: True averages each scan separately
826 False (default) averages all scans together,
[1099]827 weight: Weighting scheme.
828 'none' (mean no weight)
829 'var' (1/var(spec) weighted)
830 'tsys' (1/Tsys**2 weighted)
831 'tint' (integration time weighted)
832 'tintsys' (Tint/Tsys**2)
833 'median' ( median averaging)
[535]834 The default is 'tint'
[931]835 align: align the spectra in velocity before averaging. It takes
836 the time of the first spectrum as reference time.
[513]837 Example:
838 # time average the scantable without using a mask
[710]839 newscan = scan.average_time()
[513]840 """
841 varlist = vars()
[976]842 if weight is None: weight = 'TINT'
[513]843 if mask is None: mask = ()
[1099]844 if scanav: scanav = "SCAN"
845 else: scanav = "NONE"
[1118]846 scan = (self, )
[989]847 try:
[1118]848 if align:
849 scan = (self.freq_align(insitu=False), )
850 s = None
851 if weight.upper() == 'MEDIAN':
852 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
853 scanav))
854 else:
855 s = scantable(self._math._average(scan, mask, weight.upper(),
856 scanav))
857 except RuntimeError, msg:
[989]858 if rcParams['verbose']:
859 print msg
860 return
861 else: raise
[1099]862 s._add_history("average_time", varlist)
[718]863 print_log()
[513]864 return s
[710]865
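    # Illustrative sketch (the weighting and alignment choices are examples only):
    #   av = scan.average_time(weight='tintsys', scanav=True, align=True)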
[876]866 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]867 """
868 Return a scan where all spectra are converted to either
869 Jansky or Kelvin depending upon the flux units of the scan table.
870 By default the function tries to look the values up internally.
871 If it can't find them (or if you want to over-ride), you must
872 specify EITHER jyperk OR eta (and D which it will try to look up
873 also if you don't set it). jyperk takes precedence if you set both.
874 Parameters:
875 jyperk: the Jy / K conversion factor
876 eta: the aperture efficiency
 877            d:           the geometric diameter (metres)
878 insitu: if False a new scantable is returned.
879 Otherwise, the scaling is done in-situ
880 The default is taken from .asaprc (False)
881 allaxes: if True apply to all spectra. Otherwise
 882                         apply only to the selected (beam/IF/pol) spectra.
883 The default is taken from .asaprc (True if none)
884 """
885 if insitu is None: insitu = rcParams['insitu']
[876]886 self._math._setinsitu(insitu)
[513]887 varlist = vars()
888 if jyperk is None: jyperk = -1.0
889 if d is None: d = -1.0
890 if eta is None: eta = -1.0
[876]891 s = scantable(self._math._convertflux(self, d, eta, jyperk))
892 s._add_history("convert_flux", varlist)
893 print_log()
894 if insitu: self._assign(s)
895 else: return s
[513]896
[876]897 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]898 """
899 Return a scan after applying a gain-elevation correction.
900 The correction can be made via either a polynomial or a
901 table-based interpolation (and extrapolation if necessary).
902 You specify polynomial coefficients, an ascii table or neither.
903 If you specify neither, then a polynomial correction will be made
904 with built in coefficients known for certain telescopes (an error
905 will occur if the instrument is not known).
906 The data and Tsys are *divided* by the scaling factors.
907 Parameters:
908 poly: Polynomial coefficients (default None) to compute a
909 gain-elevation correction as a function of
910 elevation (in degrees).
911 filename: The name of an ascii file holding correction factors.
912 The first row of the ascii file must give the column
913 names and these MUST include columns
914 "ELEVATION" (degrees) and "FACTOR" (multiply data
915 by this) somewhere.
916 The second row must give the data type of the
917 column. Use 'R' for Real and 'I' for Integer.
918 An example file would be
919 (actual factors are arbitrary) :
920
921 TIME ELEVATION FACTOR
922 R R R
923 0.1 0 0.8
924 0.2 20 0.85
925 0.3 40 0.9
926 0.4 60 0.85
927 0.5 80 0.8
928 0.6 90 0.75
929 method: Interpolation method when correcting from a table.
930 Values are "nearest", "linear" (default), "cubic"
931 and "spline"
932 insitu: if False a new scantable is returned.
933 Otherwise, the scaling is done in-situ
934 The default is taken from .asaprc (False)
935 """
936
937 if insitu is None: insitu = rcParams['insitu']
[876]938 self._math._setinsitu(insitu)
[513]939 varlist = vars()
940 if poly is None:
[1118]941 poly = ()
[513]942 from os.path import expandvars
943 filename = expandvars(filename)
[876]944 s = scantable(self._math._gainel(self, poly, filename, method))
945 s._add_history("gain_el", varlist)
946 print_log()
947 if insitu: self._assign(s)
948 else: return s
[710]949
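    # Illustrative sketch; 'gainel.txt' is a hypothetical ascii table laid out
    # as described in the docstring above:
    #   corrected = scan.gain_el(filename='gainel.txt', method='linear',
    #                            insitu=False)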
[931]950 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]951 """
952 Return a scan where all rows have been aligned in frequency/velocity.
953 The alignment frequency frame (e.g. LSRK) is that set by function
954 set_freqframe.
955 Parameters:
956 reftime: reference time to align at. By default, the time of
957 the first row of data is used.
958 method: Interpolation method for regridding the spectra.
959 Choose from "nearest", "linear", "cubic" (default)
960 and "spline"
961 insitu: if False a new scantable is returned.
962 Otherwise, the scaling is done in-situ
963 The default is taken from .asaprc (False)
964 """
[931]965 if insitu is None: insitu = rcParams["insitu"]
[876]966 self._math._setinsitu(insitu)
[513]967 varlist = vars()
[931]968 if reftime is None: reftime = ""
969 s = scantable(self._math._freq_align(self, reftime, method))
[876]970 s._add_history("freq_align", varlist)
971 print_log()
972 if insitu: self._assign(s)
973 else: return s
[513]974
[876]975 def opacity(self, tau, insitu=None):
[513]976 """
977 Apply an opacity correction. The data
978 and Tsys are multiplied by the correction factor.
979 Parameters:
980 tau: Opacity from which the correction factor is
981 exp(tau*ZD)
982 where ZD is the zenith-distance
983 insitu: if False a new scantable is returned.
984 Otherwise, the scaling is done in-situ
985 The default is taken from .asaprc (False)
986 """
987 if insitu is None: insitu = rcParams['insitu']
[876]988 self._math._setinsitu(insitu)
[513]989 varlist = vars()
[876]990 s = scantable(self._math._opacity(self, tau))
991 s._add_history("opacity", varlist)
992 print_log()
993 if insitu: self._assign(s)
994 else: return s
[513]995
996 def bin(self, width=5, insitu=None):
997 """
998 Return a scan where all spectra have been binned up.
999 width: The bin width (default=5) in pixels
1000 insitu: if False a new scantable is returned.
1001 Otherwise, the scaling is done in-situ
1002 The default is taken from .asaprc (False)
1003 """
1004 if insitu is None: insitu = rcParams['insitu']
[876]1005 self._math._setinsitu(insitu)
[513]1006 varlist = vars()
[876]1007 s = scantable(self._math._bin(self, width))
[1118]1008 s._add_history("bin", varlist)
[876]1009 print_log()
1010 if insitu: self._assign(s)
1011 else: return s
[513]1012
[710]1013
[513]1014 def resample(self, width=5, method='cubic', insitu=None):
1015 """
 1016       Return a scan where all spectra have been resampled.
 1017           width:       The bin width (default=5) in pixels
 1018           method:      Interpolation method for the resampling.
1019 Values are "nearest", "linear", "cubic" (default)
1020 and "spline"
1021 insitu: if False a new scantable is returned.
1022 Otherwise, the scaling is done in-situ
1023 The default is taken from .asaprc (False)
1024 """
1025 if insitu is None: insitu = rcParams['insitu']
[876]1026 self._math._setinsitu(insitu)
[513]1027 varlist = vars()
[876]1028 s = scantable(self._math._resample(self, method, width))
[1118]1029 s._add_history("resample", varlist)
[876]1030 print_log()
1031 if insitu: self._assign(s)
1032 else: return s
[513]1033
1034
[946]1035 def average_pol(self, mask=None, weight='none'):
1036 """
1037 Average the Polarisations together.
1038 Parameters:
1039 mask: An optional mask defining the region, where the
1040 averaging will be applied. The output will have all
1041 specified points masked.
1042 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1043 weighted), or 'tsys' (1/Tsys**2 weighted)
1044 """
1045 varlist = vars()
1046 if mask is None:
1047 mask = ()
[1010]1048 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1049 s._add_history("average_pol", varlist)
[946]1050 print_log()
[992]1051 return s
[513]1052
[1145]1053 def average_beam(self, mask=None, weight='none'):
1054 """
1055 Average the Beams together.
1056 Parameters:
1057 mask: An optional mask defining the region, where the
1058 averaging will be applied. The output will have all
1059 specified points masked.
1060 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1061 weighted), or 'tsys' (1/Tsys**2 weighted)
1062 """
1063 varlist = vars()
1064 if mask is None:
1065 mask = ()
1066 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1067 s._add_history("average_beam", varlist)
1068 print_log()
1069 return s
1070
[992]1071 def convert_pol(self, poltype=None):
1072 """
1073 Convert the data to a different polarisation type.
1074 Parameters:
1075 poltype: The new polarisation type. Valid types are:
1076 "linear", "stokes" and "circular"
1077 """
1078 varlist = vars()
1079 try:
1080 s = scantable(self._math._convertpol(self, poltype))
[1118]1081 except RuntimeError, msg:
[992]1082 if rcParams['verbose']:
[1118]1083 print msg
1084 return
[992]1085 else:
1086 raise
[1118]1087 s._add_history("convert_pol", varlist)
[992]1088 print_log()
1089 return s
1090
[876]1091 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1092 """
1093 Smooth the spectrum by the specified kernel (conserving flux).
1094 Parameters:
1095 scan: The input scan
1096 kernel: The type of smoothing kernel. Select from
1097 'hanning' (default), 'gaussian' and 'boxcar'.
1098 The first three characters are sufficient.
1099 width: The width of the kernel in pixels. For hanning this is
 1100                        ignored, otherwise it defaults to 5 pixels.
1101 For 'gaussian' it is the Full Width Half
1102 Maximum. For 'boxcar' it is the full width.
1103 insitu: if False a new scantable is returned.
1104 Otherwise, the scaling is done in-situ
1105 The default is taken from .asaprc (False)
1106 Example:
1107 none
1108 """
1109 if insitu is None: insitu = rcParams['insitu']
[876]1110 self._math._setinsitu(insitu)
[513]1111 varlist = vars()
[1118]1112 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1113 s._add_history("smooth", varlist)
1114 print_log()
1115 if insitu: self._assign(s)
1116 else: return s
[513]1117
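    # Illustrative sketch (kernel and width are example values):
    #   sm = scan.smooth(kernel='boxcar', width=10, insitu=False)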
[876]1118
[1061]1119 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
[513]1120 """
1121 Return a scan which has been baselined (all rows) by a polynomial.
1122 Parameters:
[794]1123 scan: a scantable
1124 mask: an optional mask
1125 order: the order of the polynomial (default is 0)
[1061]1126          plot:       plot the fit and the residual. In this mode each
 1127                           individual fit has to be approved by typing 'y'
1128 or 'n'
[794]1129 insitu: if False a new scantable is returned.
1130 Otherwise, the scaling is done in-situ
1131 The default is taken from .asaprc (False)
[513]1132 Example:
1133 # return a scan baselined by a third order polynomial,
1134 # not using a mask
1135 bscan = scan.poly_baseline(order=3)
[579]1136 """
[513]1137 if insitu is None: insitu = rcParams['insitu']
1138 varlist = vars()
1139 if mask is None:
[1134]1140 mask = list(NUM.ones(self.nchan(-1)))
[513]1141 from asap.asapfitter import fitter
1142 f = fitter()
1143 f.set_scan(self, mask)
1144 f.set_function(poly=order)
[1061]1145 s = f.auto_fit(insitu, plot=plot)
[876]1146 s._add_history("poly_baseline", varlist)
[718]1147 print_log()
[876]1148 if insitu: self._assign(s)
1149 else: return s
[513]1150
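    # Illustrative sketch: baseline the data while excluding an assumed line
    # region around channels 300-500 (all channel ranges are placeholders):
    #   msk = scan.create_mask([0, 299], [501, scan.nchan()-1])
    #   bscan = scan.poly_baseline(mask=msk, order=2)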
[1118]1151 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1061]1152 threshold=3, plot=False, insitu=None):
[880]1153 """
1154 Return a scan which has been baselined (all rows) by a polynomial.
1155 Spectral lines are detected first using linefinder and masked out
1156 to avoid them affecting the baseline solution.
1157
1158 Parameters:
 1159           mask:       an optional mask retrieved from scantable
 1160           edge:       an optional number of channels to drop at
 1161                       the edge of the spectrum. If only one value is
 1162                       specified, the same number will be dropped from
 1163                       both sides of the spectrum. Default is to keep
[907]1164                   all channels. Nested tuples represent individual
[976]1165                   edge selection for different IFs (the number of spectral
 1166                       channels can differ between IFs)
[880]1167 order: the order of the polynomial (default is 0)
1168 threshold: the threshold used by line finder. It is better to
1169 keep it large as only strong lines affect the
1170 baseline solution.
[1061]1171          plot:       plot the fit and the residual. In this mode each
 1172                           individual fit has to be approved by typing 'y'
1173 or 'n'
[880]1174 insitu: if False a new scantable is returned.
1175 Otherwise, the scaling is done in-situ
1176 The default is taken from .asaprc (False)
1177
1178 Example:
1179 scan2=scan.auto_poly_baseline(order=7)
1180 """
1181 if insitu is None: insitu = rcParams['insitu']
1182 varlist = vars()
1183 from asap.asapfitter import fitter
1184 from asap.asaplinefind import linefinder
1185 from asap import _is_sequence_or_number as _is_valid
1186
[976]1187 # check whether edge is set up for each IF individually
[1118]1188      individualedge = False
1189 if len(edge) > 1:
1190 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
 1191               individualedge = True
[907]1192
[1118]1193 if not _is_valid(edge, int) and not individualedge:
[909]1194 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1195 pair of integers specified as a tuple. Nested tuples are allowed \
1196 to make individual selection for different IFs."
[919]1197
[1118]1198 curedge = (0, 0)
1199 if individualedge:
1200 for edgepar in edge:
1201 if not _is_valid(edgepar, int):
1202 raise ValueError, "Each element of the 'edge' tuple has \
1203 to be a pair of integers or an integer."
[907]1204 else:
[1118]1205          curedge = edge
[880]1206
1207 # setup fitter
1208 f = fitter()
1209 f.set_function(poly=order)
1210
1211 # setup line finder
[1118]1212 fl = linefinder()
[880]1213 fl.set_options(threshold=threshold)
1214
1215 if not insitu:
[1118]1216 workscan = self.copy()
[880]1217 else:
[1118]1218 workscan = self
[880]1219
[907]1220 fl.set_scan(workscan)
1221
[1118]1222 rows = range(workscan.nrow())
[880]1223 asaplog.push("Processing:")
1224 for r in rows:
[1118]1225 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1226 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1227 workscan.getpol(r), workscan.getcycle(r))
[880]1228 asaplog.push(msg, False)
[907]1229
[976]1230 # figure out edge parameter
[1118]1231 if individualedge:
 1232               if len(edge) <= workscan.getif(r):
1233 raise RuntimeError, "Number of edge elements appear to " \
1234 "be less than the number of IFs"
1235 curedge = edge[workscan.getif(r)]
[919]1236
[976]1237 # setup line finder
[1118]1238 fl.find_lines(r, mask, curedge)
[880]1239 f.set_scan(workscan, fl.get_mask())
1240 f.x = workscan._getabcissa(r)
1241 f.y = workscan._getspectrum(r)
1242 f.data = None
1243 f.fit()
1244 x = f.get_parameters()
[1061]1245 if plot:
1246 f.plot(residual=True)
1247 x = raw_input("Accept fit ( [y]/n ): ")
1248 if x.upper() == 'N':
1249 continue
[880]1250 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1251 if plot:
1252 f._p.unmap()
1253 f._p = None
1254 workscan._add_history("auto_poly_baseline", varlist)
[880]1255 if insitu:
1256 self._assign(workscan)
1257 else:
1258 return workscan
1259
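    # Illustrative sketch: per-IF edge channels passed as nested tuples
    # (all numbers are placeholders):
    #   scan2 = scan.auto_poly_baseline(edge=((50, 50), (80, 80)),
    #                                   order=3, threshold=5)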
[914]1260 def rotate_linpolphase(self, angle):
1261 """
1262 Rotate the phase of the complex polarization O=Q+iU correlation.
1263 This is always done in situ in the raw data. So if you call this
1264 function more than once then each call rotates the phase further.
1265 Parameters:
1266 angle: The angle (degrees) to rotate (add) by.
1267 Examples:
1268 scan.rotate_linpolphase(2.3)
1269 """
1270 varlist = vars()
[936]1271 self._math._rotate_linpolphase(self, angle)
[914]1272 self._add_history("rotate_linpolphase", varlist)
1273 print_log()
1274 return
[710]1275
[513]1276
[914]1277 def rotate_xyphase(self, angle):
1278 """
1279 Rotate the phase of the XY correlation. This is always done in situ
1280 in the data. So if you call this function more than once
1281 then each call rotates the phase further.
1282 Parameters:
1283 angle: The angle (degrees) to rotate (add) by.
1284 Examples:
1285 scan.rotate_xyphase(2.3)
1286 """
1287 varlist = vars()
[936]1288 self._math._rotate_xyphase(self, angle)
[914]1289 self._add_history("rotate_xyphase", varlist)
1290 print_log()
1291 return
1292
1293 def swap_linears(self):
1294 """
1295 Swap the linear polarisations XX and YY
1296 """
1297 varlist = vars()
[936]1298 self._math._swap_linears(self)
[914]1299 self._add_history("swap_linears", varlist)
1300 print_log()
1301 return
1302
1303 def invert_phase(self):
1304 """
1305 Invert the phase of the complex polarisation
1306 """
1307 varlist = vars()
[936]1308 self._math._invert_phase(self)
[914]1309 self._add_history("invert_phase", varlist)
1310 print_log()
1311 return
1312
[876]1313 def add(self, offset, insitu=None):
[513]1314 """
1315 Return a scan where all spectra have the offset added
1316 Parameters:
1317 offset: the offset
1318 insitu: if False a new scantable is returned.
1319 Otherwise, the scaling is done in-situ
1320 The default is taken from .asaprc (False)
1321 """
1322 if insitu is None: insitu = rcParams['insitu']
[876]1323 self._math._setinsitu(insitu)
[513]1324 varlist = vars()
[876]1325 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1326 s._add_history("add", varlist)
[876]1327 print_log()
1328 if insitu:
1329 self._assign(s)
1330 else:
[513]1331 return s
1332
[1118]1333      def scale(self, factor, tsys=True, insitu=None):
[513]1334 """
 1335       Return a scan where all spectra are scaled by the given 'factor'
1336 Parameters:
1337 factor: the scaling factor
1338 insitu: if False a new scantable is returned.
1339 Otherwise, the scaling is done in-situ
1340 The default is taken from .asaprc (False)
1341 tsys: if True (default) then apply the operation to Tsys
1342 as well as the data
1343 """
1344 if insitu is None: insitu = rcParams['insitu']
[876]1345 self._math._setinsitu(insitu)
[513]1346 varlist = vars()
[876]1347 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1348 s._add_history("scale", varlist)
[876]1349 print_log()
1350 if insitu:
1351 self._assign(s)
1352 else:
[513]1353 return s
1354
[876]1355 def auto_quotient(self, mode='time', preserve=True):
[670]1356 """
 1357       This function allows quotients to be built automatically.
 1358       It assumes the observation to have the same number of
 1359       "ons" and "offs".
1360 It will support "closest off in time" in the future
1361 Parameters:
1362 mode: the on/off detection mode; 'suffix' (default)
1363 'suffix' identifies 'off' scans by the
1364 trailing '_R' (Mopra/Parkes) or
1365 '_e'/'_w' (Tid)
[710]1366 preserve: you can preserve (default) the continuum or
1367 remove it. The equations used are
[670]1368 preserve: Output = Toff * (on/off) - Toff
[1070]1369 remove: Output = Toff * (on/off) - Ton
[670]1370 """
[876]1371 modes = ["time"]
[670]1372 if not mode in modes:
[876]1373 msg = "please provide valid mode. Valid modes are %s" % (modes)
1374 raise ValueError(msg)
1375 varlist = vars()
[1070]1376 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1377 s._add_history("auto_quotient", varlist)
[876]1378 print_log()
1379 return s
[710]1380
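    # Illustrative sketch (assumes matching on/off scans as described above):
    #   q = scan.auto_quotient(mode='time', preserve=False)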
[1145]1381 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1382 """
[1143]1383 Form a quotient using "off" beams when observing in "MX" mode.
1384 Parameters:
[1145]1385 mask: an optional mask to be used when weight == 'stddev'
[1143]1386 weight: How to average the off beams. Default is 'median'.
[1145]1387 preserve: you can preserve (default) the continuum or
1388 remove it. The equations used are
1389 preserve: Output = Toff * (on/off) - Toff
1390 remove: Output = Toff * (on/off) - Ton
[1141]1391 """
[1143]1392 if mask is None: mask = ()
[1141]1393 varlist = vars()
1394 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1395 preoff = scantable(self._math._mx_extract(self, 'off'))
1396 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1141]1397 from asapmath import quotient
[1145]1398 q = quotient(on, off, preserve)
[1143]1399 q._add_history("mx_quotient", varlist)
[1145]1400 print_log()
[1143]1401 return q
[513]1402
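    # Illustrative sketch for MX-mode data:
    #   q = scan.mx_quotient(weight='median', preserve=True)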
[718]1403 def freq_switch(self, insitu=None):
1404 """
1405 Apply frequency switching to the data.
1406 Parameters:
1407 insitu: if False a new scantable is returned.
 1408                       Otherwise, the switching is done in-situ
1409 The default is taken from .asaprc (False)
1410 Example:
1411 none
1412 """
1413 if insitu is None: insitu = rcParams['insitu']
[876]1414 self._math._setinsitu(insitu)
[718]1415 varlist = vars()
[876]1416 s = scantable(self._math._freqswitch(self))
[1118]1417 s._add_history("freq_switch", varlist)
[876]1418 print_log()
1419 if insitu: self._assign(s)
1420 else: return s
[718]1421
[780]1422 def recalc_azel(self):
1423 """
1424 Recalculate the azimuth and elevation for each position.
1425 Parameters:
1426 none
1427 Example:
1428 """
1429 varlist = vars()
[876]1430 self._recalcazel()
[780]1431 self._add_history("recalc_azel", varlist)
1432 print_log()
1433 return
1434
[513]1435 def __add__(self, other):
1436 varlist = vars()
1437 s = None
1438 if isinstance(other, scantable):
[876]1439 print "scantable + scantable NYI"
1440 return
[513]1441 elif isinstance(other, float):
[876]1442 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1443 else:
[718]1444 raise TypeError("Other input is not a scantable or float value")
[513]1445 s._add_history("operator +", varlist)
[718]1446 print_log()
[513]1447 return s
1448
1449 def __sub__(self, other):
1450 """
1451 implicit on all axes and on Tsys
1452 """
1453 varlist = vars()
1454 s = None
1455 if isinstance(other, scantable):
[876]1456 print "scantable - scantable NYI"
1457 return
[513]1458 elif isinstance(other, float):
[876]1459 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1460 else:
[718]1461 raise TypeError("Other input is not a scantable or float value")
[513]1462 s._add_history("operator -", varlist)
[718]1463 print_log()
[513]1464 return s
[710]1465
[513]1466 def __mul__(self, other):
1467 """
1468 implicit on all axes and on Tsys
1469 """
1470 varlist = vars()
1471 s = None
1472 if isinstance(other, scantable):
[876]1473 print "scantable * scantable NYI"
1474 return
[513]1475 elif isinstance(other, float):
[876]1476 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1477 else:
[718]1478 raise TypeError("Other input is not a scantable or float value")
[513]1479 s._add_history("operator *", varlist)
[718]1480 print_log()
[513]1481 return s
1482
[710]1483
[513]1484 def __div__(self, other):
1485 """
1486 implicit on all axes and on Tsys
1487 """
1488 varlist = vars()
1489 s = None
1490 if isinstance(other, scantable):
[876]1491 print "scantable / scantable NYI"
1492 return
[513]1493 elif isinstance(other, float):
1494 if other == 0.0:
[718]1495 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1496 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1497 else:
[718]1498 raise TypeError("Other input is not a scantable or float value")
[513]1499 s._add_history("operator /", varlist)
[718]1500 print_log()
[513]1501 return s
1502
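    # Illustrative sketch of the scalar operators defined above (numeric values
    # are placeholders); each returns a new scantable:
    #   shifted = scan + 1.5
    #   scaled = scan * 2.0
    #   halved = scan / 2.0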
[530]1503 def get_fit(self, row=0):
1504 """
1505 Print or return the stored fits for a row in the scantable
1506 Parameters:
1507 row: the row which the fit has been applied to.
1508 """
1509 if row > self.nrow():
1510 return
[976]1511 from asap.asapfit import asapfit
[530]1512 fit = asapfit(self._getfit(row))
[718]1513 if rcParams['verbose']:
[530]1514 print fit
1515 return
1516 else:
1517 return fit.as_dict()
1518
[484]1519 def _add_history(self, funcname, parameters):
1520 # create date
1521 sep = "##"
1522 from datetime import datetime
1523 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1524 hist = dstr+sep
1525 hist += funcname+sep#cdate+sep
1526 if parameters.has_key('self'): del parameters['self']
[1118]1527 for k, v in parameters.iteritems():
[484]1528 if type(v) is dict:
[1118]1529 for k2, v2 in v.iteritems():
[484]1530 hist += k2
1531 hist += "="
[1118]1532 if isinstance(v2, scantable):
[484]1533 hist += 'scantable'
1534 elif k2 == 'mask':
[1118]1535 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1536 hist += str(self._zip_mask(v2))
1537 else:
1538 hist += str(v2)
[484]1539 else:
[513]1540 hist += str(v2)
[484]1541 else:
1542 hist += k
1543 hist += "="
[1118]1544 if isinstance(v, scantable):
[484]1545 hist += 'scantable'
1546 elif k == 'mask':
[1118]1547 if isinstance(v, list) or isinstance(v, tuple):
[513]1548 hist += str(self._zip_mask(v))
1549 else:
1550 hist += str(v)
[484]1551 else:
1552 hist += str(v)
1553 hist += sep
1554 hist = hist[:-2] # remove trailing '##'
1555 self._addhistory(hist)
1556
[710]1557
[484]1558 def _zip_mask(self, mask):
1559 mask = list(mask)
1560 i = 0
1561 segments = []
1562 while mask[i:].count(1):
1563 i += mask[i:].index(1)
1564 if mask[i:].count(0):
1565 j = i + mask[i:].index(0)
1566 else:
[710]1567 j = len(mask)
[1118]1568 segments.append([i, j])
[710]1569 i = j
[484]1570 return segments
[714]1571
[626]1572 def _get_ordinate_label(self):
1573 fu = "("+self.get_fluxunit()+")"
1574 import re
1575 lbl = "Intensity"
[1118]1576 if re.match(".K.", fu):
[626]1577 lbl = "Brightness Temperature "+ fu
[1118]1578 elif re.match(".Jy.", fu):
[626]1579 lbl = "Flux density "+ fu
1580 return lbl
[710]1581
[876]1582 def _check_ifs(self):
1583 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1584 nchans = filter(lambda t: t > 0, nchans)
[876]1585 return (sum(nchans)/len(nchans) == nchans[0])
[976]1586
1587 def _fill(self, names, unit, average):
1588 import os
1589 from asap._asap import stfiller
1590 first = True
1591 fullnames = []
1592 for name in names:
1593 name = os.path.expandvars(name)
1594 name = os.path.expanduser(name)
1595 if not os.path.exists(name):
 1596               msg = "File '%s' does not exist" % (name)
1597 if rcParams['verbose']:
1598 asaplog.push(msg)
1599 print asaplog.pop().strip()
1600 return
1601 raise IOError(msg)
1602 fullnames.append(name)
1603 if average:
1604 asaplog.push('Auto averaging integrations')
[1079]1605 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1606 for name in fullnames:
[1073]1607 tbl = Scantable(stype)
1608 r = stfiller(tbl)
[976]1609 msg = "Importing %s..." % (name)
[1118]1610 asaplog.push(msg, False)
[976]1611 print_log()
[1118]1612 r._open(name, -1, -1)
[976]1613 r._read()
[1073]1614 #tbl = r._getdata()
[976]1615 if average:
[1118]1616 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1617 #tbl = tbl2
1618 if not first:
1619 tbl = self._math._merge([self, tbl])
1620 #tbl = tbl2
1621 Scantable.__init__(self, tbl)
1622 r._close()
[1118]1623 del r, tbl
[976]1624 first = False
1625 if unit is not None:
1626 self.set_fluxunit(unit)
1627 self.set_freqframe(rcParams['scantable.freqframe'])
1628 #self._add_history("scantable", varlist)
1629