source: trunk/python/scantable.py@ 1156

Last change on this file since 1156 was 1153, checked in by mar637, 18 years ago

lots of changes to support soft refresh, for things like text overlays, line catalogs etc. Reworked plot_lines to do auto-peak detection. Added forwarding functions to matplotlib.axes drawing functions.

RevLine 
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1134]6from asap import NUM
[1153]7from asap import linecatalog
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1118]35 #varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62 msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[714]74 print_log()
[102]75
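# Illustrative usage sketch (file names below are placeholders, not from this
# repository); shows the two documented ways of constructing a scantable:
#   from asap import scantable
#   scan = scantable('2006-06-01_mopra.rpf', average=True)   # fill from rpfits/sdfits/ms
#   scan2 = scantable('mysource.asap')                        # reopen a saved ASAP table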
[876]76 def save(self, name=None, format=None, overwrite=False):
[116]77 """
[710]78 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
[280]79 Image FITS or MS2 format.
[116]80 Parameters:
[1093]81 name: the name of the outputfile. For format "ASCII"
82 this is the root file name (data in 'name'.txt
[497]83 and header in 'name'_header.txt)
[116]84 format: an optional file format. Default is ASAP.
[280]85 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]86 'SDFITS' (save as SDFITS file)
[200]87 'ASCII' (saves as ascii text file)
[226]88 'MS2' (saves as an aips++
89 MeasurementSet V2)
[411]90 overwrite: If the file should be overwritten if it exists.
[256]91 The default False is to return with warning
[411]92 without writing the output. USE WITH CARE.
[116]93 Example:
94 scan.save('myscan.asap')
[1118]95 scan.save('myscan.sdfits', 'SDFITS')
[116]96 """
[411]97 from os import path
[226]98 if format is None: format = rcParams['scantable.save']
[256]99 suffix = '.'+format.lower()
[1118]100 if name is None or name == "":
[256]101 name = 'scantable'+suffix
[718]102 msg = "No filename given. Using default name %s..." % name
103 asaplog.push(msg)
[411]104 name = path.expandvars(name)
[256]105 if path.isfile(name) or path.isdir(name):
106 if not overwrite:
[718]107 msg = "File %s exists." % name
108 if rcParams['verbose']:
109 print msg
110 return
111 else:
112 raise IOError(msg)
[451]113 format2 = format.upper()
114 if format2 == 'ASAP':
[116]115 self._save(name)
116 else:
[989]117 from asap._asap import stwriter as stw
[1118]118 writer = stw(format2)
119 writer.write(self, name)
[718]120 print_log()
[116]121 return
122
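# Illustrative usage sketch (output names are placeholders):
#   scan.save('mydata.asap')                                   # default ASAP table
#   scan.save('mydata.sdfits', format='SDFITS', overwrite=True)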
[102]123 def copy(self):
124 """
125 Return a copy of this scantable.
126 Parameters:
[113]127 none
[102]128 Example:
129 copiedscan = scan.copy()
130 """
[876]131 sd = scantable(Scantable._copy(self))
[113]132 return sd
133
[1093]134 def drop_scan(self, scanid=None):
135 """
136 Return a new scantable where the specified scan number(s) have
137 been dropped.
138 Parameters:
139 scanid: a (list of) scan number(s)
140 """
141 from asap import _is_sequence_or_number as _is_valid
142 from asap import _to_list
143 from asap import unique
144 if not _is_valid(scanid):
145 if rcParams['verbose']:
146 print "Please specify a scanno to drop from the scantable"
147 return
148 else:
149 raise RuntimeError("No scan given")
150 try:
151 scanid = _to_list(scanid)
152 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153 for sid in scanid: allscans.remove(sid)
[1118]154 if len(allscans) == 0:
155 raise ValueError("Can't remove all scans")
[1093]156 except ValueError:
157 if rcParams['verbose']:
158 print "Couldn't find any match."
159 return
160 else: raise
161 try:
162 bsel = self.get_selection()
163 sel = selector()
164 sel.set_scans(allscans)
165 self.set_selection(bsel+sel)
166 scopy = self._copy()
167 self.set_selection(bsel)
168 return scantable(scopy)
169 except RuntimeError:
[1118]170 if rcParams['verbose']:
171 print "Couldn't find any match."
172 else:
173 raise
[1093]174
175
[102]176 def get_scan(self, scanid=None):
177 """
178 Return a specific scan (by scanno) or collection of scans (by
179 source name) in a new scantable.
180 Parameters:
[513]181 scanid: a (list of) scanno or a source name, unix-style
182 patterns are accepted for source name matching, e.g.
[513]183 '*_R' gets all 'ref' scans
[102]184 Example:
[513]185 # get all scans containing the source '323p459'
186 newscan = scan.get_scan('323p459')
187 # get all 'off' scans
188 refscans = scan.get_scan('*_R')
189 # get a subset of scans by scanno (as listed in scan.summary())
[1118]190 newscan = scan.get_scan([0, 2, 7, 10])
[102]191 """
192 if scanid is None:
[718]193 if rcParams['verbose']:
[1118]194 print "Please specify a scan no or name to " \
195 "retrieve from the scantable"
[718]196 return
197 else:
198 raise RuntimeError("No scan given")
199
[102]200 try:
[946]201 bsel = self.get_selection()
202 sel = selector()
[102]203 if type(scanid) is str:
[946]204 sel.set_name(scanid)
205 self.set_selection(bsel+sel)
[876]206 scopy = self._copy()
[946]207 self.set_selection(bsel)
[876]208 return scantable(scopy)
[102]209 elif type(scanid) is int:
[946]210 sel.set_scans([scanid])
211 self.set_selection(bsel+sel)
[876]212 scopy = self._copy()
[946]213 self.set_selection(bsel)
[876]214 return scantable(scopy)
[381]215 elif type(scanid) is list:
[946]216 sel.set_scans(scanid)
217 self.set_selection(sel)
[876]218 scopy = self._copy()
[946]219 self.set_selection(bsel)
[876]220 return scantable(scopy)
[381]221 else:
[718]222 msg = "Illegal scanid type, use 'int' or 'list' of ints."
223 if rcParams['verbose']:
224 print msg
225 else:
226 raise TypeError(msg)
[102]227 except RuntimeError:
[718]228 if rcParams['verbose']: print "Couldn't find any match."
229 else: raise
[102]230
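# Illustrative usage sketch (scan numbers and name patterns are placeholders):
#   refs = scan.get_scan('*_R')      # reference scans selected by source name pattern
#   sub = scan.get_scan([0, 2])      # scans 0 and 2 selected by scanno
#   trimmed = scan.drop_scan(1)      # everything except scan 1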
231 def __str__(self):
[1118]232 return Scantable._summary(self, True)
[102]233
[976]234 def summary(self, filename=None):
[102]235 """
236 Print a summary of the contents of this scantable.
237 Parameters:
238 filename: the name of a file to write the output to
239 Default - no file output
[381]240 verbose: print extra info such as the frequency table
241 The default (False) is taken from .asaprc
[102]242 """
[976]243 info = Scantable._summary(self, True)
244 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]245 if filename is not None:
[256]246 if filename == "":
247 filename = 'scantable_summary.txt'
[415]248 from os.path import expandvars, isdir
[411]249 filename = expandvars(filename)
[415]250 if not isdir(filename):
[413]251 data = open(filename, 'w')
252 data.write(info)
253 data.close()
254 else:
[718]255 msg = "Illegal file name '%s'." % (filename)
256 if rcParams['verbose']:
257 print msg
258 else:
259 raise IOError(msg)
260 if rcParams['verbose']:
[794]261 try:
262 from IPython.genutils import page as pager
263 except ImportError:
264 from pydoc import pager
265 pager(info)
[718]266 else:
267 return info
[710]268
[946]269
270 def get_selection(self):
271 """
[1005]272 Get the selection object currently set on this scantable.
273 Parameters:
274 none
275 Example:
276 sel = scan.get_selection()
277 sel.set_ifs(0) # select IF 0
278 scan.set_selection(sel) # apply modified selection
[946]279 """
280 return selector(self._getselection())
281
[1005]282 def set_selection(self, selection=selector()):
[946]283 """
[1005]284 Select a subset of the data. All following operations on this scantable
285 are only applied to this selection.
286 Parameters:
287 selection: a selector object (default unset the selection)
288 Examples:
289 sel = selector() # create a selection object
[1118]290 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]291 scan.set_selection(sel) # set the selection
292 scan.summary() # will only print summary of scanno 0 and 3
293 scan.set_selection() # unset the selection
[946]294 """
295 self._setselection(selection)
296
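# Illustrative usage sketch (the source name is a placeholder); a full
# selection round trip as described in the docstrings above:
#   sel = selector()
#   sel.set_name('ORION*')
#   sel.set_ifs([0])
#   scan.set_selection(sel)    # subsequent operations see only IF 0 of ORION*
#   scan.summary()
#   scan.set_selection()       # revert to the full table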
[876]297 def stats(self, stat='stddev', mask=None):
[102]298 """
[135]299 Determine the specified statistic of the current beam/if/pol
[102]300 Takes a 'mask' as an optional parameter to specify which
301 channels should be excluded.
302 Parameters:
[135]303 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
304 'var', 'stddev', 'avdev', 'rms', 'median'
305 mask: an optional mask specifying where the statistic
[102]306 should be determined.
307 Example:
[113]308 scan.set_unit('channel')
[1118]309 msk = scan.create_mask([100, 200], [500, 600])
[135]310 scan.stats(stat='mean', mask=msk)
[102]311 """
[102]312 if mask is None:
[876]313 mask = []
[1118]314 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]315 if not self._check_ifs():
[1118]316 raise ValueError("Cannot apply mask as the IFs have different "
317 "number of channels. Please use setselection() "
318 "to select individual IFs")
[256]319
[876]320 statvals = self._math._stats(self, mask, stat)
321 out = ''
322 axes = []
323 for i in range(self.nrow()):
324 axis = []
325 axis.append(self.getscan(i))
326 axis.append(self.getbeam(i))
327 axis.append(self.getif(i))
328 axis.append(self.getpol(i))
329 axis.append(self.getcycle(i))
330 axes.append(axis)
331 tm = self._gettime(i)
332 src = self._getsourcename(i)
333 out += 'Scan[%d] (%s) ' % (axis[0], src)
334 out += 'Time[%s]:\n' % (tm)
335 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
336 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
337 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
338 out += '= %3.3f\n' % (statvals[i])
339 out += "--------------------------------------------------\n"
[256]340
[876]341 if rcParams['verbose']:
342 print "--------------------------------------------------"
[1118]343 print " ", stat
[876]344 print "--------------------------------------------------"
345 print out
[1118]346 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
[876]347 'axes' : axes,
348 'data': statvals}
349 return retval
[102]350
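# Illustrative usage sketch: the dictionary returned by stats() can be walked
# like this (assuming a scantable 'scan'):
#   res = scan.stats(stat='rms')
#   for ax, val in zip(res['axes'], res['data']):
#       print ax, val     # [scanno, beamno, ifno, polno, cycleno], rms value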
[1118]351 def stddev(self, mask=None):
[135]352 """
353 Determine the standard deviation of the current beam/if/pol
354 Takes a 'mask' as an optional parameter to specify which
355 channels should be excluded.
356 Parameters:
357 mask: an optional mask specifying where the standard
358 deviation should be determined.
359
360 Example:
361 scan.set_unit('channel')
[1118]362 msk = scan.create_mask([100, 200], [500, 600])
[135]363 scan.stddev(mask=msk)
364 """
[1118]365 return self.stats(stat='stddev', mask=mask)
[135]366
[1003]367
368 def column_names(self):
369 """
370 Return a list of column names, which can be used for selection.
371 """
372 return list(Scantable.column_names(self))
373
[876]374 def get_tsys(self):
[113]375 """
376 Return the System temperatures.
377 Parameters:
[876]378
[113]379 Returns:
[876]380 a list of Tsys values for the current selection
[113]381 """
[256]382
[876]383 return self._row_callback(self._gettsys, "Tsys")
[256]384
[876]385 def _row_callback(self, callback, label):
386 axes = []
[1118]387 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]388 out = ""
[1118]389 outvec = []
[876]390 for i in range(self.nrow()):
391 axis = []
392 axis.append(self.getscan(i))
393 axis.append(self.getbeam(i))
394 axis.append(self.getif(i))
395 axis.append(self.getpol(i))
396 axis.append(self.getcycle(i))
397 axes.append(axis)
398 tm = self._gettime(i)
399 src = self._getsourcename(i)
400 out += 'Scan[%d] (%s) ' % (axis[0], src)
401 out += 'Time[%s]:\n' % (tm)
402 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += "--------------------------------------------------\n"
408 if rcParams['verbose']:
409 print "--------------------------------------------------"
410 print " %s" % (label)
411 print "--------------------------------------------------"
412 print out
413 retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
414 return retval
[256]415
[1070]416 def _get_column(self, callback, row=-1):
417 """
418 """
419 if row == -1:
420 return [callback(i) for i in range(self.nrow())]
421 else:
422 if 0 <= row < self.nrow():
423 return callback(row)
[256]424
[1070]425
[407]426 def get_time(self, row=-1):
[113]427 """
428 Get a list of time stamps for the observations.
[181]429 Return a string for each integration in the scantable.
[113]430 Parameters:
[407]431 row: row no of integration. Default -1 return all rows
[113]432 Example:
433 none
434 """
[1070]435 return self._get_column(self._gettime, row)
[102]436
[714]437 def get_sourcename(self, row=-1):
438 """
[794]439 Get a list of source names for the observations.
[714]440 Return a string for each integration in the scantable.
441 Parameters:
442 row: row no of integration. Default -1 return all rows
443 Example:
444 none
445 """
[1070]446 return self._get_column(self._getsourcename, row)
[714]447
[794]448 def get_elevation(self, row=-1):
449 """
450 Get a list of elevations for the observations.
451 Return a float for each integration in the scantable.
452 Parameters:
453 row: row no of integration. Default -1 return all rows
454 Example:
455 none
456 """
[1070]457 return self._get_column(self._getelevation, row)
[794]458
459 def get_azimuth(self, row=-1):
460 """
461 Get a list of azimuths for the observations.
462 Return a float for each integration in the scantable.
463 Parameters:
464 row: row no of integration. Default -1 return all rows
465 Example:
466 none
467 """
[1070]468 return self._get_column(self._getazimuth, row)
[794]469
470 def get_parangle(self, row=-1):
471 """
472 Get a list of parallactic angles for the observations.
473 Return a float for each integration in the scantable.
474 Parameters:
475 row: row no of integration. Default -1 return all rows
476 Example:
477 none
478 """
[1070]479 return self._get_column(self._getparangle, row)
[794]480
[1070]481 def get_direction(self, row=-1):
482 """
483 Get a list of Positions on the sky (direction) for the observations.
484 Return a float for each integration in the scantable.
485 Parameters:
486 row: row no of integration. Default -1 return all rows
487 Example:
488 none
489 """
490 return self._get_column(self._getdirection, row)
491
[102]492 def set_unit(self, unit='channel'):
493 """
494 Set the unit for all following operations on this scantable
495 Parameters:
496 unit: optional unit, default is 'channel'
[1118]497 one of '*Hz', 'km/s', 'channel', ''
[102]498 """
[484]499 varlist = vars()
[1118]500 if unit in ['', 'pixel', 'channel']:
[113]501 unit = ''
502 inf = list(self._getcoordinfo())
503 inf[0] = unit
504 self._setcoordinfo(inf)
[1118]505 self._add_history("set_unit", varlist)
[113]506
[484]507 def set_instrument(self, instr):
[358]508 """
509 Set the instrument for subsequent processing
510 Parameters:
[710]511 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]512 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]513 """
514 self._setInstrument(instr)
[1118]515 self._add_history("set_instrument", vars())
[718]516 print_log()
[358]517
[276]518 def set_doppler(self, doppler='RADIO'):
519 """
520 Set the doppler for all following operations on this scantable.
521 Parameters:
522 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
523 """
[484]524 varlist = vars()
[276]525 inf = list(self._getcoordinfo())
526 inf[2] = doppler
527 self._setcoordinfo(inf)
[1118]528 self._add_history("set_doppler", vars())
[718]529 print_log()
[710]530
[226]531 def set_freqframe(self, frame=None):
[113]532 """
533 Set the frame type of the Spectral Axis.
534 Parameters:
[591]535 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]536 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
537 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]538 Examples:
539 scan.set_freqframe('BARY')
540 """
[484]541 if frame is None: frame = rcParams['scantable.freqframe']
542 varlist = vars()
[1118]543 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
544 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]545
[989]546 if frame in valid:
[113]547 inf = list(self._getcoordinfo())
548 inf[1] = frame
549 self._setcoordinfo(inf)
[1118]550 self._add_history("set_freqframe", varlist)
[102]551 else:
[1118]552 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
[718]553 if rcParams['verbose']:
554 print msg
555 else:
556 raise TypeError(msg)
557 print_log()
[710]558
[989]559 def set_dirframe(self, frame=""):
560 """
561 Set the frame type of the Direction on the sky.
562 Parameters:
563 frame: an optional frame type, default ''. Valid frames are:
564 'J2000', 'B1950', 'GALACTIC'
565 Examples:
566 scan.set_dirframe('GALACTIC')
567 """
568 varlist = vars()
569 try:
570 Scantable.set_dirframe(self, frame)
[1118]571 except RuntimeError, msg:
[989]572 if rcParams['verbose']:
573 print msg
574 else:
575 raise
[1118]576 self._add_history("set_dirframe", varlist)
[989]577
[113]578 def get_unit(self):
579 """
580 Get the default unit set in this scantable
581 Parameters:
582 Returns:
583 A unit string
584 """
585 inf = self._getcoordinfo()
586 unit = inf[0]
587 if unit == '': unit = 'channel'
588 return unit
[102]589
[158]590 def get_abcissa(self, rowno=0):
[102]591 """
[158]592 Get the abcissa in the current coordinate setup for the currently
[113]593 selected Beam/IF/Pol
594 Parameters:
[226]595 rowno: an optional row number in the scantable. Default is the
596 first row, i.e. rowno=0
[113]597 Returns:
[407]598 The abcissa values and its format string (as a tuple)
[113]599 """
[256]600 abc = self._getabcissa(rowno)
[710]601 lbl = self._getabcissalabel(rowno)
[718]602 print_log()
[158]603 return abc, lbl
[113]604
[1118]605 def flag(self, mask=None):
[1001]606 """
607 Flag the selected data using an optional channel mask.
608 Parameters:
609 mask: an optional channel mask, created with create_mask. Default
610 (no mask) is all channels.
611 """
612 varlist = vars()
[1118]613 if mask is None:
614 mask = []
[1001]615 try:
616 self._flag(mask)
[1118]617 except RuntimeError, msg:
[1001]618 if rcParams['verbose']:
619 print msg
620 return
621 else: raise
622 self._add_history("flag", varlist)
623
624
[113]625 def create_mask(self, *args, **kwargs):
626 """
[1118]627 Compute and return a mask based on [min, max] windows.
[189]628 The specified windows are to be INCLUDED, when the mask is
[113]629 applied.
[102]630 Parameters:
[1118]631 [min, max], [min2, max2], ...
[1024]632 Pairs of start/end points (inclusive) specifying the regions
[102]633 to be masked
[189]634 invert: optional argument. If specified as True,
635 return an inverted mask, i.e. the regions
636 specified are EXCLUDED
[513]637 row: create the mask using the specified row for
638 unit conversions, default is row=0
639 only necessary if frequency varies over rows.
[102]640 Example:
[113]641 scan.set_unit('channel')
642 a)
[1118]643 msk = scan.create_mask([400, 500], [800, 900])
[189]644 # masks everything outside 400 and 500
[113]645 # and 800 and 900 in the unit 'channel'
646
647 b)
[1118]648 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]649 # masks the regions between 400 and 500
[113]650 # and 800 and 900 in the unit 'channel'
[1024]651 c)
652 mask only channel 400
[1118]653 msk = scan.create_mask([400, 400])
[102]654 """
[513]655 row = 0
656 if kwargs.has_key("row"):
657 row = kwargs.get("row")
658 data = self._getabcissa(row)
[113]659 u = self._getcoordinfo()[0]
[718]660 if rcParams['verbose']:
[113]661 if u == "": u = "channel"
[718]662 msg = "The current mask window unit is %s" % u
[1118]663 i = self._check_ifs()
664 if not i:
[876]665 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]666 asaplog.push(msg)
[102]667 n = self.nchan()
[1134]668 msk = NUM.zeros(n)
[710]669 # test if args is a 'list' or a 'normal *args - UGLY!!!
670
[1118]671 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
672 and args or args[0]
[710]673 for window in ws:
[102]674 if (len(window) != 2 or window[0] > window[1] ):
[1118]675 raise TypeError("A window needs to be defined as [min, max]")
[102]676 for i in range(n):
[1024]677 if data[i] >= window[0] and data[i] <= window[1]:
[189]678 msk[i] = 1
[113]679 if kwargs.has_key('invert'):
680 if kwargs.get('invert'):
[1134]681 msk = NUM.logical_not(msk)
[718]682 print_log()
[102]683 return msk
[710]684
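# Illustrative usage sketch (the velocity window is a placeholder): build a
# mask excluding a line region and pass it to a statistics call:
#   scan.set_unit('km/s')
#   msk = scan.create_mask([-30.0, 30.0], invert=True)   # exclude -30..30 km/s
#   scan.stats(stat='stddev', mask=msk)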
[256]685 def get_restfreqs(self):
686 """
687 Get the restfrequency(s) stored in this scantable.
688 The return value(s) are always of unit 'Hz'
689 Parameters:
690 none
691 Returns:
692 a list of doubles
693 """
694 return list(self._getrestfreqs())
[102]695
[402]696
[931]697 def set_restfreqs(self, freqs=None, unit='Hz'):
698 """
699 Set or replace the restfrequency(s) specified.
700 If the 'freqs' argument holds a scalar,
701 then that rest frequency will be applied to all the selected
702 data. If the 'freqs' argument holds
703 a vector, then it MUST be of equal or smaller length than
704 the number of IFs (and the available restfrequencies will be
705 replaced by this vector). In this case, *all* data have
706 the restfrequency set per IF according
707 to the corresponding value you give in the 'freqs' vector.
[1118]708 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]709 IF 1 gets restfreq 2e9.
[1153]710 You can also specify the frequencies via a linecatalog.
711
[931]712 Parameters:
713 freqs: list of rest frequency values or string identifiers
714 unit: unit for rest frequency (default 'Hz')
[402]715
[931]716 Example:
717 # set the given restfrequency for the whole table
718 scan.set_restfreqs(freqs=1.4e9)
719 # If the number of IFs in the data is >= 2 then IF0 gets the first
720 # value IF1 the second...
[1118]721 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]722 #set the given restfrequency for the whole table (by name)
723 scan.set_restfreqs(freqs="OH1667")
[391]724
[931]725 Note:
726 To do more sophisticated rest frequency setting, e.g. on a
727 source and IF basis, use scantable.set_selection() before using
728 this function.
729 # provided your scantable is called scan
730 selection = selector()
731 selection.set_name("ORION*")
732 selection.set_ifs([1])
733 scan.set_selection(selection)
734 scan.set_restfreqs(freqs=86.6e9)
735
736 """
737 varlist = vars()
738
739 t = type(freqs)
[1118]740 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]741 self._setrestfreqs(freqs, "", unit)
[1118]742 elif isinstance(freqs, list) or isinstance(freqs, tuple):
743 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]744 sel = selector()
745 savesel = self._getselection()
746 for i in xrange(len(freqs)):
[946]747 sel.set_ifs([i])
[931]748 self._setselection(sel)
[1153]749 self._setrestfreqs(freqs[i], "", unit)
[931]750 self._setselection(savesel)
751 elif isinstance(freqs[-1], str):
752 # not yet implemented
753 pass
[1153]754 elif isinstance(freqs, linecatalog):
755 sel = selector()
756 savesel = self._getselection()
757 for i in xrange(freqs.nrow()):
758 sel.set_ifs([i])
759 self._setselection(sel)
760 self._setrestfreqs(freqs.get_frequency(i),
761 freqs.get_name(i), "MHz")
762 # ensure that we are not iterating past nIF
763 if i == self.nif()-1: break
764 self._setselection(savesel)
[931]765 else:
766 return
767 self._add_history("set_restfreqs", varlist)
768
769
770
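# Illustrative usage sketch (frequencies are placeholders):
#   scan.set_restfreqs(freqs=1.420405752e9)       # one value applied to all rows
#   scan.set_restfreqs(freqs=[1.4e9, 1.67e9])     # per-IF values, IF0 then IF1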
[484]771 def history(self):
772 hist = list(self._gethistory())
[794]773 out = "-"*80
[484]774 for h in hist:
[489]775 if h.startswith("---"):
[794]776 out += "\n"+h
[489]777 else:
778 items = h.split("##")
779 date = items[0]
780 func = items[1]
781 items = items[2:]
[794]782 out += "\n"+date+"\n"
783 out += "Function: %s\n Parameters:" % (func)
[489]784 for i in items:
785 s = i.split("=")
[1118]786 out += "\n %s = %s" % (s[0], s[1])
[794]787 out += "\n"+"-"*80
788 try:
789 from IPython.genutils import page as pager
790 except ImportError:
791 from pydoc import pager
792 pager(out)
[484]793 return
794
[513]795 #
796 # Maths business
797 #
798
[931]799 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]800 """
[1070]801 Return the (time) weighted average of a scan.
[513]802 Note:
[1070]803 in channels only - align if necessary
[513]804 Parameters:
805 mask: an optional mask (only used for 'var' and 'tsys'
806 weighting)
[558]807 scanav: True averages each scan separately
808 False (default) averages all scans together,
[1099]809 weight: Weighting scheme.
810 'none' (mean, no weighting)
811 'var' (1/var(spec) weighted)
812 'tsys' (1/Tsys**2 weighted)
813 'tint' (integration time weighted)
814 'tintsys' (Tint/Tsys**2)
815 'median' (median averaging)
[535]816 The default is 'tint'
[931]817 align: align the spectra in velocity before averaging. It takes
818 the time of the first spectrum as reference time.
[513]819 Example:
820 # time average the scantable without using a mask
[710]821 newscan = scan.average_time()
[513]822 """
823 varlist = vars()
[976]824 if weight is None: weight = 'TINT'
[513]825 if mask is None: mask = ()
[1099]826 if scanav: scanav = "SCAN"
827 else: scanav = "NONE"
[1118]828 scan = (self, )
[989]829 try:
[1118]830 if align:
831 scan = (self.freq_align(insitu=False), )
832 s = None
833 if weight.upper() == 'MEDIAN':
834 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
835 scanav))
836 else:
837 s = scantable(self._math._average(scan, mask, weight.upper(),
838 scanav))
839 except RuntimeError, msg:
[989]840 if rcParams['verbose']:
841 print msg
842 return
843 else: raise
[1099]844 s._add_history("average_time", varlist)
[718]845 print_log()
[513]846 return s
[710]847
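# Illustrative usage sketch: scan-by-scan averaging with Tint/Tsys**2 weighting,
# aligning in velocity first:
#   av = scan.average_time(scanav=True, weight='tintsys', align=True)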
[876]848 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]849 """
850 Return a scan where all spectra are converted to either
851 Jansky or Kelvin depending upon the flux units of the scan table.
852 By default the function tries to look the values up internally.
853 If it can't find them (or if you want to over-ride), you must
854 specify EITHER jyperk OR eta (and D which it will try to look up
855 also if you don't set it). jyperk takes precedence if you set both.
856 Parameters:
857 jyperk: the Jy / K conversion factor
858 eta: the aperture efficiency
859 d: the geometric diameter (metres)
860 insitu: if False a new scantable is returned.
861 Otherwise, the scaling is done in-situ
862 The default is taken from .asaprc (False)
863 allaxes: if True apply to all spectra. Otherwise
864 apply only to the selected (beam/pol/if) spectra.
865 The default is taken from .asaprc (True if none)
866 """
867 if insitu is None: insitu = rcParams['insitu']
[876]868 self._math._setinsitu(insitu)
[513]869 varlist = vars()
870 if jyperk is None: jyperk = -1.0
871 if d is None: d = -1.0
872 if eta is None: eta = -1.0
[876]873 s = scantable(self._math._convertflux(self, d, eta, jyperk))
874 s._add_history("convert_flux", varlist)
875 print_log()
876 if insitu: self._assign(s)
877 else: return s
[513]878
[876]879 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]880 """
881 Return a scan after applying a gain-elevation correction.
882 The correction can be made via either a polynomial or a
883 table-based interpolation (and extrapolation if necessary).
884 You specify polynomial coefficients, an ascii table or neither.
885 If you specify neither, then a polynomial correction will be made
886 with built in coefficients known for certain telescopes (an error
887 will occur if the instrument is not known).
888 The data and Tsys are *divided* by the scaling factors.
889 Parameters:
890 poly: Polynomial coefficients (default None) to compute a
891 gain-elevation correction as a function of
892 elevation (in degrees).
893 filename: The name of an ascii file holding correction factors.
894 The first row of the ascii file must give the column
895 names and these MUST include columns
896 "ELEVATION" (degrees) and "FACTOR" (multiply data
897 by this) somewhere.
898 The second row must give the data type of the
899 column. Use 'R' for Real and 'I' for Integer.
900 An example file would be
901 (actual factors are arbitrary) :
902
903 TIME ELEVATION FACTOR
904 R R R
905 0.1 0 0.8
906 0.2 20 0.85
907 0.3 40 0.9
908 0.4 60 0.85
909 0.5 80 0.8
910 0.6 90 0.75
911 method: Interpolation method when correcting from a table.
912 Values are "nearest", "linear" (default), "cubic"
913 and "spline"
914 insitu: if False a new scantable is returned.
915 Otherwise, the scaling is done in-situ
916 The default is taken from .asaprc (False)
917 """
918
919 if insitu is None: insitu = rcParams['insitu']
[876]920 self._math._setinsitu(insitu)
[513]921 varlist = vars()
922 if poly is None:
[1118]923 poly = ()
[513]924 from os.path import expandvars
925 filename = expandvars(filename)
[876]926 s = scantable(self._math._gainel(self, poly, filename, method))
927 s._add_history("gain_el", varlist)
928 print_log()
929 if insitu: self._assign(s)
930 else: return s
[710]931
[931]932 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]933 """
934 Return a scan where all rows have been aligned in frequency/velocity.
935 The alignment frequency frame (e.g. LSRK) is that set by function
936 set_freqframe.
937 Parameters:
938 reftime: reference time to align at. By default, the time of
939 the first row of data is used.
940 method: Interpolation method for regridding the spectra.
941 Choose from "nearest", "linear", "cubic" (default)
942 and "spline"
943 insitu: if False a new scantable is returned.
944 Otherwise, the scaling is done in-situ
945 The default is taken from .asaprc (False)
946 """
[931]947 if insitu is None: insitu = rcParams["insitu"]
[876]948 self._math._setinsitu(insitu)
[513]949 varlist = vars()
[931]950 if reftime is None: reftime = ""
951 s = scantable(self._math._freq_align(self, reftime, method))
[876]952 s._add_history("freq_align", varlist)
953 print_log()
954 if insitu: self._assign(s)
955 else: return s
[513]956
[876]957 def opacity(self, tau, insitu=None):
[513]958 """
959 Apply an opacity correction. The data
960 and Tsys are multiplied by the correction factor.
961 Parameters:
962 tau: Opacity from which the correction factor is
963 exp(tau*ZD)
964 where ZD is the zenith-distance
965 insitu: if False a new scantable is returned.
966 Otherwise, the scaling is done in-situ
967 The default is taken from .asaprc (False)
968 """
969 if insitu is None: insitu = rcParams['insitu']
[876]970 self._math._setinsitu(insitu)
[513]971 varlist = vars()
[876]972 s = scantable(self._math._opacity(self, tau))
973 s._add_history("opacity", varlist)
974 print_log()
975 if insitu: self._assign(s)
976 else: return s
[513]977
978 def bin(self, width=5, insitu=None):
979 """
980 Return a scan where all spectra have been binned up.
981 width: The bin width (default=5) in pixels
982 insitu: if False a new scantable is returned.
983 Otherwise, the scaling is done in-situ
984 The default is taken from .asaprc (False)
985 """
986 if insitu is None: insitu = rcParams['insitu']
[876]987 self._math._setinsitu(insitu)
[513]988 varlist = vars()
[876]989 s = scantable(self._math._bin(self, width))
[1118]990 s._add_history("bin", varlist)
[876]991 print_log()
992 if insitu: self._assign(s)
993 else: return s
[513]994
[710]995
[513]996 def resample(self, width=5, method='cubic', insitu=None):
997 """
998 Return a scan where all spectra have been resampled onto a new channel grid.
999 width: The bin width (default=5) in pixels
1000 method: Interpolation method when correcting from a table.
1001 Values are "nearest", "linear", "cubic" (default)
1002 and "spline"
1003 insitu: if False a new scantable is returned.
1004 Otherwise, the scaling is done in-situ
1005 The default is taken from .asaprc (False)
1006 """
1007 if insitu is None: insitu = rcParams['insitu']
[876]1008 self._math._setinsitu(insitu)
[513]1009 varlist = vars()
[876]1010 s = scantable(self._math._resample(self, method, width))
[1118]1011 s._add_history("resample", varlist)
[876]1012 print_log()
1013 if insitu: self._assign(s)
1014 else: return s
[513]1015
1016
[946]1017 def average_pol(self, mask=None, weight='none'):
1018 """
1019 Average the Polarisations together.
1020 Parameters:
1021 mask: An optional mask defining the region, where the
1022 averaging will be applied. The output will have all
1023 specified points masked.
1024 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1025 weighted), or 'tsys' (1/Tsys**2 weighted)
1026 """
1027 varlist = vars()
1028 if mask is None:
1029 mask = ()
[1010]1030 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1031 s._add_history("average_pol", varlist)
[946]1032 print_log()
[992]1033 return s
[513]1034
[1145]1035 def average_beam(self, mask=None, weight='none'):
1036 """
1037 Average the Beams together.
1038 Parameters:
1039 mask: An optional mask defining the region, where the
1040 averaging will be applied. The output will have all
1041 specified points masked.
1042 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1043 weighted), or 'tsys' (1/Tsys**2 weighted)
1044 """
1045 varlist = vars()
1046 if mask is None:
1047 mask = ()
1048 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1049 s._add_history("average_beam", varlist)
1050 print_log()
1051 return s
1052
[992]1053 def convert_pol(self, poltype=None):
1054 """
1055 Convert the data to a different polarisation type.
1056 Parameters:
1057 poltype: The new polarisation type. Valid types are:
1058 "linear", "stokes" and "circular"
1059 """
1060 varlist = vars()
1061 try:
1062 s = scantable(self._math._convertpol(self, poltype))
[1118]1063 except RuntimeError, msg:
[992]1064 if rcParams['verbose']:
[1118]1065 print msg
1066 return
[992]1067 else:
1068 raise
[1118]1069 s._add_history("convert_pol", varlist)
[992]1070 print_log()
1071 return s
1072
[876]1073 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1074 """
1075 Smooth the spectrum by the specified kernel (conserving flux).
1076 Parameters:
1077 scan: The input scan
1078 kernel: The type of smoothing kernel. Select from
1079 'hanning' (default), 'gaussian' and 'boxcar'.
1080 The first three characters are sufficient.
1081 width: The width of the kernel in pixels. For hanning this is
1082 ignored, otherwise it defaults to 5 pixels.
1083 For 'gaussian' it is the Full Width Half
1084 Maximum. For 'boxcar' it is the full width.
1085 insitu: if False a new scantable is returned.
1086 Otherwise, the scaling is done in-situ
1087 The default is taken from .asaprc (False)
1088 Example:
1089 none
1090 """
1091 if insitu is None: insitu = rcParams['insitu']
[876]1092 self._math._setinsitu(insitu)
[513]1093 varlist = vars()
[1118]1094 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1095 s._add_history("smooth", varlist)
1096 print_log()
1097 if insitu: self._assign(s)
1098 else: return s
[513]1099
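# Illustrative usage sketch (the width is a placeholder):
#   sm = scan.smooth(kernel='boxcar', width=10, insitu=False)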
[876]1100
[1061]1101 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
[513]1102 """
1103 Return a scan which has been baselined (all rows) by a polynomial.
1104 Parameters:
[794]1105 scan: a scantable
1106 mask: an optional mask
1107 order: the order of the polynomial (default is 0)
[1061]1108 plot: plot the fit and the residual. In this mode each
1109 individual fit has to be approved by typing 'y'
1110 or 'n'
[794]1111 insitu: if False a new scantable is returned.
1112 Otherwise, the scaling is done in-situ
1113 The default is taken from .asaprc (False)
[513]1114 Example:
1115 # return a scan baselined by a third order polynomial,
1116 # not using a mask
1117 bscan = scan.poly_baseline(order=3)
[579]1118 """
[513]1119 if insitu is None: insitu = rcParams['insitu']
1120 varlist = vars()
1121 if mask is None:
[1134]1122 mask = list(NUM.ones(self.nchan(-1)))
[513]1123 from asap.asapfitter import fitter
1124 f = fitter()
1125 f.set_scan(self, mask)
1126 f.set_function(poly=order)
[1061]1127 s = f.auto_fit(insitu, plot=plot)
[876]1128 s._add_history("poly_baseline", varlist)
[718]1129 print_log()
[876]1130 if insitu: self._assign(s)
1131 else: return s
[513]1132
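# Illustrative usage sketch (the channel windows are placeholders): fit a
# first-order baseline to the line-free windows only:
#   msk = scan.create_mask([100, 400], [600, 900])
#   bscan = scan.poly_baseline(mask=msk, order=1)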
[1118]1133 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1061]1134 threshold=3, plot=False, insitu=None):
[880]1135 """
1136 Return a scan which has been baselined (all rows) by a polynomial.
1137 Spectral lines are detected first using linefinder and masked out
1138 to avoid them affecting the baseline solution.
1139
1140 Parameters:
1141 mask: an optional mask retrieved from the scantable
1142 edge: an optional number of channels to drop at
1143 the edge of spectrum. If only one value is
1144 specified, the same number will be dropped from
1145 both sides of the spectrum. Default is to keep
[907]1146 all channels. Nested tuples represent individual
[976]1147 edge selection for different IFs (the number of spectral
1148 channels can differ)
[880]1149 order: the order of the polynomial (default is 0)
1150 threshold: the threshold used by line finder. It is better to
1151 keep it large as only strong lines affect the
1152 baseline solution.
[1061]1153 plot: plot the fit and the residual. In this mode each
1154 individual fit has to be approved by typing 'y'
1155 or 'n'
[880]1156 insitu: if False a new scantable is returned.
1157 Otherwise, the scaling is done in-situ
1158 The default is taken from .asaprc (False)
1159
1160 Example:
1161 scan2=scan.auto_poly_baseline(order=7)
1162 """
1163 if insitu is None: insitu = rcParams['insitu']
1164 varlist = vars()
1165 from asap.asapfitter import fitter
1166 from asap.asaplinefind import linefinder
1167 from asap import _is_sequence_or_number as _is_valid
1168
[976]1169 # check whether edge is set up for each IF individually
[1118]1170 individualedge = False
1171 if len(edge) > 1:
1172 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1173 individualedge = True
[907]1174
[1118]1175 if not _is_valid(edge, int) and not individualedge:
[909]1176 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1177 pair of integers specified as a tuple. Nested tuples are allowed \
1178 to make individual selection for different IFs."
[919]1179
[1118]1180 curedge = (0, 0)
1181 if individualedge:
1182 for edgepar in edge:
1183 if not _is_valid(edgepar, int):
1184 raise ValueError, "Each element of the 'edge' tuple has \
1185 to be a pair of integers or an integer."
[907]1186 else:
[1118]1187 curedge = edge
[880]1188
1189 # setup fitter
1190 f = fitter()
1191 f.set_function(poly=order)
1192
1193 # setup line finder
[1118]1194 fl = linefinder()
[880]1195 fl.set_options(threshold=threshold)
1196
1197 if not insitu:
[1118]1198 workscan = self.copy()
[880]1199 else:
[1118]1200 workscan = self
[880]1201
[907]1202 fl.set_scan(workscan)
1203
[1118]1204 rows = range(workscan.nrow())
[880]1205 asaplog.push("Processing:")
1206 for r in rows:
[1118]1207 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1208 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1209 workscan.getpol(r), workscan.getcycle(r))
[880]1210 asaplog.push(msg, False)
[907]1211
[976]1212 # figure out edge parameter
[1118]1213 if individualedge:
1214 if len(edge) <= workscan.getif(r):
1215 raise RuntimeError, "Number of edge elements appears to " \
1216 "be less than the number of IFs"
1217 curedge = edge[workscan.getif(r)]
[919]1218
[976]1219 # setup line finder
[1118]1220 fl.find_lines(r, mask, curedge)
[880]1221 f.set_scan(workscan, fl.get_mask())
1222 f.x = workscan._getabcissa(r)
1223 f.y = workscan._getspectrum(r)
1224 f.data = None
1225 f.fit()
1226 x = f.get_parameters()
[1061]1227 if plot:
1228 f.plot(residual=True)
1229 x = raw_input("Accept fit ( [y]/n ): ")
1230 if x.upper() == 'N':
1231 continue
[880]1232 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1233 if plot:
1234 f._p.unmap()
1235 f._p = None
1236 workscan._add_history("auto_poly_baseline", varlist)
[880]1237 if insitu:
1238 self._assign(workscan)
1239 else:
1240 return workscan
1241
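# Illustrative usage sketch (edge channels and threshold are placeholders):
#   clean = scan.auto_poly_baseline(edge=(10, 10), order=2, threshold=5)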
[914]1242 def rotate_linpolphase(self, angle):
1243 """
1244 Rotate the phase of the complex polarization O=Q+iU correlation.
1245 This is always done in situ in the raw data. So if you call this
1246 function more than once then each call rotates the phase further.
1247 Parameters:
1248 angle: The angle (degrees) to rotate (add) by.
1249 Examples:
1250 scan.rotate_linpolphase(2.3)
1251 """
1252 varlist = vars()
[936]1253 self._math._rotate_linpolphase(self, angle)
[914]1254 self._add_history("rotate_linpolphase", varlist)
1255 print_log()
1256 return
[710]1257
[513]1258
[914]1259 def rotate_xyphase(self, angle):
1260 """
1261 Rotate the phase of the XY correlation. This is always done in situ
1262 in the data. So if you call this function more than once
1263 then each call rotates the phase further.
1264 Parameters:
1265 angle: The angle (degrees) to rotate (add) by.
1266 Examples:
1267 scan.rotate_xyphase(2.3)
1268 """
1269 varlist = vars()
[936]1270 self._math._rotate_xyphase(self, angle)
[914]1271 self._add_history("rotate_xyphase", varlist)
1272 print_log()
1273 return
1274
1275 def swap_linears(self):
1276 """
1277 Swap the linear polarisations XX and YY
1278 """
1279 varlist = vars()
[936]1280 self._math._swap_linears(self)
[914]1281 self._add_history("swap_linears", varlist)
1282 print_log()
1283 return
1284
1285 def invert_phase(self):
1286 """
1287 Invert the phase of the complex polarisation
1288 """
1289 varlist = vars()
[936]1290 self._math._invert_phase(self)
[914]1291 self._add_history("invert_phase", varlist)
1292 print_log()
1293 return
1294
[876]1295 def add(self, offset, insitu=None):
[513]1296 """
1297 Return a scan where all spectra have the offset added
1298 Parameters:
1299 offset: the offset
1300 insitu: if False a new scantable is returned.
1301 Otherwise, the scaling is done in-situ
1302 The default is taken from .asaprc (False)
1303 """
1304 if insitu is None: insitu = rcParams['insitu']
[876]1305 self._math._setinsitu(insitu)
[513]1306 varlist = vars()
[876]1307 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1308 s._add_history("add", varlist)
[876]1309 print_log()
1310 if insitu:
1311 self._assign(s)
1312 else:
[513]1313 return s
1314
[1118]1315 def scale(self, factor, tsys=True, insitu=None):
[513]1316 """
1317 Return a scan where all spectra are scaled by the given 'factor'
1318 Parameters:
1319 factor: the scaling factor
1320 insitu: if False a new scantable is returned.
1321 Otherwise, the scaling is done in-situ
1322 The default is taken from .asaprc (False)
1323 tsys: if True (default) then apply the operation to Tsys
1324 as well as the data
1325 """
1326 if insitu is None: insitu = rcParams['insitu']
[876]1327 self._math._setinsitu(insitu)
[513]1328 varlist = vars()
[876]1329 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1330 s._add_history("scale", varlist)
[876]1331 print_log()
1332 if insitu:
1333 self._assign(s)
1334 else:
[513]1335 return s
1336
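# Illustrative usage sketch (factor and offset are placeholders): chain the
# unary operations above without touching the original table:
#   calibrated = scan.scale(1.05, tsys=True, insitu=False)
#   shifted = calibrated.add(-0.2, insitu=False)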
[876]1337 def auto_quotient(self, mode='time', preserve=True):
[670]1338 """
1339 This function allows you to build quotients automatically.
1340 It assumes the observation has the same number of
1341 "ons" and "offs".
1342 It will support "closest off in time" in the future
1343 Parameters:
1344 mode: the on/off detection mode; 'suffix' (default)
1345 'suffix' identifies 'off' scans by the
1346 trailing '_R' (Mopra/Parkes) or
1347 '_e'/'_w' (Tid)
[710]1348 preserve: you can preserve (default) the continuum or
1349 remove it. The equations used are
[670]1350 preserve: Output = Toff * (on/off) - Toff
[1070]1351 remove: Output = Toff * (on/off) - Ton
[670]1352 """
[876]1353 modes = ["time"]
[670]1354 if not mode in modes:
[876]1355 msg = "please provide valid mode. Valid modes are %s" % (modes)
1356 raise ValueError(msg)
1357 varlist = vars()
[1070]1358 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1359 s._add_history("auto_quotient", varlist)
[876]1360 print_log()
1361 return s
[710]1362
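# Illustrative usage sketch:
#   q = scan.auto_quotient(preserve=False)   # quotient with the continuum removed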
[1145]1363 def mx_quotient(self, mask=None, weight='median', preserve=True):
[1141]1364 """
[1143]1365 Form a quotient using "off" beams when observing in "MX" mode.
1366 Parameters:
[1145]1367 mask: an optional mask to be used when weight == 'stddev'
[1143]1368 weight: How to average the off beams. Default is 'median'.
[1145]1369 preserve: you can preserve (default) the continuum or
1370 remove it. The equations used are
1371 preserve: Output = Toff * (on/off) - Toff
1372 remove: Output = Toff * (on/off) - Ton
[1141]1373 """
[1143]1374 if mask is None: mask = ()
[1141]1375 varlist = vars()
1376 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1377 preoff = scantable(self._math._mx_extract(self, 'off'))
1378 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1141]1379 from asapmath import quotient
[1145]1380 q = quotient(on, off, preserve)
[1143]1381 q._add_history("mx_quotient", varlist)
[1145]1382 print_log()
[1143]1383 return q
[513]1384
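# Illustrative usage sketch: median-combine the off beams of an MX observation
# and keep the continuum:
#   q = scan.mx_quotient(weight='median', preserve=True)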
[718]1385 def freq_switch(self, insitu=None):
1386 """
1387 Apply frequency switching to the data.
1388 Parameters:
1389 insitu: if False a new scantable is returned.
1390 Otherwise, the swictching is done in-situ
1391 The default is taken from .asaprc (False)
1392 Example:
1393 none
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
[876]1396 self._math._setinsitu(insitu)
[718]1397 varlist = vars()
[876]1398 s = scantable(self._math._freqswitch(self))
[1118]1399 s._add_history("freq_switch", varlist)
[876]1400 print_log()
1401 if insitu: self._assign(s)
1402 else: return s
[718]1403
[780]1404 def recalc_azel(self):
1405 """
1406 Recalculate the azimuth and elevation for each position.
1407 Parameters:
1408 none
1409 Example:
1410 """
1411 varlist = vars()
[876]1412 self._recalcazel()
[780]1413 self._add_history("recalc_azel", varlist)
1414 print_log()
1415 return
1416
[513]1417 def __add__(self, other):
1418 varlist = vars()
1419 s = None
1420 if isinstance(other, scantable):
[876]1421 print "scantable + scantable NYI"
1422 return
[513]1423 elif isinstance(other, float):
[876]1424 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1425 else:
[718]1426 raise TypeError("Other input is not a scantable or float value")
[513]1427 s._add_history("operator +", varlist)
[718]1428 print_log()
[513]1429 return s
1430
1431 def __sub__(self, other):
1432 """
1433 implicit on all axes and on Tsys
1434 """
1435 varlist = vars()
1436 s = None
1437 if isinstance(other, scantable):
[876]1438 print "scantable - scantable NYI"
1439 return
[513]1440 elif isinstance(other, float):
[876]1441 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1442 else:
[718]1443 raise TypeError("Other input is not a scantable or float value")
[513]1444 s._add_history("operator -", varlist)
[718]1445 print_log()
[513]1446 return s
[710]1447
[513]1448 def __mul__(self, other):
1449 """
1450 implicit on all axes and on Tsys
1451 """
1452 varlist = vars()
1453 s = None
1454 if isinstance(other, scantable):
[876]1455 print "scantable * scantable NYI"
1456 return
[513]1457 elif isinstance(other, float):
[876]1458 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1459 else:
[718]1460 raise TypeError("Other input is not a scantable or float value")
[513]1461 s._add_history("operator *", varlist)
[718]1462 print_log()
[513]1463 return s
1464
[710]1465
[513]1466 def __div__(self, other):
1467 """
1468 implicit on all axes and on Tsys
1469 """
1470 varlist = vars()
1471 s = None
1472 if isinstance(other, scantable):
[876]1473 print "scantable / scantable NYI"
1474 return
[513]1475 elif isinstance(other, float):
1476 if other == 0.0:
[718]1477 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1478 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1479 else:
[718]1480 raise TypeError("Other input is not a scantable or float value")
[513]1481 s._add_history("operator /", varlist)
[718]1482 print_log()
[513]1483 return s
1484
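# Illustrative usage sketch: the operators above accept float scalars and
# return new scantables:
#   hotter = scan + 0.5
#   halved = scan / 2.0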
[530]1485 def get_fit(self, row=0):
1486 """
1487 Print or return the stored fits for a row in the scantable
1488 Parameters:
1489 row: the row which the fit has been applied to.
1490 """
1491 if row >= self.nrow():
1492 return
[976]1493 from asap.asapfit import asapfit
[530]1494 fit = asapfit(self._getfit(row))
[718]1495 if rcParams['verbose']:
[530]1496 print fit
1497 return
1498 else:
1499 return fit.as_dict()
1500
[484]1501 def _add_history(self, funcname, parameters):
1502 # create date
1503 sep = "##"
1504 from datetime import datetime
1505 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1506 hist = dstr+sep
1507 hist += funcname+sep#cdate+sep
1508 if parameters.has_key('self'): del parameters['self']
[1118]1509 for k, v in parameters.iteritems():
[484]1510 if type(v) is dict:
[1118]1511 for k2, v2 in v.iteritems():
[484]1512 hist += k2
1513 hist += "="
[1118]1514 if isinstance(v2, scantable):
[484]1515 hist += 'scantable'
1516 elif k2 == 'mask':
[1118]1517 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1518 hist += str(self._zip_mask(v2))
1519 else:
1520 hist += str(v2)
[484]1521 else:
[513]1522 hist += str(v2)
[484]1523 else:
1524 hist += k
1525 hist += "="
[1118]1526 if isinstance(v, scantable):
[484]1527 hist += 'scantable'
1528 elif k == 'mask':
[1118]1529 if isinstance(v, list) or isinstance(v, tuple):
[513]1530 hist += str(self._zip_mask(v))
1531 else:
1532 hist += str(v)
[484]1533 else:
1534 hist += str(v)
1535 hist += sep
1536 hist = hist[:-2] # remove trailing '##'
1537 self._addhistory(hist)
1538
[710]1539
[484]1540 def _zip_mask(self, mask):
1541 mask = list(mask)
1542 i = 0
1543 segments = []
1544 while mask[i:].count(1):
1545 i += mask[i:].index(1)
1546 if mask[i:].count(0):
1547 j = i + mask[i:].index(0)
1548 else:
[710]1549 j = len(mask)
[1118]1550 segments.append([i, j])
[710]1551 i = j
[484]1552 return segments
[714]1553
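# Note on the helper above: a channel mask such as [0, 1, 1, 0, 0, 1] is
# compressed to the segments [[1, 3], [5, 6]] (start inclusive, end exclusive),
# which keeps the history entries written by _add_history compact.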
[626]1554 def _get_ordinate_label(self):
1555 fu = "("+self.get_fluxunit()+")"
1556 import re
1557 lbl = "Intensity"
[1118]1558 if re.match(".K.", fu):
[626]1559 lbl = "Brightness Temperature "+ fu
[1118]1560 elif re.match(".Jy.", fu):
[626]1561 lbl = "Flux density "+ fu
1562 return lbl
[710]1563
[876]1564 def _check_ifs(self):
1565 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1566 nchans = filter(lambda t: t > 0, nchans)
[876]1567 return (sum(nchans)/len(nchans) == nchans[0])
[976]1568
1569 def _fill(self, names, unit, average):
1570 import os
1571 from asap._asap import stfiller
1572 first = True
1573 fullnames = []
1574 for name in names:
1575 name = os.path.expandvars(name)
1576 name = os.path.expanduser(name)
1577 if not os.path.exists(name):
1578 msg = "File '%s' does not exists" % (name)
1579 if rcParams['verbose']:
1580 asaplog.push(msg)
1581 print asaplog.pop().strip()
1582 return
1583 raise IOError(msg)
1584 fullnames.append(name)
1585 if average:
1586 asaplog.push('Auto averaging integrations')
[1079]1587 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1588 for name in fullnames:
[1073]1589 tbl = Scantable(stype)
1590 r = stfiller(tbl)
[976]1591 msg = "Importing %s..." % (name)
[1118]1592 asaplog.push(msg, False)
[976]1593 print_log()
[1118]1594 r._open(name, -1, -1)
[976]1595 r._read()
[1073]1596 #tbl = r._getdata()
[976]1597 if average:
[1118]1598 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1599 #tbl = tbl2
1600 if not first:
1601 tbl = self._math._merge([self, tbl])
1602 #tbl = tbl2
1603 Scantable.__init__(self, tbl)
1604 r._close()
[1118]1605 del r, tbl
[976]1606 first = False
1607 if unit is not None:
1608 self.set_fluxunit(unit)
1609 self.set_freqframe(rcParams['scantable.freqframe'])
1610 #self._add_history("scantable", varlist)
1611