source: branches/alma/python/scantable.py@ 1734

Last change on this file since 1734 was 1701, checked in by WataruKawasaki, 15 years ago

New Development: Yes

JIRA Issue: Yes (CAS-1800 + CAS-1807)

Ready to Release: Yes

Interface Changes: Yes

What Interface Changed: added new methods to scantable and fitter.

Test Programs:

Put in Release Notes: No

Module(s): sdfit, sdflag

Description: added new methods 'scantable.clip' and 'fitter.set_lorentz_parameters'.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 87.0 KB
Rev  Line 
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[1684]14 def __init__(self, filename, average=None, unit=None, getpt=None, antenna=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[1522]32 getpt: for MeasurementSet input data only:
33 If True (the default), all pointing data are filled.
34 Setting this to False can make the MS data load
[1685]35 faster in some cases.
36 antenna: Antenna selection: an integer (id), a string (name
37 or id), or a list of these.
[710]38 """
[976]39 if average is None:
[710]40 average = rcParams['scantable.autoaverage']
[1496]41 if getpt is None:
[1673]42 getpt = True
[1684]43 if antenna is None:
44 antenna = ''
45 elif type(antenna) == int:
46 antenna = '%s'%antenna
47 elif type(antenna) == list:
48 tmpstr = ''
49 for i in range( len(antenna) ):
50 if type(antenna[i]) == int:
51 tmpstr = tmpstr + ('%s,'%(antenna[i]))
52 elif type(antenna[i]) == str:
53 tmpstr=tmpstr+antenna[i]+','
54 else:
55 asaplog.push('Bad antenna selection.')
56 print_log('ERROR')
57 return
58 antenna = tmpstr.rstrip(',')
[1259]59 varlist = vars()
[876]60 from asap._asap import stmath
[1680]61 self._math = stmath( rcParams['insitu'] )
[876]62 if isinstance(filename, Scantable):
63 Scantable.__init__(self, filename)
[181]64 else:
[1603]65 if isinstance(filename, str):# or \
66# (isinstance(filename, list) or isinstance(filename, tuple)) \
67# and isinstance(filename[-1], str):
[976]68 import os.path
69 filename = os.path.expandvars(filename)
70 filename = os.path.expanduser(filename)
71 if not os.path.exists(filename):
72 s = "File '%s' not found." % (filename)
[718]73 if rcParams['verbose']:
[976]74 asaplog.push(s)
[1612]75 #print asaplog.pop().strip()
[1614]76 print_log('ERROR')
[718]77 return
[976]78 raise IOError(s)
[1115]79 if os.path.isdir(filename) \
[1118]80 and not os.path.exists(filename+'/table.f1'):
[976]81 # crude check if asap table
82 if os.path.exists(filename+'/table.info'):
[1118]83 ondisk = rcParams['scantable.storage'] == 'disk'
84 Scantable.__init__(self, filename, ondisk)
[976]85 if unit is not None:
86 self.set_fluxunit(unit)
[1496]87 # do not reset to the default freqframe
88 #self.set_freqframe(rcParams['scantable.freqframe'])
[718]89 else:
[1118]90 msg = "The given file '%s' is not a valid " \
91 "asap table." % (filename)
[976]92 if rcParams['verbose']:
[1612]93 #print msg
[1614]94 asaplog.push( msg )
95 print_log( 'ERROR' )
[976]96 return
97 else:
98 raise IOError(msg)
[226]99 else:
[1684]100 self._fill([filename], unit, average, getpt, antenna)
[1118]101 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]102 and isinstance(filename[-1], str):
[1684]103 self._fill(filename, unit, average, getpt, antenna)
[1259]104 self._add_history("scantable", varlist)
[714]105 print_log()
[102]106
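# A minimal usage sketch for the constructor above, assuming an interactive ASAP
# session; the file names and the antenna id below are placeholders:
#
#   from asap import scantable
#   scan = scantable('mydata.rpf', average=True)            # read + time-average
#   msscan = scantable('mydata.ms', getpt=False, antenna=0)  # MS input, antenna 0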
[876]107 def save(self, name=None, format=None, overwrite=False):
[116]108 """
[1280]109 Store the scantable on disk. This can be an asap (aips++) Table,
110 SDFITS or MS2 format.
[116]111 Parameters:
[1093]112 name: the name of the output file. For format "ASCII"
113 this is the root file name (data in 'name'.txt
[497]114 and header in 'name'_header.txt)
[116]115 format: an optional file format. Default is ASAP.
[280]116 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]117 'SDFITS' (save as SDFITS file)
[200]118 'ASCII' (saves as ascii text file)
[226]119 'MS2' (saves as an aips++
120 MeasurementSet V2)
[1603]121 'FITS' (save as image FITS - not
122 readable by CLASS)
123 'CLASS' (save as FITS readable by CLASS)
[411]124 overwrite: whether to overwrite the file if it exists.
[256]125 The default (False) is to return with a warning
[411]126 without writing the output. USE WITH CARE.
[116]127 Example:
128 scan.save('myscan.asap')
[1118]129 scan.save('myscan.sdfits', 'SDFITS')
[116]130 """
[411]131 from os import path
[226]132 if format is None: format = rcParams['scantable.save']
[256]133 suffix = '.'+format.lower()
[1118]134 if name is None or name == "":
[256]135 name = 'scantable'+suffix
[718]136 msg = "No filename given. Using default name %s..." % name
137 asaplog.push(msg)
[411]138 name = path.expandvars(name)
[256]139 if path.isfile(name) or path.isdir(name):
140 if not overwrite:
[718]141 msg = "File %s exists." % name
142 if rcParams['verbose']:
[1612]143 #print msg
[1614]144 asaplog.push( msg )
145 print_log( 'ERROR' )
[718]146 return
147 else:
148 raise IOError(msg)
[451]149 format2 = format.upper()
150 if format2 == 'ASAP':
[116]151 self._save(name)
152 else:
[989]153 from asap._asap import stwriter as stw
[1118]154 writer = stw(format2)
155 writer.write(self, name)
[718]156 print_log()
[116]157 return
158
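# Illustrative call patterns for save(); the output names are placeholders and
# the format strings are those listed in the docstring above:
#
#   scan.save('myscan.asap')                                  # default ASAP table
#   scan.save('myscan.sdfits', format='SDFITS', overwrite=True)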
[102]159 def copy(self):
160 """
161 Return a copy of this scantable.
[1348]162 Note:
163 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]164 Parameters:
[113]165 none
[102]166 Example:
167 copiedscan = scan.copy()
168 """
[876]169 sd = scantable(Scantable._copy(self))
[113]170 return sd
171
[1093]172 def drop_scan(self, scanid=None):
173 """
174 Return a new scantable where the specified scan number(s) has(have)
175 been dropped.
176 Parameters:
177 scanid: a (list of) scan number(s)
178 """
179 from asap import _is_sequence_or_number as _is_valid
180 from asap import _to_list
181 from asap import unique
182 if not _is_valid(scanid):
183 if rcParams['verbose']:
[1612]184 #print "Please specify a scanno to drop from the scantable"
[1614]185 asaplog.push( 'Please specify a scanno to drop from the scantable' )
186 print_log( 'ERROR' )
[1093]187 return
188 else:
189 raise RuntimeError("No scan given")
190 try:
191 scanid = _to_list(scanid)
192 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
193 for sid in scanid: allscans.remove(sid)
[1118]194 if len(allscans) == 0:
195 raise ValueError("Can't remove all scans")
[1093]196 except ValueError:
197 if rcParams['verbose']:
[1612]198 #print "Couldn't find any match."
[1614]199 print_log()
200 asaplog.push( "Couldn't find any match." )
201 print_log( 'ERROR' )
[1093]202 return
203 else: raise
204 try:
205 bsel = self.get_selection()
206 sel = selector()
207 sel.set_scans(allscans)
208 self.set_selection(bsel+sel)
209 scopy = self._copy()
210 self.set_selection(bsel)
211 return scantable(scopy)
212 except RuntimeError:
[1118]213 if rcParams['verbose']:
[1612]214 #print "Couldn't find any match."
[1614]215 print_log()
216 asaplog.push( "Couldn't find any match." )
217 print_log( 'ERROR' )
[1118]218 else:
219 raise
[1093]220
221
[102]222 def get_scan(self, scanid=None):
223 """
224 Return a specific scan (by scanno) or collection of scans (by
225 source name) in a new scantable.
[1348]226 Note:
227 See scantable.drop_scan() for the inverse operation.
[102]228 Parameters:
[513]229 scanid: a (list of) scanno or a source name; unix-style
230 patterns are accepted for source name matching, e.g.
231 '*_R' gets all 'ref' scans
[102]232 Example:
[513]233 # get all scans containing the source '323p459'
234 newscan = scan.get_scan('323p459')
235 # get all 'off' scans
236 refscans = scan.get_scan('*_R')
237 # get a subset of scans by scanno (as listed in scan.summary())
[1118]238 newscan = scan.get_scan([0, 2, 7, 10])
[102]239 """
240 if scanid is None:
[718]241 if rcParams['verbose']:
[1612]242 #print "Please specify a scan no or name to " \
243 # "retrieve from the scantable"
[1614]244 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
245 print_log( 'ERROR' )
[718]246 return
247 else:
248 raise RuntimeError("No scan given")
249
[102]250 try:
[946]251 bsel = self.get_selection()
252 sel = selector()
[102]253 if type(scanid) is str:
[946]254 sel.set_name(scanid)
255 self.set_selection(bsel+sel)
[876]256 scopy = self._copy()
[946]257 self.set_selection(bsel)
[876]258 return scantable(scopy)
[102]259 elif type(scanid) is int:
[946]260 sel.set_scans([scanid])
261 self.set_selection(bsel+sel)
[876]262 scopy = self._copy()
[946]263 self.set_selection(bsel)
[876]264 return scantable(scopy)
[381]265 elif type(scanid) is list:
[946]266 sel.set_scans(scanid)
267 self.set_selection(sel)
[876]268 scopy = self._copy()
[946]269 self.set_selection(bsel)
[876]270 return scantable(scopy)
[381]271 else:
[718]272 msg = "Illegal scanid type, use 'int' or 'list' of ints."
273 if rcParams['verbose']:
[1612]274 #print msg
[1614]275 asaplog.push( msg )
276 print_log( 'ERROR' )
[718]277 else:
278 raise TypeError(msg)
[102]279 except RuntimeError:
[1612]280 if rcParams['verbose']:
281 #print "Couldn't find any match."
[1614]282 print_log()
283 asaplog.push( "Couldn't find any match." )
284 print_log( 'ERROR' )
[718]285 else: raise
[102]286
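# A short usage sketch for get_scan()/drop_scan(); the scan numbers and the name
# pattern are placeholders:
#
#   refs = scan.get_scan('*_R')          # all reference scans by name pattern
#   rest = scan.drop_scan([0, 1])        # everything except scans 0 and 1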
287 def __str__(self):
[1118]288 return Scantable._summary(self, True)
[102]289
[976]290 def summary(self, filename=None):
[102]291 """
292 Print a summary of the contents of this scantable.
293 Parameters:
294 filename: the name of a file to write the output to
295 Default - no file output
[381]296 verbose: print extra info such as the frequency table
297 The default (False) is taken from .asaprc
[102]298 """
[976]299 info = Scantable._summary(self, True)
300 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]301 if filename is not None:
[256]302 if filename == "":
303 filename = 'scantable_summary.txt'
[415]304 from os.path import expandvars, isdir
[411]305 filename = expandvars(filename)
[415]306 if not isdir(filename):
[413]307 data = open(filename, 'w')
308 data.write(info)
309 data.close()
310 else:
[718]311 msg = "Illegal file name '%s'." % (filename)
312 if rcParams['verbose']:
[1612]313 #print msg
[1614]314 asaplog.push( msg )
315 print_log( 'ERROR' )
[718]316 else:
317 raise IOError(msg)
318 if rcParams['verbose']:
[794]319 try:
320 from IPython.genutils import page as pager
321 except ImportError:
322 from pydoc import pager
323 pager(info)
[718]324 else:
325 return info
[710]326
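# Example calls for summary(); the output file name is a placeholder:
#
#   scan.summary()                       # page the summary to the screen
#   scan.summary('obs_summary.txt')      # or write it to a text file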
[1603]327 def get_spectrum(self, rowno):
328 """Return the spectrum for the current row in the scantable as a list.
329 Parameters:
330 rowno: the row number to retrieve the spectrum from
331 """
332 return self._getspectrum(rowno)
[946]333
[1603]334 def get_mask(self, rowno):
335 """Return the mask for the current row in the scantable as a list.
336 Parameters:
337 rowno: the row number to retrieve the mask from
338 """
339 return self._getmask(rowno)
340
341 def set_spectrum(self, spec, rowno):
342 """Return the spectrum for the current row in the scantable as a list.
343 Parameters:
344 spec: the spectrum
345 rowno: the row number to set the spectrum for
346 """
347 assert(len(spec) == self.nchan())
348 return self._setspectrum(spec, rowno)
349
[946]350 def get_selection(self):
351 """
[1005]352 Get the selection object currently set on this scantable.
353 Parameters:
354 none
355 Example:
356 sel = scan.get_selection()
357 sel.set_ifs(0) # select IF 0
358 scan.set_selection(sel) # apply modified selection
[946]359 """
360 return selector(self._getselection())
361
[1005]362 def set_selection(self, selection=selector()):
[946]363 """
[1005]364 Select a subset of the data. All following operations on this scantable
365 are only applied to this selection.
366 Parameters:
367 selection: a selector object (default unset the selection)
368 Examples:
369 sel = selector() # create a selection object
[1118]370 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]371 scan.set_selection(sel) # set the selection
372 scan.summary() # will only print summary of scanno 0 and 3
373 scan.set_selection() # unset the selection
[946]374 """
375 self._setselection(selection)
376
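# A small sketch combining get_selection() and set_selection(); the IF number is
# a placeholder:
#
#   sel = scan.get_selection()           # start from the current selection
#   sel.set_ifs([0])                     # restrict to IF 0
#   scan.set_selection(sel)              # apply it
#   scan.set_selection()                 # later, reset to no selection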
[1446]377 def get_row(self, row=0, insitu=None):
378 """
379 Select a row in the scantable.
380 Return a scantable with a single row.
381 Parameters:
382 row: row no of integration, default is 0.
383 insitu: if False a new scantable is returned.
384 Otherwise, the scaling is done in-situ
385 The default is taken from .asaprc (False)
386 """
387 if insitu is None: insitu = rcParams['insitu']
388 if not insitu:
389 workscan = self.copy()
390 else:
391 workscan = self
392 # Select a row
393 sel=selector()
394 sel.set_scans([workscan.getscan(row)])
395 sel.set_cycles([workscan.getcycle(row)])
396 sel.set_beams([workscan.getbeam(row)])
397 sel.set_ifs([workscan.getif(row)])
398 sel.set_polarisations([workscan.getpol(row)])
399 sel.set_name(workscan._getsourcename(row))
400 workscan.set_selection(sel)
401 if not workscan.nrow() == 1:
402 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
403 raise RuntimeError(msg)
404 del sel
405 if insitu:
406 self._assign(workscan)
407 else:
408 return workscan
409
[1629]410 #def stats(self, stat='stddev', mask=None):
411 def stats(self, stat='stddev', mask=None, form='3.3f'):
[102]412 """
[135]413 Determine the specified statistic of the current beam/if/pol
[102]414 Takes a 'mask' as an optional parameter to specify which
415 channels should be excluded.
416 Parameters:
[1517]417 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]418 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]419 mask: an optional mask specifying where the statistic
[102]420 should be determined.
[1629]421 form: format string to print statistic values
[102]422 Example:
[113]423 scan.set_unit('channel')
[1118]424 msk = scan.create_mask([100, 200], [500, 600])
[135]425 scan.stats(stat='mean', mask=msk)
[102]426 """
427 if mask == None:
[876]428 mask = []
[1118]429 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]430 if not self._check_ifs():
[1118]431 raise ValueError("Cannot apply mask as the IFs have different "
432 "number of channels. Please use setselection() "
433 "to select individual IFs")
[1530]434 rtnabc = False
435 if stat.lower().endswith('_abc'): rtnabc = True
436 getchan = False
437 if stat.lower().startswith('min') or stat.lower().startswith('max'):
[1517]438 chan = self._math._minmaxchan(self, mask, stat)
439 getchan = True
[1515]440 statvals = []
[1530]441 if not rtnabc: statvals = self._math._stats(self, mask, stat)
442
[876]443 out = ''
444 axes = []
445 for i in range(self.nrow()):
446 axis = []
447 axis.append(self.getscan(i))
448 axis.append(self.getbeam(i))
449 axis.append(self.getif(i))
450 axis.append(self.getpol(i))
451 axis.append(self.getcycle(i))
452 axes.append(axis)
453 tm = self._gettime(i)
454 src = self._getsourcename(i)
[1530]455 refstr = ''
456 statunit= ''
[1517]457 if getchan:
458 qx, qy = self.chan2data(rowno=i, chan=chan[i])
[1530]459 if rtnabc:
460 statvals.append(qx['value'])
[1629]461 #refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
462 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
[1530]463 statunit= '['+qx['unit']+']'
464 else:
[1629]465 #refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
466 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
[1530]467 #statunit= ' ['+qy['unit']+']'
[876]468 out += 'Scan[%d] (%s) ' % (axis[0], src)
469 out += 'Time[%s]:\n' % (tm)
470 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
471 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
472 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
[1629]473 #out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
474 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
[876]475 out += "--------------------------------------------------\n"
[256]476
[876]477 if rcParams['verbose']:
[1615]478 import os
[1625]479 if os.environ.has_key( 'USER' ):
480 usr=os.environ['USER']
481 else:
482 import commands
483 usr=commands.getoutput( 'whoami' )
[1613]484 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
[1612]485 f=open(tmpfile,'w')
486 print >> f, "--------------------------------------------------"
487 print >> f, " ", stat, statunit
488 print >> f, "--------------------------------------------------"
489 print >> f, out
490 f.close()
491 f=open(tmpfile,'r')
492 x=f.readlines()
493 f.close()
494 for xx in x:
[1666]495 asaplog.push( xx, False )
[1614]496 print_log()
[1295]497 #else:
498 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
499 # 'axes' : axes,
500 # 'data': statvals}
501 return statvals
[102]502
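# A usage sketch for stats(); the channel windows are placeholders:
#
#   scan.set_unit('channel')
#   msk = scan.create_mask([100, 200], [500, 600])
#   rms_per_row = scan.stats(stat='rms', mask=msk)   # one value per data row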
[1517]503 def chan2data(self, rowno=0, chan=0):
[1515]504 """
[1517]505 Returns channel/frequency/velocity and spectral value
[1515]506 at an arbitrary row and channel in the scantable.
507 Parameters:
508 rowno: a row number in the scantable. Default is the
509 first row, i.e. rowno=0
[1517]510 chan: a channel in the scantable. Default is the first
[1515]511 channel, i.e. pos=0
512 """
[1517]513 if isinstance(rowno, int) and isinstance(chan, int):
[1530]514 qx = {'unit': self.get_unit(),
515 'value': self._getabcissa(rowno)[chan]}
[1517]516 qy = {'unit': self.get_fluxunit(),
517 'value': self._getspectrum(rowno)[chan]}
518 return qx, qy
[1515]519
[1118]520 def stddev(self, mask=None):
[135]521 """
522 Determine the standard deviation of the current beam/if/pol
523 Takes a 'mask' as an optional parameter to specify which
524 channels should be excluded.
525 Parameters:
526 mask: an optional mask specifying where the standard
527 deviation should be determined.
528
529 Example:
530 scan.set_unit('channel')
[1118]531 msk = scan.create_mask([100, 200], [500, 600])
[135]532 scan.stddev(mask=msk)
533 """
[1118]534 return self.stats(stat='stddev', mask=mask);
[135]535
[1003]536
[1259]537 def get_column_names(self):
[1003]538 """
539 Return a list of column names, which can be used for selection.
540 """
[1259]541 return list(Scantable.get_column_names(self))
[1003]542
[876]543 def get_tsys(self):
[113]544 """
545 Return the System temperatures.
546 Returns:
[876]547 a list of Tsys values for the current selection
[113]548 """
[256]549
[876]550 return self._row_callback(self._gettsys, "Tsys")
[256]551
[876]552 def _row_callback(self, callback, label):
553 axes = []
[1118]554 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]555 out = ""
[1118]556 outvec = []
[876]557 for i in range(self.nrow()):
558 axis = []
559 axis.append(self.getscan(i))
560 axis.append(self.getbeam(i))
561 axis.append(self.getif(i))
562 axis.append(self.getpol(i))
563 axis.append(self.getcycle(i))
564 axes.append(axis)
565 tm = self._gettime(i)
566 src = self._getsourcename(i)
567 out += 'Scan[%d] (%s) ' % (axis[0], src)
568 out += 'Time[%s]:\n' % (tm)
569 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
570 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
571 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
572 outvec.append(callback(i))
573 out += '= %3.3f\n' % (outvec[i])
574 out += "--------------------------------------------------\n"
575 if rcParams['verbose']:
[1615]576 asaplog.push("--------------------------------------------------")
577 asaplog.push(" %s" % (label))
578 asaplog.push("--------------------------------------------------")
579 asaplog.push(out)
[1614]580 print_log()
[1175]581 # disabled because the vector seems more useful
582 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
583 return outvec
[256]584
[1070]585 def _get_column(self, callback, row=-1):
586 """
587 """
588 if row == -1:
589 return [callback(i) for i in range(self.nrow())]
590 else:
591 if 0 <= row < self.nrow():
592 return callback(row)
[256]593
[1070]594
[1348]595 def get_time(self, row=-1, asdatetime=False):
[113]596 """
597 Get a list of time stamps for the observations.
[1348]598 Return a datetime object for each integration time stamp in the scantable.
[113]599 Parameters:
[1348]600 row: row no of integration. Default -1 returns all rows
601 asdatetime: return values as datetime objects rather than strings
[113]602 Example:
603 none
604 """
[1175]605 from time import strptime
606 from datetime import datetime
[1457]607 times = self._get_column(self._gettime, row)
[1348]608 if not asdatetime:
[1603]609 return times
[1175]610 format = "%Y/%m/%d/%H:%M:%S"
611 if isinstance(times, list):
612 return [datetime(*strptime(i, format)[:6]) for i in times]
613 else:
614 return datetime(*strptime(times, format)[:6])
[102]615
[1348]616
617 def get_inttime(self, row=-1):
618 """
619 Get a list of integration times for the observations.
620 Return a time in seconds for each integration in the scantable.
621 Parameters:
622 row: row no of integration. Default -1 returns all rows.
623 Example:
624 none
625 """
626 return self._get_column(self._getinttime, row)
627
628
[714]629 def get_sourcename(self, row=-1):
630 """
[794]631 Get a list of source names for the observations.
[714]632 Return a string for each integration in the scantable.
633 Parameters:
[1348]634 row: row no of integration. Default -1 returns all rows.
[714]635 Example:
636 none
637 """
[1070]638 return self._get_column(self._getsourcename, row)
[714]639
[794]640 def get_elevation(self, row=-1):
641 """
642 Get a list of elevations for the observations.
643 Return a float for each integration in the scantable.
644 Parameters:
[1348]645 row: row no of integration. Default -1 returns all rows.
[794]646 Example:
647 none
648 """
[1070]649 return self._get_column(self._getelevation, row)
[794]650
651 def get_azimuth(self, row=-1):
652 """
653 Get a list of azimuths for the observations.
654 Return a float for each integration in the scantable.
655 Parameters:
[1348]656 row: row no of integration. Default -1 returns all rows.
[794]657 Example:
658 none
659 """
[1070]660 return self._get_column(self._getazimuth, row)
[794]661
662 def get_parangle(self, row=-1):
663 """
664 Get a list of parallactic angles for the observations.
665 Return a float for each integration in the scantable.
666 Parameters:
[1348]667 row: row no of integration. Default -1 returns all rows.
[794]668 Example:
669 none
670 """
[1070]671 return self._get_column(self._getparangle, row)
[794]672
[1070]673 def get_direction(self, row=-1):
674 """
675 Get a list of Positions on the sky (direction) for the observations.
676 Return a float for each integration in the scantable.
677 Parameters:
678 row: row no of integration. Default -1 returns all rows
679 Example:
680 none
681 """
682 return self._get_column(self._getdirection, row)
683
[1389]684 def get_directionval(self, row=-1):
685 """
686 Get a list of Positions on the sky (direction) for the observations.
687 Return a float for each integration in the scantable.
688 Parameters:
689 row: row no of integration. Default -1 returns all rows
690 Example:
691 none
692 """
693 return self._get_column(self._getdirectionvec, row)
694
[102]695 def set_unit(self, unit='channel'):
696 """
697 Set the unit for all following operations on this scantable
698 Parameters:
699 unit: optional unit, default is 'channel'
[1118]700 one of '*Hz', 'km/s', 'channel', ''
[102]701 """
[484]702 varlist = vars()
[1118]703 if unit in ['', 'pixel', 'channel']:
[113]704 unit = ''
705 inf = list(self._getcoordinfo())
706 inf[0] = unit
707 self._setcoordinfo(inf)
[1118]708 self._add_history("set_unit", varlist)
[113]709
[484]710 def set_instrument(self, instr):
[358]711 """
[1348]712 Set the instrument for subsequent processing.
[358]713 Parameters:
[710]714 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]715 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]716 """
717 self._setInstrument(instr)
[1118]718 self._add_history("set_instrument", vars())
[718]719 print_log()
[358]720
[1190]721 def set_feedtype(self, feedtype):
722 """
723 Overwrite the feed type, which might not be set correctly.
724 Parameters:
725 feedtype: 'linear' or 'circular'
726 """
727 self._setfeedtype(feedtype)
728 self._add_history("set_feedtype", vars())
729 print_log()
730
[276]731 def set_doppler(self, doppler='RADIO'):
732 """
733 Set the doppler for all following operations on this scantable.
734 Parameters:
735 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
736 """
[484]737 varlist = vars()
[276]738 inf = list(self._getcoordinfo())
739 inf[2] = doppler
740 self._setcoordinfo(inf)
[1118]741 self._add_history("set_doppler", vars())
[718]742 print_log()
[710]743
[226]744 def set_freqframe(self, frame=None):
[113]745 """
746 Set the frame type of the Spectral Axis.
747 Parameters:
[591]748 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]749 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
750 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]751 Examples:
752 scan.set_freqframe('BARY')
753 """
[484]754 if frame is None: frame = rcParams['scantable.freqframe']
755 varlist = vars()
[1118]756 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
757 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]758
[989]759 if frame in valid:
[113]760 inf = list(self._getcoordinfo())
761 inf[1] = frame
762 self._setcoordinfo(inf)
[1118]763 self._add_history("set_freqframe", varlist)
[102]764 else:
[1118]765 msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
[718]766 if rcParams['verbose']:
[1612]767 #print msg
[1614]768 asaplog.push( msg )
769 print_log( 'ERROR' )
[718]770 else:
771 raise TypeError(msg)
772 print_log()
[710]773
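# An illustrative coordinate setup using the methods above; the particular
# combination shown is arbitrary:
#
#   scan.set_freqframe('LSRK')
#   scan.set_doppler('RADIO')
#   scan.set_unit('km/s')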
[989]774 def set_dirframe(self, frame=""):
775 """
776 Set the frame type of the Direction on the sky.
777 Parameters:
778 frame: an optional frame type, default ''. Valid frames are:
779 'J2000', 'B1950', 'GALACTIC'
780 Examples:
781 scan.set_dirframe('GALACTIC')
782 """
783 varlist = vars()
784 try:
785 Scantable.set_dirframe(self, frame)
[1118]786 except RuntimeError, msg:
[989]787 if rcParams['verbose']:
[1612]788 #print msg
[1614]789 print_log()
[1676]790 asaplog.push( str(msg) )
[1614]791 print_log( 'ERROR' )
[989]792 else:
793 raise
[1118]794 self._add_history("set_dirframe", varlist)
[989]795
[113]796 def get_unit(self):
797 """
798 Get the default unit set in this scantable
799 Returns:
800 A unit string
801 """
802 inf = self._getcoordinfo()
803 unit = inf[0]
804 if unit == '': unit = 'channel'
805 return unit
[102]806
[158]807 def get_abcissa(self, rowno=0):
[102]808 """
[158]809 Get the abcissa in the current coordinate setup for the currently
[113]810 selected Beam/IF/Pol
811 Parameters:
[226]812 rowno: an optional row number in the scantable. Default is the
813 first row, i.e. rowno=0
[113]814 Returns:
[1348]815 The abcissa values and the format string (as a tuple)
[113]816 """
[256]817 abc = self._getabcissa(rowno)
[710]818 lbl = self._getabcissalabel(rowno)
[718]819 print_log()
[158]820 return abc, lbl
[113]821
[1401]822 def flag(self, mask=None, unflag=False):
[1001]823 """
824 Flag the selected data using an optional channel mask.
825 Parameters:
826 mask: an optional channel mask, created with create_mask. Default
827 (no mask) is all channels.
[1401]828 unflag: if True, unflag the data
[1001]829 """
830 varlist = vars()
[1118]831 if mask is None:
832 mask = []
[1001]833 try:
[1401]834 self._flag(mask, unflag)
[1118]835 except RuntimeError, msg:
[1001]836 if rcParams['verbose']:
[1612]837 #print msg
[1614]838 print_log()
[1676]839 asaplog.push( str(msg) )
[1614]840 print_log( 'ERROR' )
[1001]841 return
842 else: raise
843 self._add_history("flag", varlist)
844
[1655]845 def flag_row(self, rows=[], unflag=False):
846 """
847 Flag the selected data in row-based manner.
848 Parameters:
849 rows: list of row numbers to be flagged. Default is no row (must be explicitly specified to execute row-based flagging).
850 unflag: if True, unflag the data.
851 """
852 varlist = vars()
853 try:
854 self._flag_row(rows, unflag)
855 except RuntimeError, msg:
856 if rcParams['verbose']:
857 print_log()
[1676]858 asaplog.push( str(msg) )
[1655]859 print_log('ERROR')
860 return
861 else: raise
862 self._add_history("flag_row", varlist)
863
[1701]864 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
865 """
866 Flag the selected data outside a specified range (channel-based)
867 Parameters:
868 uthres: upper threshold.
869 dthres: lower threshold
870 clipoutside: True for flagging data outside the range [dthres:uthres].
871 False for flagging data inside the range.
872 unflag : if True, unflag the data.
873 """
874 varlist = vars()
875 try:
876 self._clip(uthres, dthres, clipoutside, unflag)
877 except RuntimeError, msg:
878 if rcParams['verbose']:
879 print_log()
880 asaplog.push(str(msg))
881 print_log('ERROR')
882 return
883 else: raise
884 self._add_history("clip", varlist)
[1655]885
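# A usage sketch for flag(), flag_row() and clip(); the channel range, row
# numbers and thresholds are placeholders:
#
#   msk = scan.create_mask([0, 100])
#   scan.flag(msk)                        # flag channels 0-100
#   scan.flag_row([3, 4])                 # flag rows 3 and 4 entirely
#   scan.clip(uthres=5.0, dthres=-5.0)    # flag data outside [-5.0, 5.0]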
[1203]886 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]887 """
888 Flag the data in 'lag' space by providing a frequency to remove.
889 Flagged data in the scantable gets set to 0.0 before the fft.
890 No taper is applied.
891 Parameters:
[1348]892 frequency: the frequency (really a period within the bandwidth)
893 to remove
894 width: the width of the frequency to remove, to remove a
[1603]895 range of frequencies around the centre.
[1203]896 unit: the frequency unit (default "GHz")
897 Notes:
[1348]898 It is recommended to flag edges of the band or strong
899 signals beforehand.
[1192]900 """
901 if insitu is None: insitu = rcParams['insitu']
902 self._math._setinsitu(insitu)
903 varlist = vars()
[1370]904 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]905 if not base.has_key(unit):
906 raise ValueError("%s is not a valid unit." % unit)
907 try:
[1200]908 s = scantable(self._math._lag_flag(self, frequency*base[unit],
909 width*base[unit]))
[1192]910 except RuntimeError, msg:
911 if rcParams['verbose']:
[1612]912 #print msg
[1614]913 print_log()
[1676]914 asaplog.push( str(msg) )
[1614]915 print_log( 'ERROR' )
[1192]916 return
917 else: raise
918 s._add_history("lag_flag", varlist)
919 print_log()
920 if insitu:
921 self._assign(s)
922 else:
923 return s
[1001]924
[1192]925
[113]926 def create_mask(self, *args, **kwargs):
927 """
[1118]928 Compute and return a mask based on [min, max] windows.
[189]929 The specified windows are to be INCLUDED, when the mask is
[113]930 applied.
[102]931 Parameters:
[1118]932 [min, max], [min2, max2], ...
[1024]933 Pairs of start/end points (inclusive) specifying the regions
[102]934 to be masked
[189]935 invert: optional argument. If specified as True,
936 return an inverted mask, i.e. the regions
937 specified are EXCLUDED
[513]938 row: create the mask using the specified row for
939 unit conversions, default is row=0
940 only necessary if frequency varies over rows.
[102]941 Example:
[113]942 scan.set_unit('channel')
943 a)
[1118]944 msk = scan.create_mask([400, 500], [800, 900])
[189]945 # masks everything outside 400 and 500
[113]946 # and 800 and 900 in the unit 'channel'
947
948 b)
[1118]949 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]950 # masks the regions between 400 and 500
[113]951 # and 800 and 900 in the unit 'channel'
[1024]952 c)
953 mask only channel 400
[1118]954 msk = scan.create_mask([400, 400])
[102]955 """
[513]956 row = 0
957 if kwargs.has_key("row"):
958 row = kwargs.get("row")
959 data = self._getabcissa(row)
[113]960 u = self._getcoordinfo()[0]
[718]961 if rcParams['verbose']:
[113]962 if u == "": u = "channel"
[718]963 msg = "The current mask window unit is %s" % u
[1118]964 i = self._check_ifs()
965 if not i:
[876]966 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]967 asaplog.push(msg)
[102]968 n = self.nchan()
[1295]969 msk = _n_bools(n, False)
[710]970 # test if args is a 'list' or a 'normal *args - UGLY!!!
971
[1118]972 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
973 and args or args[0]
[710]974 for window in ws:
[102]975 if (len(window) != 2 or window[0] > window[1] ):
[1118]976 raise TypeError("A window needs to be defined as [min, max]")
[102]977 for i in range(n):
[1024]978 if data[i] >= window[0] and data[i] <= window[1]:
[1295]979 msk[i] = True
[113]980 if kwargs.has_key('invert'):
981 if kwargs.get('invert'):
[1295]982 msk = mask_not(msk)
[718]983 print_log()
[102]984 return msk
[710]985
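# A sketch of create_mask() in velocity units; the window values are
# placeholders:
#
#   scan.set_unit('km/s')
#   line_free = scan.create_mask([-50.0, 50.0], invert=True)  # exclude the line region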
[1446]986 def get_masklist(self, mask=None, row=0):
[256]987 """
[1446]988 Compute and return a list of mask windows, [min, max].
989 Parameters:
990 mask: channel mask, created with create_mask.
991 row: calculate the masklist using the specified row
992 for unit conversions, default is row=0
993 only necessary if frequency varies over rows.
994 Returns:
995 [min, max], [min2, max2], ...
996 Pairs of start/end points (inclusive) specifying
997 the masked regions
998 """
999 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1000 raise TypeError("The mask should be list or tuple.")
1001 if len(mask) < 2:
1002 raise TypeError("The mask should contain at least 2 elements")
1003 if self.nchan() != len(mask):
1004 msg = "Number of channels in scantable != number of mask elements"
1005 raise TypeError(msg)
1006 data = self._getabcissa(row)
1007 u = self._getcoordinfo()[0]
1008 if rcParams['verbose']:
1009 if u == "": u = "channel"
1010 msg = "The current mask window unit is %s" % u
1011 i = self._check_ifs()
1012 if not i:
1013 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1014 asaplog.push(msg)
1015 masklist=[]
1016 ist, ien = None, None
1017 ist, ien=self.get_mask_indices(mask)
1018 if ist is not None and ien is not None:
1019 for i in xrange(len(ist)):
1020 range=[data[ist[i]],data[ien[i]]]
1021 range.sort()
1022 masklist.append([range[0],range[1]])
1023 return masklist
1024
1025 def get_mask_indices(self, mask=None):
1026 """
1027 Compute and return lists of mask start indices and mask end indices.
1028 Parameters:
1029 mask: channel mask, created with create_mask.
1030 Returns:
1031 List of mask start indices and that of mask end indices,
1032 i.e., [istart1,istart2,....], [iend1,iend2,....].
1033 """
1034 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1035 raise TypeError("The mask should be list or tuple.")
1036 if len(mask) < 2:
1037 raise TypeError("The mask should contain at least 2 elements")
1038 istart=[]
1039 iend=[]
1040 if mask[0]: istart.append(0)
1041 for i in range(len(mask)-1):
1042 if not mask[i] and mask[i+1]:
1043 istart.append(i+1)
1044 elif mask[i] and not mask[i+1]:
1045 iend.append(i)
1046 if mask[len(mask)-1]: iend.append(len(mask)-1)
1047 if len(istart) != len(iend):
1048 raise RuntimeError("Numbers of mask start != mask end.")
1049 for i in range(len(istart)):
1050 if istart[i] > iend[i]:
1051 raise RuntimeError("Mask start index > mask end index")
1052 break
1053 return istart,iend
1054
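# A usage sketch for get_masklist() and get_mask_indices(); the channel window
# is a placeholder:
#
#   msk = scan.create_mask([100, 200])
#   windows = scan.get_masklist(msk)            # [[xmin, xmax], ...] in the current unit
#   istart, iend = scan.get_mask_indices(msk)   # start/end channel indices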
1055# def get_restfreqs(self):
1056# """
1057# Get the restfrequency(s) stored in this scantable.
1058# The return value(s) are always of unit 'Hz'
1059# Parameters:
1060# none
1061# Returns:
1062# a list of doubles
1063# """
1064# return list(self._getrestfreqs())
1065
1066 def get_restfreqs(self, ids=None):
1067 """
[256]1068 Get the restfrequency(s) stored in this scantable.
1069 The return value(s) are always of unit 'Hz'
1070 Parameters:
[1446]1071 ids: (optional) a list of MOLECULE_IDs for which the
1072 restfrequency(s) are to be retrieved
[256]1073 Returns:
[1446]1074 dictionary containing ids and a list of doubles for each id
[256]1075 """
[1446]1076 if ids is None:
1077 rfreqs={}
1078 idlist = self.getmolnos()
1079 for i in idlist:
1080 rfreqs[i]=list(self._getrestfreqs(i))
1081 return rfreqs
1082 else:
1083 if type(ids)==list or type(ids)==tuple:
1084 rfreqs={}
1085 for i in ids:
1086 rfreqs[i]=list(self._getrestfreqs(i))
1087 return rfreqs
1088 else:
1089 return list(self._getrestfreqs(ids))
1090 #return list(self._getrestfreqs(ids))
[102]1091
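# Example calls for get_restfreqs(); the MOLECULE_ID is a placeholder:
#
#   all_rf = scan.get_restfreqs()     # {molecule_id: [freq_in_Hz, ...], ...}
#   rf0 = scan.get_restfreqs(0)       # rest frequencies for MOLECULE_ID 0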
[931]1092 def set_restfreqs(self, freqs=None, unit='Hz'):
1093 """
[1446]1094 ********NEED TO BE UPDATED begin************
[931]1095 Set or replace the restfrequency(s) specified.
1096 If the 'freqs' argument holds a scalar,
1097 then that rest frequency will be applied to all the selected
1098 data. If the 'freqs' argument holds
1099 a vector, then it MUST be of equal or smaller length than
1100 the number of IFs (and the available restfrequencies will be
1101 replaced by this vector). In this case, *all* data have
1102 the restfrequency set per IF according
1103 to the corresponding value you give in the 'freqs' vector.
[1118]1104 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1105 IF 1 gets restfreq 2e9.
[1446]1106 ********NEED TO BE UPDATED end************
[1603]1107 You can also specify the frequencies via a linecatalog.
[1153]1108
[931]1109 Parameters:
[931]1110 freqs: list of rest frequency values or string identifiers
1111 unit: unit for rest frequency (default 'Hz')
[402]1112
[931]1113 Example:
[1446]1114 # set the given restfrequency for the all currently selected IFs
[931]1115 scan.set_restfreqs(freqs=1.4e9)
[1446]1116 # set multiple restfrequencies to all the selected data
1117 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1118 # If the number of IFs in the data is >= 2 the IF0 gets the first
1119 # value IF1 the second... NOTE that freqs needs to be
1120 # specified in list of list (e.g. [[],[],...] ).
1121 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]1122 #set the given restfrequency for the whole table (by name)
1123 scan.set_restfreqs(freqs="OH1667")
[391]1124
[931]1125 Note:
1126 To do more sophisticated restfrequency setting, e.g. on a
1127 source and IF basis, use scantable.set_selection() before using
1128 this function.
1129 # provided your scantable is called scan
1130 selection = selector()
1131 selection.set_name("ORION*")
1132 selection.set_ifs([1])
1133 scan.set_selection(selection)
1134 scan.set_restfreqs(freqs=86.6e9)
1135
1136 """
1137 varlist = vars()
[1157]1138 from asap import linecatalog
1139 # simple value
[1118]1140 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]1141 # TT mod
1142 #self._setrestfreqs(freqs, "",unit)
1143 self._setrestfreqs([freqs], [""],unit)
[1157]1144 # list of values
[1118]1145 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1146 # list values are scalars
[1118]1147 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]1148 self._setrestfreqs(freqs, [""],unit)
[1157]1149 # list values are tuples, (value, name)
1150 elif isinstance(freqs[-1], dict):
[1446]1151 #sel = selector()
1152 #savesel = self._getselection()
1153 #iflist = self.getifnos()
1154 #for i in xrange(len(freqs)):
1155 # sel.set_ifs(iflist[i])
1156 # self._setselection(sel)
1157 # self._setrestfreqs(freqs[i], "",unit)
1158 #self._setselection(savesel)
1159 self._setrestfreqs(freqs["value"],
1160 freqs["name"], "MHz")
1161 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1162 sel = selector()
1163 savesel = self._getselection()
[1322]1164 iflist = self.getifnos()
[1446]1165 if len(freqs)>len(iflist):
1166 raise ValueError("number of elements in list of list exceeds the current IF selections")
[1157]1167 for i in xrange(len(freqs)):
[1322]1168 sel.set_ifs(iflist[i])
[1259]1169 self._setselection(sel)
[1157]1170 self._setrestfreqs(freqs[i]["value"],
1171 freqs[i]["name"], "MHz")
1172 self._setselection(savesel)
1173 # freqs are to be taken from a linecatalog
[1153]1174 elif isinstance(freqs, linecatalog):
1175 sel = selector()
1176 savesel = self._getselection()
1177 for i in xrange(freqs.nrow()):
[1322]1178 sel.set_ifs(iflist[i])
[1153]1179 self._setselection(sel)
1180 self._setrestfreqs(freqs.get_frequency(i),
1181 freqs.get_name(i), "MHz")
1182 # ensure that we are not iterating past nIF
1183 if i == self.nif()-1: break
1184 self._setselection(savesel)
[931]1185 else:
1186 return
1187 self._add_history("set_restfreqs", varlist)
1188
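# A usage sketch for set_restfreqs(); the frequency values are placeholders:
#
#   scan.set_restfreqs(freqs=1.420405752e9)         # one value for the selected IFs
#   scan.set_restfreqs(freqs=[[1.4e9], [1.67e9]])   # per-IF values as a list of lists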
[1360]1189 def shift_refpix(self, delta):
1190 """
1191 Shift the reference pixel of the Spectra Coordinate by an
1192 integer amount.
1193 Parameters:
1194 delta: the amount to shift by
1195 Note:
1196 Be careful using this with broadband data.
1197 """
1198 Scantable.shift(self, delta)
[931]1199
[1259]1200 def history(self, filename=None):
1201 """
1202 Print the history. Optionally to a file.
[1348]1203 Parameters:
1204 filename: The name of the file to save the history to.
[1259]1205 """
[484]1206 hist = list(self._gethistory())
[794]1207 out = "-"*80
[484]1208 for h in hist:
[489]1209 if h.startswith("---"):
[794]1210 out += "\n"+h
[489]1211 else:
1212 items = h.split("##")
1213 date = items[0]
1214 func = items[1]
1215 items = items[2:]
[794]1216 out += "\n"+date+"\n"
1217 out += "Function: %s\n Parameters:" % (func)
[489]1218 for i in items:
1219 s = i.split("=")
[1118]1220 out += "\n %s = %s" % (s[0], s[1])
[794]1221 out += "\n"+"-"*80
[1259]1222 if filename is not None:
1223 if filename == "":
1224 filename = 'scantable_history.txt'
1225 import os
1226 filename = os.path.expandvars(os.path.expanduser(filename))
1227 if not os.path.isdir(filename):
1228 data = open(filename, 'w')
1229 data.write(out)
1230 data.close()
1231 else:
1232 msg = "Illegal file name '%s'." % (filename)
1233 if rcParams['verbose']:
[1612]1234 #print msg
[1614]1235 asaplog.push( msg )
1236 print_log( 'ERROR' )
[1259]1237 else:
1238 raise IOError(msg)
1239 if rcParams['verbose']:
1240 try:
1241 from IPython.genutils import page as pager
1242 except ImportError:
1243 from pydoc import pager
1244 pager(out)
1245 else:
1246 return out
[484]1247 return
[513]1248 #
1249 # Maths business
1250 #
1251
[931]1252 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1253 """
[1070]1254 Return the (time) weighted average of a scan.
[513]1255 Note:
[1070]1256 in channels only - align if necessary
[513]1257 Parameters:
1258 mask: an optional mask (only used for 'var' and 'tsys'
1259 weighting)
[558]1260 scanav: True averages each scan separately
1261 False (default) averages all scans together,
[1099]1262 weight: Weighting scheme.
1263 'none' (mean no weight)
1264 'var' (1/var(spec) weighted)
1265 'tsys' (1/Tsys**2 weighted)
1266 'tint' (integration time weighted)
1267 'tintsys' (Tint/Tsys**2)
1268 'median' (median averaging)
[535]1269 The default is 'tint'
[931]1270 align: align the spectra in velocity before averaging. It takes
1271 the time of the first spectrum as reference time.
[513]1272 Example:
1273 # time average the scantable without using a mask
[710]1274 newscan = scan.average_time()
[513]1275 """
1276 varlist = vars()
[976]1277 if weight is None: weight = 'TINT'
[513]1278 if mask is None: mask = ()
[1099]1279 if scanav: scanav = "SCAN"
1280 else: scanav = "NONE"
[1118]1281 scan = (self, )
[989]1282 try:
[1118]1283 if align:
1284 scan = (self.freq_align(insitu=False), )
1285 s = None
1286 if weight.upper() == 'MEDIAN':
1287 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1288 scanav))
1289 else:
1290 s = scantable(self._math._average(scan, mask, weight.upper(),
1291 scanav))
1292 except RuntimeError, msg:
[989]1293 if rcParams['verbose']:
[1612]1294 #print msg
[1614]1295 print_log()
[1676]1296 asaplog.push( str(msg) )
[1614]1297 print_log( 'ERROR' )
[989]1298 return
1299 else: raise
[1099]1300 s._add_history("average_time", varlist)
[718]1301 print_log()
[513]1302 return s
[710]1303
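# Illustrative calls to average_time(); the weighting choices shown are
# arbitrary examples:
#
#   avg = scan.average_time(weight='tintsys')             # all scans together
#   scanavg = scan.average_time(scanav=True, align=True)  # scan by scan, aligned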
[876]1304 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1305 """
1306 Return a scan where all spectra are converted to either
1307 Jansky or Kelvin depending upon the flux units of the scan table.
1308 By default the function tries to look the values up internally.
1309 If it can't find them (or if you want to over-ride), you must
1310 specify EITHER jyperk OR eta (and D which it will try to look up
1311 also if you don't set it). jyperk takes precedence if you set both.
1312 Parameters:
1313 jyperk: the Jy / K conversion factor
1314 eta: the aperture efficiency
1315 d: the geometric diameter (metres)
1316 insitu: if False a new scantable is returned.
1317 Otherwise, the scaling is done in-situ
1318 The default is taken from .asaprc (False)
1319 """
1320 if insitu is None: insitu = rcParams['insitu']
[876]1321 self._math._setinsitu(insitu)
[513]1322 varlist = vars()
1323 if jyperk is None: jyperk = -1.0
1324 if d is None: d = -1.0
1325 if eta is None: eta = -1.0
[876]1326 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1327 s._add_history("convert_flux", varlist)
1328 print_log()
1329 if insitu: self._assign(s)
1330 else: return s
[513]1331
[876]1332 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1333 """
1334 Return a scan after applying a gain-elevation correction.
1335 The correction can be made via either a polynomial or a
1336 table-based interpolation (and extrapolation if necessary).
1337 You specify polynomial coefficients, an ascii table or neither.
1338 If you specify neither, then a polynomial correction will be made
1339 with built in coefficients known for certain telescopes (an error
1340 will occur if the instrument is not known).
1341 The data and Tsys are *divided* by the scaling factors.
1342 Parameters:
1343 poly: Polynomial coefficients (default None) to compute a
1344 gain-elevation correction as a function of
1345 elevation (in degrees).
1346 filename: The name of an ascii file holding correction factors.
1347 The first row of the ascii file must give the column
1348 names and these MUST include columns
1349 "ELEVATION" (degrees) and "FACTOR" (multiply data
1350 by this) somewhere.
1351 The second row must give the data type of the
1352 column. Use 'R' for Real and 'I' for Integer.
1353 An example file would be
1354 (actual factors are arbitrary) :
1355
1356 TIME ELEVATION FACTOR
1357 R R R
1358 0.1 0 0.8
1359 0.2 20 0.85
1360 0.3 40 0.9
1361 0.4 60 0.85
1362 0.5 80 0.8
1363 0.6 90 0.75
1364 method: Interpolation method when correcting from a table.
1365 Values are "nearest", "linear" (default), "cubic"
1366 and "spline"
1367 insitu: if False a new scantable is returned.
1368 Otherwise, the scaling is done in-situ
1369 The default is taken from .asaprc (False)
1370 """
1371
1372 if insitu is None: insitu = rcParams['insitu']
[876]1373 self._math._setinsitu(insitu)
[513]1374 varlist = vars()
1375 if poly is None:
[1118]1376 poly = ()
[513]1377 from os.path import expandvars
1378 filename = expandvars(filename)
[876]1379 s = scantable(self._math._gainel(self, poly, filename, method))
1380 s._add_history("gain_el", varlist)
1381 print_log()
1382 if insitu: self._assign(s)
1383 else: return s
[710]1384
[931]1385 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1386 """
1387 Return a scan where all rows have been aligned in frequency/velocity.
1388 The alignment frequency frame (e.g. LSRK) is that set by function
1389 set_freqframe.
1390 Parameters:
1391 reftime: reference time to align at. By default, the time of
1392 the first row of data is used.
1393 method: Interpolation method for regridding the spectra.
1394 Choose from "nearest", "linear", "cubic" (default)
1395 and "spline"
1396 insitu: if False a new scantable is returned.
1397 Otherwise, the scaling is done in-situ
1398 The default is taken from .asaprc (False)
1399 """
[931]1400 if insitu is None: insitu = rcParams["insitu"]
[876]1401 self._math._setinsitu(insitu)
[513]1402 varlist = vars()
[931]1403 if reftime is None: reftime = ""
1404 s = scantable(self._math._freq_align(self, reftime, method))
[876]1405 s._add_history("freq_align", varlist)
1406 print_log()
1407 if insitu: self._assign(s)
1408 else: return s
[513]1409
[876]1410 def opacity(self, tau, insitu=None):
[513]1411 """
1412 Apply an opacity correction. The data
1413 and Tsys are multiplied by the correction factor.
1414 Parameters:
1415 tau: Opacity from which the correction factor is
1416 exp(tau*ZD)
1417 where ZD is the zenith-distance
1418 insitu: if False a new scantable is returned.
1419 Otherwise, the scaling is done in-situ
1420 The default is taken from .asaprc (False)
1421 """
1422 if insitu is None: insitu = rcParams['insitu']
[876]1423 self._math._setinsitu(insitu)
[513]1424 varlist = vars()
[876]1425 s = scantable(self._math._opacity(self, tau))
1426 s._add_history("opacity", varlist)
1427 print_log()
1428 if insitu: self._assign(s)
1429 else: return s
[513]1430
1431 def bin(self, width=5, insitu=None):
1432 """
1433 Return a scan where all spectra have been binned up.
[1348]1434 Parameters:
[513]1435 width: The bin width (default=5) in pixels
1436 insitu: if False a new scantable is returned.
1437 Otherwise, the scaling is done in-situ
1438 The default is taken from .asaprc (False)
1439 """
1440 if insitu is None: insitu = rcParams['insitu']
[876]1441 self._math._setinsitu(insitu)
[513]1442 varlist = vars()
[876]1443 s = scantable(self._math._bin(self, width))
[1118]1444 s._add_history("bin", varlist)
[876]1445 print_log()
1446 if insitu: self._assign(s)
1447 else: return s
[513]1448
[710]1449
[513]1450 def resample(self, width=5, method='cubic', insitu=None):
1451 """
[1348]1452 Return a scan where all spectra have been resampled.
1453
1454 Parameters:
[513]1455 width: The bin width (default=5) in pixels
1456 method: Interpolation method for the resampling.
1457 Values are "nearest", "linear", "cubic" (default)
1458 and "spline"
1459 insitu: if False a new scantable is returned.
1460 Otherwise, the scaling is done in-situ
1461 The default is taken from .asaprc (False)
1462 """
1463 if insitu is None: insitu = rcParams['insitu']
[876]1464 self._math._setinsitu(insitu)
[513]1465 varlist = vars()
[876]1466 s = scantable(self._math._resample(self, method, width))
[1118]1467 s._add_history("resample", varlist)
[876]1468 print_log()
1469 if insitu: self._assign(s)
1470 else: return s
[513]1471
1472
[946]1473 def average_pol(self, mask=None, weight='none'):
1474 """
1475 Average the Polarisations together.
1476 Parameters:
1477 mask: An optional mask defining the region, where the
1478 averaging will be applied. The output will have all
1479 specified points masked.
1480 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1481 weighted), or 'tsys' (1/Tsys**2 weighted)
1482 """
1483 varlist = vars()
1484 if mask is None:
1485 mask = ()
[1010]1486 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1487 s._add_history("average_pol", varlist)
[946]1488 print_log()
[992]1489 return s
[513]1490
[1145]1491 def average_beam(self, mask=None, weight='none'):
1492 """
1493 Average the Beams together.
1494 Parameters:
1495 mask: An optional mask defining the region, where the
1496 averaging will be applied. The output will have all
1497 specified points masked.
1498 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1499 weighted), or 'tsys' (1/Tsys**2 weighted)
1500 """
1501 varlist = vars()
1502 if mask is None:
1503 mask = ()
1504 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1505 s._add_history("average_beam", varlist)
1506 print_log()
1507 return s
1508
[992]1509 def convert_pol(self, poltype=None):
1510 """
1511 Convert the data to a different polarisation type.
1512 Parameters:
1513 poltype: The new polarisation type. Valid types are:
1514 "linear", "stokes" and "circular"
1515 """
1516 varlist = vars()
1517 try:
1518 s = scantable(self._math._convertpol(self, poltype))
[1118]1519 except RuntimeError, msg:
[992]1520 if rcParams['verbose']:
[1612]1521 #print msg
[1614]1522 print_log()
[1676]1523 asaplog.push( str(msg) )
[1614]1524 print_log( 'ERROR' )
[1118]1525 return
[992]1526 else:
1527 raise
[1118]1528 s._add_history("convert_pol", varlist)
[992]1529 print_log()
1530 return s
1531
[1653]1532 #def smooth(self, kernel="hanning", width=5.0, insitu=None):
1533 def smooth(self, kernel="hanning", width=5.0, plot=False, insitu=None):
[513]1534 """
1535 Smooth the spectrum by the specified kernel (conserving flux).
1536 Parameters:
1537 kernel: The type of smoothing kernel. Select from
[1373]1538 'hanning' (default), 'gaussian', 'boxcar' and
1539 'rmedian'
[513]1540 width: The width of the kernel in pixels. For hanning this is
1541 ignored, otherwise it defaults to 5 pixels.
1542 For 'gaussian' it is the Full Width Half
1543 Maximum. For 'boxcar' it is the full width.
[1373]1544 For 'rmedian' it is the half width.
[1653]1545 plot: plot the original and the smoothed spectra.
1546 In this mode each individual fit has to be approved, by
1547 typing 'y' or 'n'
[513]1548 insitu: if False a new scantable is returned.
1549 Otherwise, the scaling is done in-situ
1550 The default is taken from .asaprc (False)
1551 Example:
1552 none
1553 """
1554 if insitu is None: insitu = rcParams['insitu']
[876]1555 self._math._setinsitu(insitu)
[513]1556 varlist = vars()
[1653]1557
1558 if plot: orgscan = self.copy()
1559
[1118]1560 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1561 s._add_history("smooth", varlist)
[1653]1562
1563 if plot:
1564 if rcParams['plotter.gui']:
1565 from asap.asaplotgui import asaplotgui as asaplot
1566 else:
1567 from asap.asaplot import asaplot
1568 self._p=asaplot()
1569 self._p.set_panels()
1570 ylab=s._get_ordinate_label()
1571 #self._p.palette(0,["#777777","red"])
1572 for r in xrange(s.nrow()):
1573 xsm=s._getabcissa(r)
1574 ysm=s._getspectrum(r)
1575 xorg=orgscan._getabcissa(r)
1576 yorg=orgscan._getspectrum(r)
1577 self._p.clear()
1578 self._p.hold()
1579 self._p.set_axes('ylabel',ylab)
1580 self._p.set_axes('xlabel',s._getabcissalabel(r))
1581 self._p.set_axes('title',s._getsourcename(r))
1582 self._p.set_line(label='Original',color="#777777")
1583 self._p.plot(xorg,yorg)
1584 self._p.set_line(label='Smoothed',color="red")
1585 self._p.plot(xsm,ysm)
1586 ### Ugly part for legend
1587 for i in [0,1]:
1588 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1589 self._p.release()
1590 ### Ugly part for legend
1591 self._p.subplots[0]['lines']=[]
1592 res = raw_input("Accept smoothing ([y]/n): ")
1593 if res.upper() == 'N':
1594 s._setspectrum(yorg, r)
1595 self._p.unmap()
1596 self._p = None
1597 del orgscan
1598
[876]1599 print_log()
1600 if insitu: self._assign(s)
1601 else: return s
[513]1602
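# A short sketch for smooth(); the kernel width is a placeholder:
#
#   sm = scan.smooth(kernel='boxcar', width=9, insitu=False)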
[876]1603
[1389]1604 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1605 """
1606 Return a scan which has been baselined (all rows) by a polynomial.
1607 Parameters:
[794]1608 mask: an optional mask
1609 order: the order of the polynomial (default is 0)
[1061]1610 plot: plot the fit and the residual. In this mode each
1611 individual fit has to be approved, by typing 'y'
1612 or 'n'
[1389]1613 uselin: use linear polynomial fit
[794]1614 insitu: if False a new scantable is returned.
1615 Otherwise, the scaling is done in-situ
1616 The default is taken from .asaprc (False)
[513]1617 Example:
1618 # return a scan baselined by a third order polynomial,
1619 # not using a mask
1620 bscan = scan.poly_baseline(order=3)
[579]1621 """
[513]1622 if insitu is None: insitu = rcParams['insitu']
[1649]1623 if not insitu:
1624 workscan = self.copy()
1625 else:
1626 workscan = self
[513]1627 varlist = vars()
1628 if mask is None:
[1295]1629 mask = [True for i in xrange(self.nchan(-1))]
[1637]1630
[513]1631 from asap.asapfitter import fitter
[1217]1632 try:
1633 f = fitter()
[1389]1634 if uselin:
1635 f.set_function(lpoly=order)
1636 else:
1637 f.set_function(poly=order)
[1637]1638
[1655]1639 rows = range(workscan.nrow())
[1637]1640 if len(rows) > 0:
1641 self.blpars = []
1642
1643 for r in rows:
1644 # take into account flagtra info (CAS-1434)
[1655]1645 flagtra = workscan._getmask(r)
[1637]1646 actualmask = mask[:]
1647 if len(actualmask) == 0:
1648 actualmask = list(flagtra[:])
1649 else:
1650 if len(actualmask) != len(flagtra):
1651 raise RuntimeError, "Mask and flagtra have different length"
1652 else:
1653 for i in range(0, len(actualmask)):
1654 actualmask[i] = actualmask[i] and flagtra[i]
[1655]1655 f.set_scan(workscan, actualmask)
1656 f.x = workscan._getabcissa(r)
1657 f.y = workscan._getspectrum(r)
[1637]1658 f.data = None
1659 f.fit()
[1653]1660 if plot:
1661 f.plot(residual=True)
1662 x = raw_input("Accept fit ( [y]/n ): ")
1663 if x.upper() == 'N':
1664 self.blpars.append(None)
1665 continue
[1649]1666 workscan._setspectrum(f.fitter.getresidual(), r)
[1655]1667 self.blpars.append(f.get_parameters())
1668
[1653]1669 if plot:
1670 f._p.unmap()
1671 f._p = None
[1649]1672 workscan._add_history("poly_baseline", varlist)
[1217]1673 print_log()
[1649]1674 if insitu: self._assign(workscan)
1675 else: return workscan
[1217]1676 except RuntimeError:
1677 msg = "The fit failed, possibly because it didn't converge."
1678 if rcParams['verbose']:
[1612]1679 #print msg
[1614]1680 print_log()
[1676]1681 asaplog.push( str(msg) )
[1614]1682 print_log( 'ERROR' )
[1217]1683 return
1684 else:
1685 raise RuntimeError(msg)
[513]1686
[1217]1687
[1118]1688 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1689 threshold=3, chan_avg_limit=1, plot=False,
1690 insitu=None):
[880]1691 """
1692 Return a scan which has been baselined (all rows) by a polynomial.
1693 Spectral lines are detected first using linefinder and masked out
1694 to avoid them affecting the baseline solution.
1695
1696 Parameters:
1697 mask: an optional mask retrieved from scantable
1698 edge: an optional number of channels to drop at
1699 the edge of the spectrum. If only one value is
1700 specified, the same number will be dropped from
1701 both sides of the spectrum. Default is to keep
[907]1702 all channels. Nested tuples represent individual
[976]1703 edge selections for different IFs (the number of spectral
1704 channels can differ between IFs)
[880]1705 order: the order of the polynomial (default is 0)
1706 threshold: the threshold used by the line finder. It is better to
1707 keep it large as only strong lines affect the
1708 baseline solution.
[1280]1709 chan_avg_limit:
1710 the maximum number of consecutive spectral channels to
1711 average during the search for weak and broad lines.
1712 The default is no averaging (and no search for weak
1713 lines). If such lines can affect the fitted baseline
1714 (e.g. a high order polynomial is fitted), increase this
1715 parameter (usually values up to 8 are reasonable). Most
1716 users of this method should find the default value
1717 sufficient.
[1061]1718 plot: plot the fit and the residual. In this mode each
1719 individual fit has to be approved by typing 'y'
1720 or 'n'
[880]1721 insitu: if False a new scantable is returned.
1722 Otherwise, the baselining is done in-situ
1723 The default is taken from .asaprc (False)
1724
1725 Example:
1726 scan2=scan.auto_poly_baseline(order=7)
1727 """
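        # A sketch of per-IF edge selection (channel counts are illustrative);
        # the nested tuple provides one (left, right) pair per IF:
        #   scan3 = scan.auto_poly_baseline(edge=((10, 10), (80, 80)), order=3,
        #                                   threshold=5, insitu=False)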
1728 if insitu is None: insitu = rcParams['insitu']
1729 varlist = vars()
1730 from asap.asapfitter import fitter
1731 from asap.asaplinefind import linefinder
1732 from asap import _is_sequence_or_number as _is_valid
1733
[976]1734 # check whether edge is set up for each IF individually
[1118]1735 individualedge = False
1736 if len(edge) > 1:
1737 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1738 individualedge = True
[907]1739
[1118]1740 if not _is_valid(edge, int) and not individualedge:
[909]1741 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1742 pair of integers specified as a tuple. Nested tuples are allowed \
1743 to make individual selection for different IFs."
[919]1744
[1118]1745 curedge = (0, 0)
1746 if individualedge:
1747 for edgepar in edge:
1748 if not _is_valid(edgepar, int):
1749 raise ValueError, "Each element of the 'edge' tuple has \
1750 to be a pair of integers or an integer."
[907]1751 else:
[1118]1752 curedge = edge
[880]1753
1754 # setup fitter
1755 f = fitter()
1756 f.set_function(poly=order)
1757
1758 # setup line finder
[1118]1759 fl = linefinder()
[1268]1760 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1761
1762 if not insitu:
[1118]1763 workscan = self.copy()
[880]1764 else:
[1118]1765 workscan = self
[880]1766
[907]1767 fl.set_scan(workscan)
1768
[1118]1769 rows = range(workscan.nrow())
[1446]1770 # Save parameters of baseline fits & masklists as a class attribute.
1771 # NOTICE: It does not reflect changes in scantable!
1772 if len(rows) > 0:
1773 self.blpars=[]
1774 self.masklists=[]
[880]1775 asaplog.push("Processing:")
1776 for r in rows:
[1118]1777 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1778 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1779 workscan.getpol(r), workscan.getcycle(r))
[880]1780 asaplog.push(msg, False)
[907]1781
[976]1782 # figure out edge parameter
[1118]1783 if individualedge:
1784 if len(edge) <= workscan.getif(r):
1785 raise RuntimeError, "Number of edge elements appears to " \
1786 "be less than the number of IFs"
1787 curedge = edge[workscan.getif(r)]
[919]1788
[1637]1789 # take into account flagtra info (CAS-1434)
1790 flagtra = workscan._getmask(r)
1791 actualmask = mask[:]
1792 if len(actualmask) == 0:
1793 actualmask = list(flagtra[:])
1794 else:
1795 if len(actualmask) != len(flagtra):
1796 raise RuntimeError, "Mask and flagtra have different lengths"
1797 else:
1798 for i in range(0, len(actualmask)):
1799 actualmask[i] = actualmask[i] and flagtra[i]
1800
[976]1801 # setup line finder
[1637]1802 fl.find_lines(r, actualmask, curedge)
[1446]1803 outmask=fl.get_mask()
[880]1804 f.set_scan(workscan, fl.get_mask())
1805 f.x = workscan._getabcissa(r)
1806 f.y = workscan._getspectrum(r)
1807 f.data = None
1808 f.fit()
[1446]1809
1810 # Show mask list
1811 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1812 msg = "mask range: "+str(masklist)
1813 asaplog.push(msg, False)
1814
[1061]1815 if plot:
1816 f.plot(residual=True)
1817 x = raw_input("Accept fit ( [y]/n ): ")
1818 if x.upper() == 'N':
[1446]1819 self.blpars.append(None)
1820 self.masklists.append(None)
[1061]1821 continue
[1655]1822
[880]1823 workscan._setspectrum(f.fitter.getresidual(), r)
[1655]1824 self.blpars.append(f.get_parameters())
[1446]1825 self.masklists.append(masklist)
[1061]1826 if plot:
1827 f._p.unmap()
1828 f._p = None
1829 workscan._add_history("auto_poly_baseline", varlist)
[880]1830 if insitu:
1831 self._assign(workscan)
1832 else:
1833 return workscan
1834
[914]1835 def rotate_linpolphase(self, angle):
1836 """
1837 Rotate the phase of the complex polarization O=Q+iU correlation.
1838 This is always done in situ in the raw data. So if you call this
1839 function more than once then each call rotates the phase further.
1840 Parameters:
1841 angle: The angle (degrees) to rotate (add) by.
1842 Examples:
1843 scan.rotate_linpolphase(2.3)
1844 """
1845 varlist = vars()
[936]1846 self._math._rotate_linpolphase(self, angle)
[914]1847 self._add_history("rotate_linpolphase", varlist)
1848 print_log()
1849 return
[710]1850
[513]1851
[914]1852 def rotate_xyphase(self, angle):
1853 """
1854 Rotate the phase of the XY correlation. This is always done in situ
1855 in the data. So if you call this function more than once
1856 then each call rotates the phase further.
1857 Parameters:
1858 angle: The angle (degrees) to rotate (add) by.
1859 Examples:
1860 scan.rotate_xyphase(2.3)
1861 """
1862 varlist = vars()
[936]1863 self._math._rotate_xyphase(self, angle)
[914]1864 self._add_history("rotate_xyphase", varlist)
1865 print_log()
1866 return
1867
1868 def swap_linears(self):
1869 """
[1348]1870 Swap the linear polarisations XX and YY, or more generally the first two
1871 polarisations, as this also works for circulars.
[914]1872 """
1873 varlist = vars()
[936]1874 self._math._swap_linears(self)
[914]1875 self._add_history("swap_linears", varlist)
1876 print_log()
1877 return
1878
1879 def invert_phase(self):
1880 """
1881 Invert the phase of the complex polarisation
1882 """
1883 varlist = vars()
[936]1884 self._math._invert_phase(self)
[914]1885 self._add_history("invert_phase", varlist)
1886 print_log()
1887 return
1888
[876]1889 def add(self, offset, insitu=None):
[513]1890 """
1891 Return a scan where all spectra have the offset added
1892 Parameters:
1893 offset: the offset
1894 insitu: if False a new scantable is returned.
1895 Otherwise, the operation is done in-situ
1896 The default is taken from .asaprc (False)
1897 """
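        # e.g. add an offset of 0.5 (in the current flux unit; the value is
        # illustrative) without touching the original:
        #   scan2 = scan.add(0.5, insitu=False)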
1898 if insitu is None: insitu = rcParams['insitu']
[876]1899 self._math._setinsitu(insitu)
[513]1900 varlist = vars()
[876]1901 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1902 s._add_history("add", varlist)
[876]1903 print_log()
1904 if insitu:
1905 self._assign(s)
1906 else:
[513]1907 return s
1908
[1680]1909 def scale(self, factor, tsys=True, insitu=None):
[513]1910 """
1911 Return a scan where all spectra are scaled by the given 'factor'
1912 Parameters:
[1677]1913 factor: the scaling factor (float, a 1D float list, or a 2D float list with one factor list per row)
[513]1914 insitu: if False a new scantable is returned.
1915 Otherwise, the scaling is done in-situ
1916 The default is taken from .asaprc (False)
1917 tsys: if True (default) then apply the operation to Tsys
1918 as well as the data
1919 """
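        # Sketches of the accepted 'factor' shapes (values are illustrative;
        # the 1D list form is assumed to be applied element-wise over channels):
        #   scan2 = scan.scale(1.3, insitu=False)
        #   scan3 = scan.scale([1.3] * scan.nchan(-1), insitu=False)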
1920 if insitu is None: insitu = rcParams['insitu']
[876]1921 self._math._setinsitu(insitu)
[513]1922 varlist = vars()
[1680]1923 s = None
[1681]1924 import numpy
1925 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
1926 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
[1680]1927 from asapmath import _array2dOp
1928 s = _array2dOp( self.copy(), factor, "MUL", tsys )
[1677]1929 else:
[1681]1930 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
[1677]1931 else:
1932 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]1933 s._add_history("scale", varlist)
[876]1934 print_log()
1935 if insitu:
1936 self._assign(s)
1937 else:
[513]1938 return s
1939
[1603]1940 def set_sourcetype(self, match, matchtype="pattern",
1941 sourcetype="reference"):
1942 """
1943 Set the type of the source to be a source or reference scan
1944 using the provided pattern.
1945 Parameters:
1946 match: a Unix style pattern, regular expression or selector
1947 matchtype: 'pattern' (default) UNIX style pattern or
1948 'regex' regular expression
1949 sourcetype: the type of the source to use (source/reference)
1950 """
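        # e.g. mark every scan whose source name ends in '_R' as a reference
        # (the pattern is illustrative):
        #   scan.set_sourcetype('*_R', matchtype='pattern', sourcetype='reference')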
1951 varlist = vars()
1952 basesel = self.get_selection()
1953 stype = -1
1954 if sourcetype.lower().startswith("r"):
1955 stype = 1
1956 elif sourcetype.lower().startswith("s"):
1957 stype = 0
1958 else:
1959 raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1960 if matchtype.lower().startswith("p"):
1961 matchtype = "pattern"
1962 elif matchtype.lower().startswith("r"):
1963 matchtype = "regex"
1964 else:
1965 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1966 sel = selector()
1967 if isinstance(match, selector):
1968 sel = match
1969 else:
1970 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1971 self.set_selection(basesel+sel)
1972 self._setsourcetype(stype)
1973 self.set_selection(basesel)
1974 self._add_history("set_sourcetype", varlist)
1975
[1631]1976 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[670]1977 """
1978 This function builds quotients automatically.
[1631]1979 It assumes the observation has the same number of
[670]1980 "ons" and "offs".
1981 Parameters:
[710]1982 preserve: you can preserve (default) the continuum or
1983 remove it. The equations used are
[670]1984 preserve: Output = Toff * (on/off) - Toff
[1070]1985 remove: Output = Toff * (on/off) - Ton
[1348]1986 mode: the on/off detection mode
1987 'paired' (default)
1988 identifies 'off' scans by the
1989 trailing '_R' (Mopra/Parkes) or
1990 '_e'/'_w' (Tid) and matches
1991 on/off pairs from the observing pattern
[1603]1992 'time'
1993 finds the closest off in time
[1348]1994
[670]1995 """
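        # e.g. form the quotient with the continuum preserved, matching on/off
        # pairs from the observing pattern:
        #   q = scan.auto_quotient(preserve=True, mode='paired')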
[1348]1996 modes = ["time", "paired"]
[670]1997 if not mode in modes:
[876]1998 msg = "please provide valid mode. Valid modes are %s" % (modes)
1999 raise ValueError(msg)
2000 varlist = vars()
[1348]2001 s = None
2002 if mode.lower() == "paired":
2003 basesel = self.get_selection()
[1356]2004 sel = selector()+basesel
2005 sel.set_query("SRCTYPE==1")
2006 self.set_selection(sel)
[1348]2007 offs = self.copy()
2008 sel.set_query("SRCTYPE==0")
[1356]2009 self.set_selection(sel)
[1348]2010 ons = self.copy()
2011 s = scantable(self._math._quotient(ons, offs, preserve))
2012 self.set_selection(basesel)
2013 elif mode.lower() == "time":
2014 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2015 s._add_history("auto_quotient", varlist)
[876]2016 print_log()
2017 return s
[710]2018
[1145]2019 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]2020 """
[1143]2021 Form a quotient using "off" beams when observing in "MX" mode.
2022 Parameters:
[1145]2023 mask: an optional mask to be used when weight == 'stddev'
[1143]2024 weight: How to average the off beams. Default is 'median'.
[1145]2025 preserve: you can preserve (default) the continuum or
2026 remove it. The equations used are
2027 preserve: Output = Toff * (on/off) - Toff
2028 remove: Output = Toff * (on/off) - Ton
[1217]2029 """
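        # e.g. median-average the off beams and keep the continuum:
        #   q = scan.mx_quotient(weight='median', preserve=True)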
[1143]2030 if mask is None: mask = ()
[1141]2031 varlist = vars()
2032 on = scantable(self._math._mx_extract(self, 'on'))
[1143]2033 preoff = scantable(self._math._mx_extract(self, 'off'))
2034 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2035 from asapmath import quotient
[1145]2036 q = quotient(on, off, preserve)
[1143]2037 q._add_history("mx_quotient", varlist)
[1145]2038 print_log()
[1217]2039 return q
[513]2040
[718]2041 def freq_switch(self, insitu=None):
2042 """
2043 Apply frequency switching to the data.
2044 Parameters:
2045 insitu: if False a new scantable is returned.
2046 Otherwise, the switching is done in-situ
2047 The default is taken from .asaprc (False)
2048 Example:
2049 none
2050 """
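        # a minimal sketch:
        #   fs = scan.freq_switch(insitu=False)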
2051 if insitu is None: insitu = rcParams['insitu']
[876]2052 self._math._setinsitu(insitu)
[718]2053 varlist = vars()
[876]2054 s = scantable(self._math._freqswitch(self))
[1118]2055 s._add_history("freq_switch", varlist)
[876]2056 print_log()
2057 if insitu: self._assign(s)
2058 else: return s
[718]2059
[780]2060 def recalc_azel(self):
2061 """
2062 Recalculate the azimuth and elevation for each position.
2063 Parameters:
2064 none
2065 Example:
2066 """
2067 varlist = vars()
[876]2068 self._recalcazel()
[780]2069 self._add_history("recalc_azel", varlist)
2070 print_log()
2071 return
2072
[513]2073 def __add__(self, other):
[1681]2074 """
2075 implicit on all axes and on Tsys
2076 """
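        # The arithmetic operators accept another scantable, a scalar, or a
        # (possibly nested) float list; a sketch with hypothetical scantables
        # 'sig' and 'ref':
        #   diff = sig - ref       # scantable - scantable
        #   scaled = sig * 2.0     # scantable * scalar
        # All four operators dispatch to _operation() defined below.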
2077 return self._operation( other, "ADD" )
[513]2078
2079 def __sub__(self, other):
2080 """
2081 implicit on all axes and on Tsys
2082 """
[1681]2083 return self._operation( other, 'SUB' )
[710]2084
[513]2085 def __mul__(self, other):
2086 """
2087 implicit on all axes and on Tsys
2088 """
[1681]2089 return self._operation( other, 'MUL' )
[513]2090
2091 def __div__(self, other):
2092 """
2093 implicit on all axes and on Tsys
2094 """
[1681]2095 return self._operation( other, 'DIV' )
[513]2096
[530]2097 def get_fit(self, row=0):
2098 """
2099 Print or return the stored fits for a row in the scantable
2100 Parameters:
2101 row: the row which the fit has been applied to.
2102 """
2103 if row >= self.nrow():
2104 return
[976]2105 from asap.asapfit import asapfit
[530]2106 fit = asapfit(self._getfit(row))
[718]2107 if rcParams['verbose']:
[1612]2108 #print fit
[1614]2109 asaplog.push( '%s' %(fit) )
2110 print_log()
[530]2111 return
2112 else:
2113 return fit.as_dict()
2114
[1603]2115 def flag_nans(self):
2116 """
2117 Utility function to flag NaN values in the scantable.
2118 """
2119 import numpy
2120 basesel = self.get_selection()
2121 for i in range(self.nrow()):
2122 sel = selector()+basesel
2123 sel.set_scans(self.getscan(i))
2124 sel.set_beams(self.getbeam(i))
2125 sel.set_ifs(self.getif(i))
2126 sel.set_polarisations(self.getpol(i))
2127 self.set_selection(sel)
2128 nans = numpy.isnan(self._getspectrum(0))
2129 if numpy.any(nans):
2130 bnans = [ bool(v) for v in nans]
2131 self.flag(bnans)
2132 self.set_selection(basesel)
2133
2134
[484]2135 def _add_history(self, funcname, parameters):
[1603]2136 if not rcParams['scantable.history']:
2137 return
[484]2138 # create date
2139 sep = "##"
2140 from datetime import datetime
2141 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2142 hist = dstr+sep
2143 hist += funcname+sep#cdate+sep
2144 if parameters.has_key('self'): del parameters['self']
[1118]2145 for k, v in parameters.iteritems():
[484]2146 if type(v) is dict:
[1118]2147 for k2, v2 in v.iteritems():
[484]2148 hist += k2
2149 hist += "="
[1118]2150 if isinstance(v2, scantable):
[484]2151 hist += 'scantable'
2152 elif k2 == 'mask':
[1118]2153 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2154 hist += str(self._zip_mask(v2))
2155 else:
2156 hist += str(v2)
[484]2157 else:
[513]2158 hist += str(v2)
[484]2159 else:
2160 hist += k
2161 hist += "="
[1118]2162 if isinstance(v, scantable):
[484]2163 hist += 'scantable'
2164 elif k == 'mask':
[1118]2165 if isinstance(v, list) or isinstance(v, tuple):
[513]2166 hist += str(self._zip_mask(v))
2167 else:
2168 hist += str(v)
[484]2169 else:
2170 hist += str(v)
2171 hist += sep
2172 hist = hist[:-2] # remove trailing '##'
2173 self._addhistory(hist)
2174
[710]2175
[484]2176 def _zip_mask(self, mask):
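        # Compress a boolean channel mask into [start, end) index segments of
        # consecutive True values, e.g. [0,1,1,0,1] -> [[1, 3], [4, 5]].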
2177 mask = list(mask)
2178 i = 0
2179 segments = []
2180 while mask[i:].count(1):
2181 i += mask[i:].index(1)
2182 if mask[i:].count(0):
2183 j = i + mask[i:].index(0)
2184 else:
[710]2185 j = len(mask)
[1118]2186 segments.append([i, j])
[710]2187 i = j
[484]2188 return segments
[714]2189
[626]2190 def _get_ordinate_label(self):
2191 fu = "("+self.get_fluxunit()+")"
2192 import re
2193 lbl = "Intensity"
[1118]2194 if re.match(".K.", fu):
[626]2195 lbl = "Brightness Temperature "+ fu
[1118]2196 elif re.match(".Jy.", fu):
[626]2197 lbl = "Flux density "+ fu
2198 return lbl
[710]2199
[876]2200 def _check_ifs(self):
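        # Rough consistency check: compares the mean channel count of all
        # non-empty IFs with that of the first one, i.e. whether the IFs
        # appear to share the same number of channels.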
2201 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2202 nchans = filter(lambda t: t > 0, nchans)
[876]2203 return (sum(nchans)/len(nchans) == nchans[0])
[976]2204
[1684]2205 def _fill(self, names, unit, average, getpt, antenna):
[976]2206 import os
2207 from asap._asap import stfiller
2208 first = True
2209 fullnames = []
2210 for name in names:
2211 name = os.path.expandvars(name)
2212 name = os.path.expanduser(name)
2213 if not os.path.exists(name):
2214 msg = "File '%s' does not exist" % (name)
2215 if rcParams['verbose']:
2216 asaplog.push(msg)
[1612]2217 #print asaplog.pop().strip()
[1614]2218 print_log( 'ERROR' )
[976]2219 return
2220 raise IOError(msg)
2221 fullnames.append(name)
2222 if average:
2223 asaplog.push('Auto averaging integrations')
[1079]2224 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2225 for name in fullnames:
[1073]2226 tbl = Scantable(stype)
2227 r = stfiller(tbl)
[1603]2228 rx = rcParams['scantable.reference']
2229 r._setreferenceexpr(rx)
[976]2230 msg = "Importing %s..." % (name)
[1118]2231 asaplog.push(msg, False)
[976]2232 print_log()
[1684]2233 r._open(name, antenna, -1, -1, getpt)
[976]2234 r._read()
2235 if average:
[1118]2236 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2237 if not first:
2238 tbl = self._math._merge([self, tbl])
2239 Scantable.__init__(self, tbl)
2240 r._close()
[1118]2241 del r, tbl
[976]2242 first = False
2243 if unit is not None:
2244 self.set_fluxunit(unit)
[1446]2245 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]2246
[1603]2247 def __getitem__(self, key):
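        # Support scan[i] (including negative indices): returns the spectrum
        # of row i, e.g. first = scan[0]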
2248 if key < 0:
2249 key += self.nrow()
2250 if key >= self.nrow():
2251 raise IndexError("Row index out of range.")
2252 return self._getspectrum(key)
2253
2254 def __setitem__(self, key, value):
2255 if key < 0:
2256 key += self.nrow()
2257 if key >= self.nrow():
2258 raise IndexError("Row index out of range.")
2259 if not hasattr(value, "__len__") or \
2260 len(value) > self.nchan(self.getif(key)):
2261 raise ValueError("Spectrum length doesn't match.")
2262 return self._setspectrum(value, key)
2263
2264 def __len__(self):
2265 return self.nrow()
2266
2267 def __iter__(self):
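        # Iterate over the spectra row by row, e.g.:
        #   for spec in scan: print len(spec)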
2268 for i in range(len(self)):
2269 yield self[i]
[1681]2270
2271 def _operation(self, other, opmode):
2272 varlist = vars()
2273 s = None
2274 import numpy
2275 if isinstance(other, scantable):
2276 s = scantable(self._math._binaryop(self.copy(), other, opmode))
2277 elif isinstance(other, float) or isinstance(other, int):
2278 if opmode == 'DIV' and float(other) == 0.0:
2279 raise ZeroDivisionError("Dividing by zero is not recommended")
2280 s = scantable(self._math._unaryop(self.copy(), other, opmode, False))
2281 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
2282 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
2283 from asapmath import _array2dOp
2284 s = _array2dOp( self.copy(), other, opmode, False )
2285 else:
2286 s = scantable(self._math._arrayop(self.copy(), other, opmode, False))
2287 else:
2288 raise TypeError("Other input is not a scantable, a float value, or a float list")
2289 opdic = {}
2290 opdic['ADD'] = '+'
2291 opdic['SUB'] = '-'
2292 opdic['MUL'] = '*'
2293 opdic['DIV'] = '/'
2294 s._add_history("operator %s" % opdic[opmode], varlist)
2295 print_log()
2296 return s
2297
2298