source: branches/alma/python/scantable.py@ 1681

Last change on this file since 1681 was 1681, checked in by Takeshi Nakazato, 15 years ago

New Development: No

JIRA Issue: Yes CAS-1823

Ready to Release: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Allow operations between a scantable and a numpy.ndarray.

The internal processing of the four arithmetic operations (add, sub, mul, div)
looks very similar at the Python level, so an _operation() method has been
defined to unify it. Currently add, sub, mul and div simply call _operation()
with slightly different parameters.
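As a rough illustration of that unification (a sketch only: the real _operation() lives inside scantable and its exact signature is not shown on this page; the class name, mode strings and list-based arithmetic below are assumptions, not the committed implementation):

    import numpy

    class arithmetic_sketch(object):
        """Hypothetical stand-in showing add/sub/mul/div delegating to one helper."""
        def __init__(self, data):
            self.data = list(data)

        def _operation(self, mode, other):
            # accept scalars, lists/tuples or numpy.ndarray uniformly
            if isinstance(other, numpy.ndarray):
                other = other.tolist()
            if not isinstance(other, (list, tuple)):
                other = [other] * len(self.data)
            ops = {'ADD': lambda a, b: a + b,
                   'SUB': lambda a, b: a - b,
                   'MUL': lambda a, b: a * b,
                   'DIV': lambda a, b: a / b}
            self.data = [ops[mode](a, b) for a, b in zip(self.data, other)]
            return self

        # the four public operations differ only in the mode string they pass on
        def add(self, offset): return self._operation('ADD', offset)
        def sub(self, offset): return self._operation('SUB', offset)
        def mul(self, factor): return self._operation('MUL', factor)
        def div(self, factor): return self._operation('DIV', factor)

With this pattern, arithmetic_sketch([1.0, 2.0]).mul(numpy.array([2.0, 3.0])).data evaluates to [2.0, 6.0].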


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 85.4 KB
Rev  Line
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[1496]14 def __init__(self, filename, average=None, unit=None, getpt=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[1522]32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
[1522]34 The default is False, which makes loading
35 the MS data faster in some cases.
[710]36 """
[976]37 if average is None:
[710]38 average = rcParams['scantable.autoaverage']
[1496]39 if getpt is None:
[1673]40 getpt = True
[1259]41 varlist = vars()
[876]42 from asap._asap import stmath
[1680]43 self._math = stmath( rcParams['insitu'] )
[876]44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
[181]46 else:
[1603]47 if isinstance(filename, str):# or \
48# (isinstance(filename, list) or isinstance(filename, tuple)) \
49# and isinstance(filename[-1], str):
[976]50 import os.path
51 filename = os.path.expandvars(filename)
52 filename = os.path.expanduser(filename)
53 if not os.path.exists(filename):
54 s = "File '%s' not found." % (filename)
[718]55 if rcParams['verbose']:
[976]56 asaplog.push(s)
[1612]57 #print asaplog.pop().strip()
[1614]58 print_log('ERROR')
[718]59 return
[976]60 raise IOError(s)
[1115]61 if os.path.isdir(filename) \
[1118]62 and not os.path.exists(filename+'/table.f1'):
[976]63 # crude check if asap table
64 if os.path.exists(filename+'/table.info'):
[1118]65 ondisk = rcParams['scantable.storage'] == 'disk'
66 Scantable.__init__(self, filename, ondisk)
[976]67 if unit is not None:
68 self.set_fluxunit(unit)
[1496]69 # do not reset to the default freqframe
70 #self.set_freqframe(rcParams['scantable.freqframe'])
[718]71 else:
[1118]72 msg = "The given file '%s'is not a valid " \
73 "asap table." % (filename)
[976]74 if rcParams['verbose']:
[1612]75 #print msg
[1614]76 asaplog.push( msg )
77 print_log( 'ERROR' )
[976]78 return
79 else:
80 raise IOError(msg)
[226]81 else:
[1496]82 self._fill([filename], unit, average, getpt)
[1118]83 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]84 and isinstance(filename[-1], str):
[1496]85 self._fill(filename, unit, average, getpt)
[1259]86 self._add_history("scantable", varlist)
[714]87 print_log()
[102]88
[876]89 def save(self, name=None, format=None, overwrite=False):
[116]90 """
[1280]91 Store the scantable on disk. This can be an asap (aips++) Table,
92 SDFITS or MS2 format.
[116]93 Parameters:
[1093]94 name: the name of the outputfile. For format "ASCII"
95 this is the root file name (data in 'name'.txt
[497]96 and header in 'name'_header.txt)
[116]97 format: an optional file format. Default is ASAP.
[280]98 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]99 'SDFITS' (save as SDFITS file)
[200]100 'ASCII' (saves as ascii text file)
[226]101 'MS2' (saves as an aips++
102 MeasurementSet V2)
[1603]103 'FITS' (save as image FITS - not
104 readable by class)
105 'CLASS' (save as FITS readable by CLASS)
[411]106 overwrite: If the file should be overwritten if it exists.
[256]107 The default False is to return with warning
[411]108 without writing the output. USE WITH CARE.
[116]109 Example:
110 scan.save('myscan.asap')
[1118]111 scan.save('myscan.sdfits', 'SDFITS')
[116]112 """
[411]113 from os import path
[226]114 if format is None: format = rcParams['scantable.save']
[256]115 suffix = '.'+format.lower()
[1118]116 if name is None or name == "":
[256]117 name = 'scantable'+suffix
[718]118 msg = "No filename given. Using default name %s..." % name
119 asaplog.push(msg)
[411]120 name = path.expandvars(name)
[256]121 if path.isfile(name) or path.isdir(name):
122 if not overwrite:
[718]123 msg = "File %s exists." % name
124 if rcParams['verbose']:
[1612]125 #print msg
[1614]126 asaplog.push( msg )
127 print_log( 'ERROR' )
[718]128 return
129 else:
130 raise IOError(msg)
[451]131 format2 = format.upper()
132 if format2 == 'ASAP':
[116]133 self._save(name)
134 else:
[989]135 from asap._asap import stwriter as stw
[1118]136 writer = stw(format2)
137 writer.write(self, name)
[718]138 print_log()
[116]139 return
140
[102]141 def copy(self):
142 """
143 Return a copy of this scantable.
[1348]144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]146 Parameters:
[113]147 none
[102]148 Example:
149 copiedscan = scan.copy()
150 """
[876]151 sd = scantable(Scantable._copy(self))
[113]152 return sd
153
[1093]154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable where the specified scan number(s) has(have)
157 been dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
[1612]166 #print "Please specify a scanno to drop from the scantable"
[1614]167 asaplog.push( 'Please specify a scanno to drop from the scantable' )
168 print_log( 'ERROR' )
[1093]169 return
170 else:
171 raise RuntimeError("No scan given")
172 try:
173 scanid = _to_list(scanid)
174 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
175 for sid in scanid: allscans.remove(sid)
[1118]176 if len(allscans) == 0:
177 raise ValueError("Can't remove all scans")
[1093]178 except ValueError:
179 if rcParams['verbose']:
[1612]180 #print "Couldn't find any match."
[1614]181 print_log()
182 asaplog.push( "Couldn't find any match." )
183 print_log( 'ERROR' )
[1093]184 return
185 else: raise
186 try:
187 bsel = self.get_selection()
188 sel = selector()
189 sel.set_scans(allscans)
190 self.set_selection(bsel+sel)
191 scopy = self._copy()
192 self.set_selection(bsel)
193 return scantable(scopy)
194 except RuntimeError:
[1118]195 if rcParams['verbose']:
[1612]196 #print "Couldn't find any match."
[1614]197 print_log()
198 asaplog.push( "Couldn't find any match." )
199 print_log( 'ERROR' )
[1118]200 else:
201 raise
[1093]202
203
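# Illustrative usage, not part of the original source ('scan' is a hypothetical
# scantable instance loaded elsewhere):
#   scan2 = scan.drop_scan(1)        # drop scan number 1
#   scan3 = scan.drop_scan([2, 4])   # drop scan numbers 2 and 4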
[102]204 def get_scan(self, scanid=None):
205 """
206 Return a specific scan (by scanno) or collection of scans (by
207 source name) in a new scantable.
[1348]208 Note:
209 See scantable.drop_scan() for the inverse operation.
[102]210 Parameters:
[513]211 scanid: a (list of) scanno or a source name, unix-style
212 patterns are accepted for source name matching, e.g.
 213 '*_R' gets all 'ref' scans
[102]214 Example:
[513]215 # get all scans containing the source '323p459'
216 newscan = scan.get_scan('323p459')
217 # get all 'off' scans
218 refscans = scan.get_scan('*_R')
 219 # get a subset of scans by scanno (as listed in scan.summary())
[1118]220 newscan = scan.get_scan([0, 2, 7, 10])
[102]221 """
222 if scanid is None:
[718]223 if rcParams['verbose']:
[1612]224 #print "Please specify a scan no or name to " \
225 # "retrieve from the scantable"
[1614]226 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
227 print_log( 'ERROR' )
[718]228 return
229 else:
230 raise RuntimeError("No scan given")
231
[102]232 try:
[946]233 bsel = self.get_selection()
234 sel = selector()
[102]235 if type(scanid) is str:
[946]236 sel.set_name(scanid)
237 self.set_selection(bsel+sel)
[876]238 scopy = self._copy()
[946]239 self.set_selection(bsel)
[876]240 return scantable(scopy)
[102]241 elif type(scanid) is int:
[946]242 sel.set_scans([scanid])
243 self.set_selection(bsel+sel)
[876]244 scopy = self._copy()
[946]245 self.set_selection(bsel)
[876]246 return scantable(scopy)
[381]247 elif type(scanid) is list:
[946]248 sel.set_scans(scanid)
249 self.set_selection(sel)
[876]250 scopy = self._copy()
[946]251 self.set_selection(bsel)
[876]252 return scantable(scopy)
[381]253 else:
[718]254 msg = "Illegal scanid type, use 'int' or 'list' if ints."
255 if rcParams['verbose']:
[1612]256 #print msg
[1614]257 asaplog.push( msg )
258 print_log( 'ERROR' )
[718]259 else:
260 raise TypeError(msg)
[102]261 except RuntimeError:
[1612]262 if rcParams['verbose']:
263 #print "Couldn't find any match."
[1614]264 print_log()
265 asaplog.push( "Couldn't find any match." )
266 print_log( 'ERROR' )
[718]267 else: raise
[102]268
269 def __str__(self):
[1118]270 return Scantable._summary(self, True)
[102]271
[976]272 def summary(self, filename=None):
[102]273 """
274 Print a summary of the contents of this scantable.
275 Parameters:
 276 filename: the name of a file to write the output to
277 Default - no file output
[381]278 verbose: print extra info such as the frequency table
279 The default (False) is taken from .asaprc
[102]280 """
[976]281 info = Scantable._summary(self, True)
282 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]283 if filename is not None:
[256]284 if filename == "":
285 filename = 'scantable_summary.txt'
[415]286 from os.path import expandvars, isdir
[411]287 filename = expandvars(filename)
[415]288 if not isdir(filename):
[413]289 data = open(filename, 'w')
290 data.write(info)
291 data.close()
292 else:
[718]293 msg = "Illegal file name '%s'." % (filename)
294 if rcParams['verbose']:
[1612]295 #print msg
[1614]296 asaplog.push( msg )
297 print_log( 'ERROR' )
[718]298 else:
299 raise IOError(msg)
300 if rcParams['verbose']:
[794]301 try:
302 from IPython.genutils import page as pager
303 except ImportError:
304 from pydoc import pager
305 pager(info)
[718]306 else:
307 return info
[710]308
[1603]309 def get_spectrum(self, rowno):
310 """Return the spectrum for the current row in the scantable as a list.
311 Parameters:
312 rowno: the row number to retrieve the spectrum from
313 """
314 return self._getspectrum(rowno)
[946]315
[1603]316 def get_mask(self, rowno):
317 """Return the mask for the current row in the scantable as a list.
318 Parameters:
319 rowno: the row number to retrieve the mask from
320 """
321 return self._getmask(rowno)
322
323 def set_spectrum(self, spec, rowno):
324 """Return the spectrum for the current row in the scantable as a list.
325 Parameters:
326 spec: the spectrum
327 rowno: the row number to set the spectrum for
328 """
329 assert(len(spec) == self.nchan())
330 return self._setspectrum(spec, rowno)
331
[946]332 def get_selection(self):
333 """
[1005]334 Get the selection object currently set on this scantable.
335 Parameters:
336 none
337 Example:
338 sel = scan.get_selection()
339 sel.set_ifs(0) # select IF 0
340 scan.set_selection(sel) # apply modified selection
[946]341 """
342 return selector(self._getselection())
343
[1005]344 def set_selection(self, selection=selector()):
[946]345 """
[1005]346 Select a subset of the data. All following operations on this scantable
 347 are only applied to this selection.
348 Parameters:
349 selection: a selector object (default unset the selection)
350 Examples:
351 sel = selector() # create a selection object
[1118]352 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]353 scan.set_selection(sel) # set the selection
 354 scan.summary() # will only print summary of scanno 0 and 3
355 scan.set_selection() # unset the selection
[946]356 """
357 self._setselection(selection)
358
[1446]359 def get_row(self, row=0, insitu=None):
360 """
361 Select a row in the scantable.
362 Return a scantable with single row.
363 Parameters:
364 row: row no of integration, default is 0.
365 insitu: if False a new scantable is returned.
366 Otherwise, the scaling is done in-situ
367 The default is taken from .asaprc (False)
368 """
369 if insitu is None: insitu = rcParams['insitu']
370 if not insitu:
371 workscan = self.copy()
372 else:
373 workscan = self
374 # Select a row
375 sel=selector()
376 sel.set_scans([workscan.getscan(row)])
377 sel.set_cycles([workscan.getcycle(row)])
378 sel.set_beams([workscan.getbeam(row)])
379 sel.set_ifs([workscan.getif(row)])
380 sel.set_polarisations([workscan.getpol(row)])
381 sel.set_name(workscan._getsourcename(row))
382 workscan.set_selection(sel)
383 if not workscan.nrow() == 1:
384 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
385 raise RuntimeError(msg)
386 del sel
387 if insitu:
388 self._assign(workscan)
389 else:
390 return workscan
391
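# Illustrative usage, not part of the original source:
#   row5 = scan.get_row(5, insitu=False)   # new scantable holding only row 5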
[1629]392 #def stats(self, stat='stddev', mask=None):
393 def stats(self, stat='stddev', mask=None, form='3.3f'):
[102]394 """
[135]395 Determine the specified statistic of the current beam/if/pol
[102]396 Takes a 'mask' as an optional parameter to specify which
397 channels should be excluded.
398 Parameters:
[1517]399 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]400 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]401 mask: an optional mask specifying where the statistic
[102]402 should be determined.
[1629]403 form: format string to print statistic values
[102]404 Example:
[113]405 scan.set_unit('channel')
[1118]406 msk = scan.create_mask([100, 200], [500, 600])
[135]407 scan.stats(stat='mean', mask=msk)
[102]408 """
409 if mask == None:
[876]410 mask = []
[1118]411 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]412 if not self._check_ifs():
[1118]413 raise ValueError("Cannot apply mask as the IFs have different "
414 "number of channels. Please use setselection() "
415 "to select individual IFs")
[1530]416 rtnabc = False
417 if stat.lower().endswith('_abc'): rtnabc = True
418 getchan = False
419 if stat.lower().startswith('min') or stat.lower().startswith('max'):
[1517]420 chan = self._math._minmaxchan(self, mask, stat)
421 getchan = True
[1515]422 statvals = []
[1530]423 if not rtnabc: statvals = self._math._stats(self, mask, stat)
424
[876]425 out = ''
426 axes = []
427 for i in range(self.nrow()):
428 axis = []
429 axis.append(self.getscan(i))
430 axis.append(self.getbeam(i))
431 axis.append(self.getif(i))
432 axis.append(self.getpol(i))
433 axis.append(self.getcycle(i))
434 axes.append(axis)
435 tm = self._gettime(i)
436 src = self._getsourcename(i)
[1530]437 refstr = ''
438 statunit= ''
[1517]439 if getchan:
440 qx, qy = self.chan2data(rowno=i, chan=chan[i])
[1530]441 if rtnabc:
442 statvals.append(qx['value'])
[1629]443 #refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
444 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
[1530]445 statunit= '['+qx['unit']+']'
446 else:
[1629]447 #refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
448 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
[1530]449 #statunit= ' ['+qy['unit']+']'
[876]450 out += 'Scan[%d] (%s) ' % (axis[0], src)
451 out += 'Time[%s]:\n' % (tm)
452 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
453 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
454 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
[1629]455 #out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
456 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
[876]457 out += "--------------------------------------------------\n"
[256]458
[876]459 if rcParams['verbose']:
[1615]460 import os
[1625]461 if os.environ.has_key( 'USER' ):
462 usr=os.environ['USER']
463 else:
464 import commands
465 usr=commands.getoutput( 'whoami' )
[1613]466 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
[1612]467 f=open(tmpfile,'w')
468 print >> f, "--------------------------------------------------"
469 print >> f, " ", stat, statunit
470 print >> f, "--------------------------------------------------"
471 print >> f, out
472 f.close()
473 f=open(tmpfile,'r')
474 x=f.readlines()
475 f.close()
476 for xx in x:
[1666]477 asaplog.push( xx, False )
[1614]478 print_log()
[1295]479 #else:
480 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
481 # 'axes' : axes,
482 # 'data': statvals}
483 return statvals
[102]484
[1517]485 def chan2data(self, rowno=0, chan=0):
[1515]486 """
[1517]487 Returns channel/frequency/velocity and spectral value
[1515]488 at an arbitrary row and channel in the scantable.
489 Parameters:
490 rowno: a row number in the scantable. Default is the
491 first row, i.e. rowno=0
[1517]492 chan: a channel in the scantable. Default is the first
[1515]493 channel, i.e. pos=0
494 """
[1517]495 if isinstance(rowno, int) and isinstance(chan, int):
[1530]496 qx = {'unit': self.get_unit(),
497 'value': self._getabcissa(rowno)[chan]}
[1517]498 qy = {'unit': self.get_fluxunit(),
499 'value': self._getspectrum(rowno)[chan]}
500 return qx, qy
[1515]501
[1118]502 def stddev(self, mask=None):
[135]503 """
504 Determine the standard deviation of the current beam/if/pol
505 Takes a 'mask' as an optional parameter to specify which
506 channels should be excluded.
507 Parameters:
508 mask: an optional mask specifying where the standard
509 deviation should be determined.
510
511 Example:
512 scan.set_unit('channel')
[1118]513 msk = scan.create_mask([100, 200], [500, 600])
[135]514 scan.stddev(mask=msk)
515 """
[1118]516 return self.stats(stat='stddev', mask=mask);
[135]517
[1003]518
[1259]519 def get_column_names(self):
[1003]520 """
521 Return a list of column names, which can be used for selection.
522 """
[1259]523 return list(Scantable.get_column_names(self))
[1003]524
[876]525 def get_tsys(self):
[113]526 """
527 Return the System temperatures.
528 Returns:
[876]529 a list of Tsys values for the current selection
[113]530 """
[256]531
[876]532 return self._row_callback(self._gettsys, "Tsys")
[256]533
[876]534 def _row_callback(self, callback, label):
535 axes = []
[1118]536 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]537 out = ""
[1118]538 outvec = []
[876]539 for i in range(self.nrow()):
540 axis = []
541 axis.append(self.getscan(i))
542 axis.append(self.getbeam(i))
543 axis.append(self.getif(i))
544 axis.append(self.getpol(i))
545 axis.append(self.getcycle(i))
546 axes.append(axis)
547 tm = self._gettime(i)
548 src = self._getsourcename(i)
549 out += 'Scan[%d] (%s) ' % (axis[0], src)
550 out += 'Time[%s]:\n' % (tm)
551 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
552 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
553 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
554 outvec.append(callback(i))
555 out += '= %3.3f\n' % (outvec[i])
556 out += "--------------------------------------------------\n"
557 if rcParams['verbose']:
[1615]558 asaplog.push("--------------------------------------------------")
559 asaplog.push(" %s" % (label))
560 asaplog.push("--------------------------------------------------")
561 asaplog.push(out)
[1614]562 print_log()
[1175]563 # disabled because the vector seems more useful
564 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
565 return outvec
[256]566
[1070]567 def _get_column(self, callback, row=-1):
568 """
569 """
570 if row == -1:
571 return [callback(i) for i in range(self.nrow())]
572 else:
573 if 0 <= row < self.nrow():
574 return callback(row)
[256]575
[1070]576
[1348]577 def get_time(self, row=-1, asdatetime=False):
[113]578 """
579 Get a list of time stamps for the observations.
[1348]580 Return a datetime object for each integration time stamp in the scantable.
[113]581 Parameters:
[1348]582 row: row no of integration. Default -1 return all rows
583 asdatetime: return values as datetime objects rather than strings
[113]584 Example:
585 none
586 """
[1175]587 from time import strptime
588 from datetime import datetime
[1457]589 times = self._get_column(self._gettime, row)
[1348]590 if not asdatetime:
[1603]591 return times
[1175]592 format = "%Y/%m/%d/%H:%M:%S"
593 if isinstance(times, list):
594 return [datetime(*strptime(i, format)[:6]) for i in times]
595 else:
596 return datetime(*strptime(times, format)[:6])
[102]597
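# Illustrative usage, not part of the original source:
#   stamps = scan.get_time()                # time strings for all rows
#   t0 = scan.get_time(0, asdatetime=True)  # datetime.datetime for row 0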
[1348]598
599 def get_inttime(self, row=-1):
600 """
601 Get a list of integration times for the observations.
602 Return a time in seconds for each integration in the scantable.
603 Parameters:
604 row: row no of integration. Default -1 return all rows.
605 Example:
606 none
607 """
608 return self._get_column(self._getinttime, row)
609
610
[714]611 def get_sourcename(self, row=-1):
612 """
[794]613 Get a list source names for the observations.
[714]614 Return a string for each integration in the scantable.
615 Parameters:
[1348]616 row: row no of integration. Default -1 return all rows.
[714]617 Example:
618 none
619 """
[1070]620 return self._get_column(self._getsourcename, row)
[714]621
[794]622 def get_elevation(self, row=-1):
623 """
624 Get a list of elevations for the observations.
625 Return a float for each integration in the scantable.
626 Parameters:
[1348]627 row: row no of integration. Default -1 return all rows.
[794]628 Example:
629 none
630 """
[1070]631 return self._get_column(self._getelevation, row)
[794]632
633 def get_azimuth(self, row=-1):
634 """
635 Get a list of azimuths for the observations.
636 Return a float for each integration in the scantable.
637 Parameters:
[1348]638 row: row no of integration. Default -1 return all rows.
[794]639 Example:
640 none
641 """
[1070]642 return self._get_column(self._getazimuth, row)
[794]643
644 def get_parangle(self, row=-1):
645 """
646 Get a list of parallactic angles for the observations.
647 Return a float for each integration in the scantable.
648 Parameters:
[1348]649 row: row no of integration. Default -1 return all rows.
[794]650 Example:
651 none
652 """
[1070]653 return self._get_column(self._getparangle, row)
[794]654
[1070]655 def get_direction(self, row=-1):
656 """
657 Get a list of Positions on the sky (direction) for the observations.
658 Return a float for each integration in the scantable.
659 Parameters:
660 row: row no of integration. Default -1 return all rows
661 Example:
662 none
663 """
664 return self._get_column(self._getdirection, row)
665
[1389]666 def get_directionval(self, row=-1):
667 """
668 Get a list of Positions on the sky (direction) for the observations.
669 Return a float for each integration in the scantable.
670 Parameters:
671 row: row no of integration. Default -1 return all rows
672 Example:
673 none
674 """
675 return self._get_column(self._getdirectionvec, row)
676
[102]677 def set_unit(self, unit='channel'):
678 """
679 Set the unit for all following operations on this scantable
680 Parameters:
681 unit: optional unit, default is 'channel'
[1118]682 one of '*Hz', 'km/s', 'channel', ''
[102]683 """
[484]684 varlist = vars()
[1118]685 if unit in ['', 'pixel', 'channel']:
[113]686 unit = ''
687 inf = list(self._getcoordinfo())
688 inf[0] = unit
689 self._setcoordinfo(inf)
[1118]690 self._add_history("set_unit", varlist)
[113]691
[484]692 def set_instrument(self, instr):
[358]693 """
[1348]694 Set the instrument for subsequent processing.
[358]695 Parameters:
[710]696 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]697 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]698 """
699 self._setInstrument(instr)
[1118]700 self._add_history("set_instrument", vars())
[718]701 print_log()
[358]702
[1190]703 def set_feedtype(self, feedtype):
704 """
705 Overwrite the feed type, which might not be set correctly.
706 Parameters:
707 feedtype: 'linear' or 'circular'
708 """
709 self._setfeedtype(feedtype)
710 self._add_history("set_feedtype", vars())
711 print_log()
712
[276]713 def set_doppler(self, doppler='RADIO'):
714 """
715 Set the doppler for all following operations on this scantable.
716 Parameters:
717 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
718 """
[484]719 varlist = vars()
[276]720 inf = list(self._getcoordinfo())
721 inf[2] = doppler
722 self._setcoordinfo(inf)
[1118]723 self._add_history("set_doppler", vars())
[718]724 print_log()
[710]725
[226]726 def set_freqframe(self, frame=None):
[113]727 """
728 Set the frame type of the Spectral Axis.
729 Parameters:
[591]730 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]731 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
732 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]733 Examples:
734 scan.set_freqframe('BARY')
735 """
[484]736 if frame is None: frame = rcParams['scantable.freqframe']
737 varlist = vars()
[1118]738 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
739 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]740
[989]741 if frame in valid:
[113]742 inf = list(self._getcoordinfo())
743 inf[1] = frame
744 self._setcoordinfo(inf)
[1118]745 self._add_history("set_freqframe", varlist)
[102]746 else:
[1118]747 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]748 if rcParams['verbose']:
[1612]749 #print msg
[1614]750 asaplog.push( msg )
751 print_log( 'ERROR' )
[718]752 else:
753 raise TypeError(msg)
754 print_log()
[710]755
[989]756 def set_dirframe(self, frame=""):
757 """
758 Set the frame type of the Direction on the sky.
759 Parameters:
760 frame: an optional frame type, default ''. Valid frames are:
761 'J2000', 'B1950', 'GALACTIC'
762 Examples:
763 scan.set_dirframe('GALACTIC')
764 """
765 varlist = vars()
766 try:
767 Scantable.set_dirframe(self, frame)
[1118]768 except RuntimeError, msg:
[989]769 if rcParams['verbose']:
[1612]770 #print msg
[1614]771 print_log()
[1676]772 asaplog.push( str(msg) )
[1614]773 print_log( 'ERROR' )
[989]774 else:
775 raise
[1118]776 self._add_history("set_dirframe", varlist)
[989]777
[113]778 def get_unit(self):
779 """
780 Get the default unit set in this scantable
781 Returns:
782 A unit string
783 """
784 inf = self._getcoordinfo()
785 unit = inf[0]
786 if unit == '': unit = 'channel'
787 return unit
[102]788
[158]789 def get_abcissa(self, rowno=0):
[102]790 """
[158]791 Get the abcissa in the current coordinate setup for the currently
[113]792 selected Beam/IF/Pol
793 Parameters:
[226]794 rowno: an optional row number in the scantable. Default is the
795 first row, i.e. rowno=0
[113]796 Returns:
[1348]797 The abcissa values and the format string (as a dictionary)
[113]798 """
[256]799 abc = self._getabcissa(rowno)
[710]800 lbl = self._getabcissalabel(rowno)
[718]801 print_log()
[158]802 return abc, lbl
[113]803
[1401]804 def flag(self, mask=None, unflag=False):
[1001]805 """
806 Flag the selected data using an optional channel mask.
807 Parameters:
808 mask: an optional channel mask, created with create_mask. Default
809 (no mask) is all channels.
[1401]810 unflag: if True, unflag the data
[1001]811 """
812 varlist = vars()
[1118]813 if mask is None:
814 mask = []
[1001]815 try:
[1401]816 self._flag(mask, unflag)
[1118]817 except RuntimeError, msg:
[1001]818 if rcParams['verbose']:
[1612]819 #print msg
[1614]820 print_log()
[1676]821 asaplog.push( str(msg) )
[1614]822 print_log( 'ERROR' )
[1001]823 return
824 else: raise
825 self._add_history("flag", varlist)
826
[1655]827 def flag_row(self, rows=[], unflag=False):
828 """
829 Flag the selected data in row-based manner.
830 Parameters:
831 rows: list of row numbers to be flagged. Default is no row (must be explicitly specified to execute row-based flagging).
832 unflag: if True, unflag the data.
833 """
834 varlist = vars()
835 try:
836 self._flag_row(rows, unflag)
837 except RuntimeError, msg:
838 if rcParams['verbose']:
839 print_log()
[1676]840 asaplog.push( str(msg) )
[1655]841 print_log('ERROR')
842 return
843 else: raise
844 self._add_history("flag_row", varlist)
845
846
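# Illustrative usage, not part of the original source:
#   scan.flag_row([3, 7])               # flag rows 3 and 7
#   scan.flag_row([3, 7], unflag=True)  # undo that flagging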
[1203]847 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]848 """
849 Flag the data in 'lag' space by providing a frequency to remove.
850 Flagged data in the scantable gets set to 0.0 before the fft.
851 No taper is applied.
852 Parameters:
[1348]853 frequency: the frequency (really a period within the bandwidth)
854 to remove
855 width: the width of the frequency to remove, to remove a
[1603]856 range of frequencies around the centre.
[1203]857 unit: the frequency unit (default "GHz")
858 Notes:
[1348]859 It is recommended to flag edges of the band or strong
860 signals beforehand.
[1192]861 """
862 if insitu is None: insitu = rcParams['insitu']
863 self._math._setinsitu(insitu)
864 varlist = vars()
[1370]865 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]866 if not base.has_key(unit):
867 raise ValueError("%s is not a valid unit." % unit)
868 try:
[1200]869 s = scantable(self._math._lag_flag(self, frequency*base[unit],
870 width*base[unit]))
[1192]871 except RuntimeError, msg:
872 if rcParams['verbose']:
[1612]873 #print msg
[1614]874 print_log()
[1676]875 asaplog.push( str(msg) )
[1614]876 print_log( 'ERROR' )
[1192]877 return
878 else: raise
879 s._add_history("lag_flag", varlist)
880 print_log()
881 if insitu:
882 self._assign(s)
883 else:
884 return s
[1001]885
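# Illustrative usage, not part of the original source (the frequency and width
# values are arbitrary placeholders):
#   cleaned = scan.lag_flag(frequency=11.0, width=0.1, unit="MHz", insitu=False)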
[1192]886
[113]887 def create_mask(self, *args, **kwargs):
888 """
[1118]889 Compute and return a mask based on [min, max] windows.
[189]890 The specified windows are to be INCLUDED, when the mask is
[113]891 applied.
[102]892 Parameters:
[1118]893 [min, max], [min2, max2], ...
[1024]894 Pairs of start/end points (inclusive) specifying the regions
[102]895 to be masked
[189]896 invert: optional argument. If specified as True,
897 return an inverted mask, i.e. the regions
898 specified are EXCLUDED
[513]899 row: create the mask using the specified row for
900 unit conversions, default is row=0
901 only necessary if frequency varies over rows.
[102]902 Example:
[113]903 scan.set_unit('channel')
904 a)
[1118]905 msk = scan.create_mask([400, 500], [800, 900])
[189]906 # masks everything outside 400 and 500
[113]907 # and 800 and 900 in the unit 'channel'
908
909 b)
[1118]910 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]911 # masks the regions between 400 and 500
[113]912 # and 800 and 900 in the unit 'channel'
[1024]913 c)
914 mask only channel 400
[1118]915 msk = scan.create_mask([400, 400])
[102]916 """
[513]917 row = 0
918 if kwargs.has_key("row"):
919 row = kwargs.get("row")
920 data = self._getabcissa(row)
[113]921 u = self._getcoordinfo()[0]
[718]922 if rcParams['verbose']:
[113]923 if u == "": u = "channel"
[718]924 msg = "The current mask window unit is %s" % u
[1118]925 i = self._check_ifs()
926 if not i:
[876]927 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]928 asaplog.push(msg)
[102]929 n = self.nchan()
[1295]930 msk = _n_bools(n, False)
[710]931 # test if args is a 'list' or a 'normal *args - UGLY!!!
932
[1118]933 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
934 and args or args[0]
[710]935 for window in ws:
[102]936 if (len(window) != 2 or window[0] > window[1] ):
[1118]937 raise TypeError("A window needs to be defined as [min, max]")
[102]938 for i in range(n):
[1024]939 if data[i] >= window[0] and data[i] <= window[1]:
[1295]940 msk[i] = True
[113]941 if kwargs.has_key('invert'):
942 if kwargs.get('invert'):
[1295]943 msk = mask_not(msk)
[718]944 print_log()
[102]945 return msk
[710]946
[1446]947 def get_masklist(self, mask=None, row=0):
[256]948 """
[1446]949 Compute and return a list of mask windows, [min, max].
950 Parameters:
951 mask: channel mask, created with create_mask.
 952 row: calculate the masklist using the specified row
953 for unit conversions, default is row=0
954 only necessary if frequency varies over rows.
955 Returns:
956 [min, max], [min2, max2], ...
 957 Pairs of start/end points (inclusive) specifying
958 the masked regions
959 """
960 if not (isinstance(mask,list) or isinstance(mask, tuple)):
961 raise TypeError("The mask should be list or tuple.")
962 if len(mask) < 2:
 963 raise TypeError("The mask should contain at least 2 elements")
964 if self.nchan() != len(mask):
965 msg = "Number of channels in scantable != number of mask elements"
966 raise TypeError(msg)
967 data = self._getabcissa(row)
968 u = self._getcoordinfo()[0]
969 if rcParams['verbose']:
970 if u == "": u = "channel"
971 msg = "The current mask window unit is %s" % u
972 i = self._check_ifs()
973 if not i:
974 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
975 asaplog.push(msg)
976 masklist=[]
977 ist, ien = None, None
978 ist, ien=self.get_mask_indices(mask)
979 if ist is not None and ien is not None:
980 for i in xrange(len(ist)):
981 range=[data[ist[i]],data[ien[i]]]
982 range.sort()
983 masklist.append([range[0],range[1]])
984 return masklist
985
986 def get_mask_indices(self, mask=None):
987 """
 988 Compute and return lists of mask start indices and mask end indices.
989 Parameters:
990 mask: channel mask, created with create_mask.
991 Returns:
992 List of mask start indices and that of mask end indices,
993 i.e., [istart1,istart2,....], [iend1,iend2,....].
994 """
995 if not (isinstance(mask,list) or isinstance(mask, tuple)):
996 raise TypeError("The mask should be list or tuple.")
997 if len(mask) < 2:
 998 raise TypeError("The mask should contain at least 2 elements")
999 istart=[]
1000 iend=[]
1001 if mask[0]: istart.append(0)
1002 for i in range(len(mask)-1):
1003 if not mask[i] and mask[i+1]:
1004 istart.append(i+1)
1005 elif mask[i] and not mask[i+1]:
1006 iend.append(i)
1007 if mask[len(mask)-1]: iend.append(len(mask)-1)
1008 if len(istart) != len(iend):
1009 raise RuntimeError("Numbers of mask start != mask end.")
1010 for i in range(len(istart)):
1011 if istart[i] > iend[i]:
1012 raise RuntimeError("Mask start index > mask end index")
1013 break
1014 return istart,iend
1015
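# Illustrative usage of get_masklist()/get_mask_indices(), not part of the
# original source (assumes scan.set_unit('channel')):
#   msk = scan.create_mask([100, 200], [500, 600])
#   print scan.get_masklist(msk)            # e.g. [[100.0, 200.0], [500.0, 600.0]]
#   ist, ien = scan.get_mask_indices(msk)   # ([100, 500], [200, 600])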
1016# def get_restfreqs(self):
1017# """
1018# Get the restfrequency(s) stored in this scantable.
1019# The return value(s) are always of unit 'Hz'
1020# Parameters:
1021# none
1022# Returns:
1023# a list of doubles
1024# """
1025# return list(self._getrestfreqs())
1026
1027 def get_restfreqs(self, ids=None):
1028 """
[256]1029 Get the restfrequency(s) stored in this scantable.
1030 The return value(s) are always of unit 'Hz'
1031 Parameters:
[1446]1032 ids: (optional) a list of MOLECULE_ID for which the restfrequency(s) are to
1033 be retrieved
[256]1034 Returns:
[1446]1035 dictionary containing ids and a list of doubles for each id
[256]1036 """
[1446]1037 if ids is None:
1038 rfreqs={}
1039 idlist = self.getmolnos()
1040 for i in idlist:
1041 rfreqs[i]=list(self._getrestfreqs(i))
1042 return rfreqs
1043 else:
1044 if type(ids)==list or type(ids)==tuple:
1045 rfreqs={}
1046 for i in ids:
1047 rfreqs[i]=list(self._getrestfreqs(i))
1048 return rfreqs
1049 else:
1050 return list(self._getrestfreqs(ids))
1051 #return list(self._getrestfreqs(ids))
[102]1052
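# Illustrative usage, not part of the original source:
#   allfreqs = scan.get_restfreqs()   # {molecule_id: [rest_freq_hz, ...], ...}
#   f0 = scan.get_restfreqs(0)        # list of rest frequencies for MOLECULE_ID 0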
[931]1053 def set_restfreqs(self, freqs=None, unit='Hz'):
1054 """
[1446]1055 ********NEED TO BE UPDATED begin************
[931]1056 Set or replace the restfrequency specified and
1057 If the 'freqs' argument holds a scalar,
1058 then that rest frequency will be applied to all the selected
1059 data. If the 'freqs' argument holds
1060 a vector, then it MUST be of equal or smaller length than
1061 the number of IFs (and the available restfrequencies will be
1062 replaced by this vector). In this case, *all* data have
1063 the restfrequency set per IF according
1064 to the corresponding value you give in the 'freqs' vector.
[1118]1065 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1066 IF 1 gets restfreq 2e9.
[1446]1067 ********NEED TO BE UPDATED end************
[1603]1068 You can also specify the frequencies via a linecatalog.
[1153]1069
[931]1070 Parameters:
 1071 freqs: list of rest frequency values or string identifiers
1072 unit: unit for rest frequency (default 'Hz')
[402]1073
[931]1074 Example:
[1446]1075 # set the given restfrequency for the all currently selected IFs
[931]1076 scan.set_restfreqs(freqs=1.4e9)
[1446]1077 # set multiple restfrequencies to all the selected data
1078 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1079 # If the number of IFs in the data is >= 2 the IF0 gets the first
1080 # value IF1 the second... NOTE that freqs needs to be
1081 # specified in list of list (e.g. [[],[],...] ).
1082 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]1083 #set the given restfrequency for the whole table (by name)
1084 scan.set_restfreqs(freqs="OH1667")
[391]1085
[931]1086 Note:
 1087 To do more sophisticated rest-frequency setting, e.g. on a
 1088 source and IF basis, use scantable.set_selection() before using
 1089 this function.
 1090 # provided your scantable is called scan
1091 selection = selector()
1092 selection.set_name("ORION*")
1093 selection.set_ifs([1])
1094 scan.set_selection(selection)
1095 scan.set_restfreqs(freqs=86.6e9)
1096
1097 """
1098 varlist = vars()
[1157]1099 from asap import linecatalog
1100 # simple value
[1118]1101 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]1102 # TT mod
1103 #self._setrestfreqs(freqs, "",unit)
1104 self._setrestfreqs([freqs], [""],unit)
[1157]1105 # list of values
[1118]1106 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1107 # list values are scalars
[1118]1108 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]1109 self._setrestfreqs(freqs, [""],unit)
[1157]1110 # list values are tuples, (value, name)
1111 elif isinstance(freqs[-1], dict):
[1446]1112 #sel = selector()
1113 #savesel = self._getselection()
1114 #iflist = self.getifnos()
1115 #for i in xrange(len(freqs)):
1116 # sel.set_ifs(iflist[i])
1117 # self._setselection(sel)
1118 # self._setrestfreqs(freqs[i], "",unit)
1119 #self._setselection(savesel)
1120 self._setrestfreqs(freqs["value"],
1121 freqs["name"], "MHz")
1122 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1123 sel = selector()
1124 savesel = self._getselection()
[1322]1125 iflist = self.getifnos()
[1446]1126 if len(freqs)>len(iflist):
1127 raise ValueError("number of elements in list of list exeeds the current IF selections")
[1157]1128 for i in xrange(len(freqs)):
[1322]1129 sel.set_ifs(iflist[i])
[1259]1130 self._setselection(sel)
[1157]1131 self._setrestfreqs(freqs[i]["value"],
1132 freqs[i]["name"], "MHz")
1133 self._setselection(savesel)
1134 # freqs are to be taken from a linecatalog
[1153]1135 elif isinstance(freqs, linecatalog):
1136 sel = selector()
 1137 savesel = self._getselection()
 iflist = self.getifnos()  # IF list used by the per-IF selection below
1138 for i in xrange(freqs.nrow()):
[1322]1139 sel.set_ifs(iflist[i])
[1153]1140 self._setselection(sel)
1141 self._setrestfreqs(freqs.get_frequency(i),
1142 freqs.get_name(i), "MHz")
1143 # ensure that we are not iterating past nIF
1144 if i == self.nif()-1: break
1145 self._setselection(savesel)
[931]1146 else:
1147 return
1148 self._add_history("set_restfreqs", varlist)
1149
[1360]1150 def shift_refpix(self, delta):
1151 """
 1152 Shift the reference pixel of the Spectral Coordinate by an
1153 integer amount.
1154 Parameters:
1155 delta: the amount to shift by
1156 Note:
1157 Be careful using this with broadband data.
1158 """
1159 Scantable.shift(self, delta)
[931]1160
[1259]1161 def history(self, filename=None):
1162 """
1163 Print the history. Optionally to a file.
[1348]1164 Parameters:
1165 filename: The name of the file to save the history to.
[1259]1166 """
[484]1167 hist = list(self._gethistory())
[794]1168 out = "-"*80
[484]1169 for h in hist:
[489]1170 if h.startswith("---"):
[794]1171 out += "\n"+h
[489]1172 else:
1173 items = h.split("##")
1174 date = items[0]
1175 func = items[1]
1176 items = items[2:]
[794]1177 out += "\n"+date+"\n"
1178 out += "Function: %s\n Parameters:" % (func)
[489]1179 for i in items:
1180 s = i.split("=")
[1118]1181 out += "\n %s = %s" % (s[0], s[1])
[794]1182 out += "\n"+"-"*80
[1259]1183 if filename is not None:
 1184 if filename == "":
1185 filename = 'scantable_history.txt'
1186 import os
1187 filename = os.path.expandvars(os.path.expanduser(filename))
1188 if not os.path.isdir(filename):
1189 data = open(filename, 'w')
1190 data.write(out)
1191 data.close()
1192 else:
1193 msg = "Illegal file name '%s'." % (filename)
1194 if rcParams['verbose']:
[1612]1195 #print msg
[1614]1196 asaplog.push( msg )
1197 print_log( 'ERROR' )
[1259]1198 else:
1199 raise IOError(msg)
1200 if rcParams['verbose']:
1201 try:
1202 from IPython.genutils import page as pager
1203 except ImportError:
1204 from pydoc import pager
1205 pager(out)
1206 else:
1207 return out
[484]1208 return
[513]1209 #
1210 # Maths business
1211 #
1212
[931]1213 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1214 """
[1070]1215 Return the (time) weighted average of a scan.
[513]1216 Note:
[1070]1217 in channels only - align if necessary
[513]1218 Parameters:
1219 mask: an optional mask (only used for 'var' and 'tsys'
1220 weighting)
[558]1221 scanav: True averages each scan separately
1222 False (default) averages all scans together,
[1099]1223 weight: Weighting scheme.
1224 'none' (mean no weight)
1225 'var' (1/var(spec) weighted)
1226 'tsys' (1/Tsys**2 weighted)
1227 'tint' (integration time weighted)
1228 'tintsys' (Tint/Tsys**2)
1229 'median' ( median averaging)
[535]1230 The default is 'tint'
[931]1231 align: align the spectra in velocity before averaging. It takes
1232 the time of the first spectrum as reference time.
[513]1233 Example:
1234 # time average the scantable without using a mask
[710]1235 newscan = scan.average_time()
[513]1236 """
1237 varlist = vars()
[976]1238 if weight is None: weight = 'TINT'
[513]1239 if mask is None: mask = ()
[1099]1240 if scanav: scanav = "SCAN"
1241 else: scanav = "NONE"
[1118]1242 scan = (self, )
[989]1243 try:
[1118]1244 if align:
1245 scan = (self.freq_align(insitu=False), )
1246 s = None
1247 if weight.upper() == 'MEDIAN':
1248 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1249 scanav))
1250 else:
1251 s = scantable(self._math._average(scan, mask, weight.upper(),
1252 scanav))
1253 except RuntimeError, msg:
[989]1254 if rcParams['verbose']:
[1612]1255 #print msg
[1614]1256 print_log()
[1676]1257 asaplog.push( str(msg) )
[1614]1258 print_log( 'ERROR' )
[989]1259 return
1260 else: raise
[1099]1261 s._add_history("average_time", varlist)
[718]1262 print_log()
[513]1263 return s
[710]1264
[876]1265 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1266 """
1267 Return a scan where all spectra are converted to either
1268 Jansky or Kelvin depending upon the flux units of the scan table.
1269 By default the function tries to look the values up internally.
1270 If it can't find them (or if you want to over-ride), you must
1271 specify EITHER jyperk OR eta (and D which it will try to look up
1272 also if you don't set it). jyperk takes precedence if you set both.
1273 Parameters:
1274 jyperk: the Jy / K conversion factor
1275 eta: the aperture efficiency
 1276 d: the geometric diameter (metres)
1277 insitu: if False a new scantable is returned.
1278 Otherwise, the scaling is done in-situ
1279 The default is taken from .asaprc (False)
1280 """
1281 if insitu is None: insitu = rcParams['insitu']
[876]1282 self._math._setinsitu(insitu)
[513]1283 varlist = vars()
1284 if jyperk is None: jyperk = -1.0
1285 if d is None: d = -1.0
1286 if eta is None: eta = -1.0
[876]1287 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1288 s._add_history("convert_flux", varlist)
1289 print_log()
1290 if insitu: self._assign(s)
1291 else: return s
[513]1292
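# Illustrative usage, not part of the original source (the Jy/K factor is an
# arbitrary placeholder):
#   jyscan = scan.convert_flux(jyperk=12.5, insitu=False)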
[876]1293 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1294 """
1295 Return a scan after applying a gain-elevation correction.
1296 The correction can be made via either a polynomial or a
1297 table-based interpolation (and extrapolation if necessary).
1298 You specify polynomial coefficients, an ascii table or neither.
1299 If you specify neither, then a polynomial correction will be made
1300 with built in coefficients known for certain telescopes (an error
1301 will occur if the instrument is not known).
1302 The data and Tsys are *divided* by the scaling factors.
1303 Parameters:
1304 poly: Polynomial coefficients (default None) to compute a
1305 gain-elevation correction as a function of
1306 elevation (in degrees).
1307 filename: The name of an ascii file holding correction factors.
1308 The first row of the ascii file must give the column
1309 names and these MUST include columns
1310 "ELEVATION" (degrees) and "FACTOR" (multiply data
1311 by this) somewhere.
1312 The second row must give the data type of the
1313 column. Use 'R' for Real and 'I' for Integer.
1314 An example file would be
1315 (actual factors are arbitrary) :
1316
1317 TIME ELEVATION FACTOR
1318 R R R
1319 0.1 0 0.8
1320 0.2 20 0.85
1321 0.3 40 0.9
1322 0.4 60 0.85
1323 0.5 80 0.8
1324 0.6 90 0.75
1325 method: Interpolation method when correcting from a table.
1326 Values are "nearest", "linear" (default), "cubic"
1327 and "spline"
1328 insitu: if False a new scantable is returned.
1329 Otherwise, the scaling is done in-situ
1330 The default is taken from .asaprc (False)
1331 """
1332
1333 if insitu is None: insitu = rcParams['insitu']
[876]1334 self._math._setinsitu(insitu)
[513]1335 varlist = vars()
1336 if poly is None:
[1118]1337 poly = ()
[513]1338 from os.path import expandvars
1339 filename = expandvars(filename)
[876]1340 s = scantable(self._math._gainel(self, poly, filename, method))
1341 s._add_history("gain_el", varlist)
1342 print_log()
1343 if insitu: self._assign(s)
1344 else: return s
[710]1345
[931]1346 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1347 """
1348 Return a scan where all rows have been aligned in frequency/velocity.
1349 The alignment frequency frame (e.g. LSRK) is that set by function
1350 set_freqframe.
1351 Parameters:
1352 reftime: reference time to align at. By default, the time of
1353 the first row of data is used.
1354 method: Interpolation method for regridding the spectra.
1355 Choose from "nearest", "linear", "cubic" (default)
1356 and "spline"
1357 insitu: if False a new scantable is returned.
1358 Otherwise, the scaling is done in-situ
1359 The default is taken from .asaprc (False)
1360 """
[931]1361 if insitu is None: insitu = rcParams["insitu"]
[876]1362 self._math._setinsitu(insitu)
[513]1363 varlist = vars()
[931]1364 if reftime is None: reftime = ""
1365 s = scantable(self._math._freq_align(self, reftime, method))
[876]1366 s._add_history("freq_align", varlist)
1367 print_log()
1368 if insitu: self._assign(s)
1369 else: return s
[513]1370
[876]1371 def opacity(self, tau, insitu=None):
[513]1372 """
1373 Apply an opacity correction. The data
1374 and Tsys are multiplied by the correction factor.
1375 Parameters:
1376 tau: Opacity from which the correction factor is
1377 exp(tau*ZD)
1378 where ZD is the zenith-distance
1379 insitu: if False a new scantable is returned.
1380 Otherwise, the scaling is done in-situ
1381 The default is taken from .asaprc (False)
1382 """
1383 if insitu is None: insitu = rcParams['insitu']
[876]1384 self._math._setinsitu(insitu)
[513]1385 varlist = vars()
[876]1386 s = scantable(self._math._opacity(self, tau))
1387 s._add_history("opacity", varlist)
1388 print_log()
1389 if insitu: self._assign(s)
1390 else: return s
[513]1391
1392 def bin(self, width=5, insitu=None):
1393 """
1394 Return a scan where all spectra have been binned up.
[1348]1395 Parameters:
[513]1396 width: The bin width (default=5) in pixels
1397 insitu: if False a new scantable is returned.
1398 Otherwise, the scaling is done in-situ
1399 The default is taken from .asaprc (False)
1400 """
1401 if insitu is None: insitu = rcParams['insitu']
[876]1402 self._math._setinsitu(insitu)
[513]1403 varlist = vars()
[876]1404 s = scantable(self._math._bin(self, width))
[1118]1405 s._add_history("bin", varlist)
[876]1406 print_log()
1407 if insitu: self._assign(s)
1408 else: return s
[513]1409
[710]1410
[513]1411 def resample(self, width=5, method='cubic', insitu=None):
1412 """
[1348]1413 Return a scan where all spectra have been resampled.
1414
1415 Parameters:
[513]1416 width: The bin width (default=5) in pixels
 1417 method: Interpolation method for the resampling.
1418 Values are "nearest", "linear", "cubic" (default)
1419 and "spline"
1420 insitu: if False a new scantable is returned.
1421 Otherwise, the scaling is done in-situ
1422 The default is taken from .asaprc (False)
1423 """
1424 if insitu is None: insitu = rcParams['insitu']
[876]1425 self._math._setinsitu(insitu)
[513]1426 varlist = vars()
[876]1427 s = scantable(self._math._resample(self, method, width))
[1118]1428 s._add_history("resample", varlist)
[876]1429 print_log()
1430 if insitu: self._assign(s)
1431 else: return s
[513]1432
1433
[946]1434 def average_pol(self, mask=None, weight='none'):
1435 """
1436 Average the Polarisations together.
1437 Parameters:
1438 mask: An optional mask defining the region, where the
1439 averaging will be applied. The output will have all
1440 specified points masked.
1441 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1442 weighted), or 'tsys' (1/Tsys**2 weighted)
1443 """
1444 varlist = vars()
1445 if mask is None:
1446 mask = ()
[1010]1447 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1448 s._add_history("average_pol", varlist)
[946]1449 print_log()
[992]1450 return s
[513]1451
[1145]1452 def average_beam(self, mask=None, weight='none'):
1453 """
1454 Average the Beams together.
1455 Parameters:
1456 mask: An optional mask defining the region, where the
1457 averaging will be applied. The output will have all
1458 specified points masked.
1459 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1460 weighted), or 'tsys' (1/Tsys**2 weighted)
1461 """
1462 varlist = vars()
1463 if mask is None:
1464 mask = ()
1465 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1466 s._add_history("average_beam", varlist)
1467 print_log()
1468 return s
1469
[992]1470 def convert_pol(self, poltype=None):
1471 """
1472 Convert the data to a different polarisation type.
1473 Parameters:
1474 poltype: The new polarisation type. Valid types are:
1475 "linear", "stokes" and "circular"
1476 """
1477 varlist = vars()
1478 try:
1479 s = scantable(self._math._convertpol(self, poltype))
[1118]1480 except RuntimeError, msg:
[992]1481 if rcParams['verbose']:
[1612]1482 #print msg
[1614]1483 print_log()
[1676]1484 asaplog.push( str(msg) )
[1614]1485 print_log( 'ERROR' )
[1118]1486 return
[992]1487 else:
1488 raise
[1118]1489 s._add_history("convert_pol", varlist)
[992]1490 print_log()
1491 return s
1492
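# Illustrative usage, not part of the original source:
#   stokes_scan = scan.convert_pol("stokes")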
[1653]1493 #def smooth(self, kernel="hanning", width=5.0, insitu=None):
1494 def smooth(self, kernel="hanning", width=5.0, plot=False, insitu=None):
[513]1495 """
1496 Smooth the spectrum by the specified kernel (conserving flux).
1497 Parameters:
1498 kernel: The type of smoothing kernel. Select from
[1373]1499 'hanning' (default), 'gaussian', 'boxcar' and
1500 'rmedian'
[513]1501 width: The width of the kernel in pixels. For hanning this is
 1502 ignored, otherwise it defaults to 5 pixels.
1503 For 'gaussian' it is the Full Width Half
1504 Maximum. For 'boxcar' it is the full width.
[1373]1505 For 'rmedian' it is the half width.
[1653]1506 plot: plot the original and the smoothed spectra.
 1507 In this each individual fit has to be approved, by
1508 typing 'y' or 'n'
[513]1509 insitu: if False a new scantable is returned.
1510 Otherwise, the scaling is done in-situ
1511 The default is taken from .asaprc (False)
1512 Example:
1513 none
1514 """
1515 if insitu is None: insitu = rcParams['insitu']
[876]1516 self._math._setinsitu(insitu)
[513]1517 varlist = vars()
[1653]1518
1519 if plot: orgscan = self.copy()
1520
[1118]1521 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1522 s._add_history("smooth", varlist)
[1653]1523
1524 if plot:
1525 if rcParams['plotter.gui']:
1526 from asap.asaplotgui import asaplotgui as asaplot
1527 else:
1528 from asap.asaplot import asaplot
1529 self._p=asaplot()
1530 self._p.set_panels()
1531 ylab=s._get_ordinate_label()
1532 #self._p.palette(0,["#777777","red"])
1533 for r in xrange(s.nrow()):
1534 xsm=s._getabcissa(r)
1535 ysm=s._getspectrum(r)
1536 xorg=orgscan._getabcissa(r)
1537 yorg=orgscan._getspectrum(r)
1538 self._p.clear()
1539 self._p.hold()
1540 self._p.set_axes('ylabel',ylab)
1541 self._p.set_axes('xlabel',s._getabcissalabel(r))
1542 self._p.set_axes('title',s._getsourcename(r))
1543 self._p.set_line(label='Original',color="#777777")
1544 self._p.plot(xorg,yorg)
1545 self._p.set_line(label='Smoothed',color="red")
1546 self._p.plot(xsm,ysm)
1547 ### Ugly part for legend
1548 for i in [0,1]:
1549 self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1550 self._p.release()
1551 ### Ugly part for legend
1552 self._p.subplots[0]['lines']=[]
1553 res = raw_input("Accept smoothing ([y]/n): ")
1554 if res.upper() == 'N':
1555 s._setspectrum(yorg, r)
1556 self._p.unmap()
1557 self._p = None
1558 del orgscan
1559
[876]1560 print_log()
1561 if insitu: self._assign(s)
1562 else: return s
[513]1563
[876]1564
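# Illustrative usage, not part of the original source:
#   sm = scan.smooth(kernel='boxcar', width=5, insitu=False)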
[1389]1565 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1566 """
1567 Return a scan which has been baselined (all rows) by a polynomial.
1568 Parameters:
[794]1569 mask: an optional mask
1570 order: the order of the polynomial (default is 0)
[1061]1571 plot: plot the fit and the residual. In this each
 1572 individual fit has to be approved, by typing 'y'
1573 or 'n'
[1389]1574 uselin: use linear polynomial fit
[794]1575 insitu: if False a new scantable is returned.
1576 Otherwise, the scaling is done in-situ
1577 The default is taken from .asaprc (False)
[513]1578 Example:
1579 # return a scan baselined by a third order polynomial,
1580 # not using a mask
1581 bscan = scan.poly_baseline(order=3)
[579]1582 """
[513]1583 if insitu is None: insitu = rcParams['insitu']
[1649]1584 if not insitu:
1585 workscan = self.copy()
1586 else:
1587 workscan = self
[513]1588 varlist = vars()
1589 if mask is None:
[1295]1590 mask = [True for i in xrange(self.nchan(-1))]
[1637]1591
[513]1592 from asap.asapfitter import fitter
[1217]1593 try:
1594 f = fitter()
[1389]1595 if uselin:
1596 f.set_function(lpoly=order)
1597 else:
1598 f.set_function(poly=order)
[1637]1599
[1655]1600 rows = range(workscan.nrow())
[1637]1601 if len(rows) > 0:
1602 self.blpars = []
1603
1604 for r in rows:
1605 # take into account flagtra info (CAS-1434)
[1655]1606 flagtra = workscan._getmask(r)
[1637]1607 actualmask = mask[:]
1608 if len(actualmask) == 0:
1609 actualmask = list(flagtra[:])
1610 else:
1611 if len(actualmask) != len(flagtra):
1612 raise RuntimeError, "Mask and flagtra have different length"
1613 else:
1614 for i in range(0, len(actualmask)):
1615 actualmask[i] = actualmask[i] and flagtra[i]
[1655]1616 f.set_scan(workscan, actualmask)
1617 f.x = workscan._getabcissa(r)
1618 f.y = workscan._getspectrum(r)
[1637]1619 f.data = None
1620 f.fit()
[1653]1621 if plot:
1622 f.plot(residual=True)
1623 x = raw_input("Accept fit ( [y]/n ): ")
1624 if x.upper() == 'N':
1625 self.blpars.append(None)
1626 continue
[1649]1627 workscan._setspectrum(f.fitter.getresidual(), r)
[1655]1628 self.blpars.append(f.get_parameters())
1629
[1653]1630 if plot:
1631 f._p.unmap()
1632 f._p = None
[1649]1633 workscan._add_history("poly_baseline", varlist)
[1217]1634 print_log()
[1649]1635 if insitu: self._assign(workscan)
1636 else: return workscan
[1217]1637 except RuntimeError:
1638 msg = "The fit failed, possibly because it didn't converge."
1639 if rcParams['verbose']:
[1612]1640 #print msg
[1614]1641 print_log()
[1676]1642 asaplog.push( str(msg) )
[1614]1643 print_log( 'ERROR' )
[1217]1644 return
1645 else:
1646 raise RuntimeError(msg)
[513]1647
[1217]1648
[1118]1649 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1650 threshold=3, chan_avg_limit=1, plot=False,
1651 insitu=None):
[880]1652 """
1653 Return a scan which has been baselined (all rows) by a polynomial.
1654 Spectral lines are detected first using linefinder and masked out
1655 to avoid them affecting the baseline solution.
1656
1657 Parameters:
     1658            mask:       an optional mask retrieved from scantable
     1659            edge:       an optional number of channels to drop at
     1660                        the edge of the spectrum. If only one value is
     1661                        specified, the same number will be dropped from
     1662                        both sides of the spectrum. Default is to keep
[907]1663                        all channels. Nested tuples represent individual
[976]1664                        edge selection for different IFs (the number of spectral
     1665                        channels may differ between IFs)
[880]1666 order: the order of the polynomial (default is 0)
1667 threshold: the threshold used by line finder. It is better to
1668 keep it large as only strong lines affect the
1669 baseline solution.
[1280]1670 chan_avg_limit:
     1671                        a maximum number of consecutive spectral channels to
     1672                        average during the search for weak and broad lines.
1673 The default is no averaging (and no search for weak
1674 lines). If such lines can affect the fitted baseline
1675 (e.g. a high order polynomial is fitted), increase this
1676 parameter (usually values up to 8 are reasonable). Most
1677 users of this method should find the default value
1678 sufficient.
[1061]1679            plot:       plot the fit and the residual. In this case each
     1680                        individual fit has to be approved by typing 'y'
     1681                        or 'n'
[880]1682 insitu: if False a new scantable is returned.
     1683                        Otherwise, the baselining is done in-situ
1684 The default is taken from .asaprc (False)
1685
1686 Example:
1687 scan2=scan.auto_poly_baseline(order=7)
1688 """
1689 if insitu is None: insitu = rcParams['insitu']
1690 varlist = vars()
1691 from asap.asapfitter import fitter
1692 from asap.asaplinefind import linefinder
1693 from asap import _is_sequence_or_number as _is_valid
1694
[976]1695 # check whether edge is set up for each IF individually
[1118]1696        individualedge = False
1697 if len(edge) > 1:
1698 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
     1699                individualedge = True
[907]1700
[1118]1701 if not _is_valid(edge, int) and not individualedge:
[909]1702 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1703 pair of integers specified as a tuple. Nested tuples are allowed \
1704 to make individual selection for different IFs."
[919]1705
[1118]1706 curedge = (0, 0)
1707 if individualedge:
1708 for edgepar in edge:
1709 if not _is_valid(edgepar, int):
1710 raise ValueError, "Each element of the 'edge' tuple has \
1711 to be a pair of integers or an integer."
[907]1712 else:
[1118]1713            curedge = edge
[880]1714
1715 # setup fitter
1716 f = fitter()
1717 f.set_function(poly=order)
1718
1719 # setup line finder
[1118]1720 fl = linefinder()
[1268]1721 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1722
1723 if not insitu:
[1118]1724 workscan = self.copy()
[880]1725 else:
[1118]1726 workscan = self
[880]1727
[907]1728 fl.set_scan(workscan)
1729
[1118]1730 rows = range(workscan.nrow())
[1446]1731 # Save parameters of baseline fits & masklists as a class attribute.
1732 # NOTICE: It does not reflect changes in scantable!
1733 if len(rows) > 0:
1734 self.blpars=[]
1735 self.masklists=[]
[880]1736 asaplog.push("Processing:")
1737 for r in rows:
[1118]1738 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1739 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1740 workscan.getpol(r), workscan.getcycle(r))
[880]1741 asaplog.push(msg, False)
[907]1742
[976]1743 # figure out edge parameter
[1118]1744 if individualedge:
     1745                if workscan.getif(r) >= len(edge):
     1746                    raise RuntimeError, "Number of edge elements appears to " \
     1747                                        "be less than the number of IFs"
     1748                curedge = edge[workscan.getif(r)]
[919]1749
[1637]1750 # take into account flagtra info (CAS-1434)
1751 flagtra = workscan._getmask(r)
1752 actualmask = mask[:]
1753 if len(actualmask) == 0:
1754 actualmask = list(flagtra[:])
1755 else:
1756 if len(actualmask) != len(flagtra):
1757 raise RuntimeError, "Mask and flagtra have different length"
1758 else:
1759 for i in range(0, len(actualmask)):
1760 actualmask[i] = actualmask[i] and flagtra[i]
1761
[976]1762            # run line finder on this row
[1637]1763 fl.find_lines(r, actualmask, curedge)
[1446]1764 outmask=fl.get_mask()
[880]1765 f.set_scan(workscan, fl.get_mask())
1766 f.x = workscan._getabcissa(r)
1767 f.y = workscan._getspectrum(r)
1768 f.data = None
1769 f.fit()
[1446]1770
1771 # Show mask list
1772 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1773 msg = "mask range: "+str(masklist)
1774 asaplog.push(msg, False)
1775
[1061]1776 if plot:
1777 f.plot(residual=True)
1778 x = raw_input("Accept fit ( [y]/n ): ")
1779 if x.upper() == 'N':
[1446]1780 self.blpars.append(None)
1781 self.masklists.append(None)
[1061]1782 continue
[1655]1783
[880]1784 workscan._setspectrum(f.fitter.getresidual(), r)
[1655]1785 self.blpars.append(f.get_parameters())
[1446]1786 self.masklists.append(masklist)
[1061]1787 if plot:
1788 f._p.unmap()
1789 f._p = None
1790 workscan._add_history("auto_poly_baseline", varlist)
[880]1791 if insitu:
1792 self._assign(workscan)
1793 else:
1794 return workscan
1795
[914]1796 def rotate_linpolphase(self, angle):
1797 """
1798 Rotate the phase of the complex polarization O=Q+iU correlation.
1799 This is always done in situ in the raw data. So if you call this
1800 function more than once then each call rotates the phase further.
1801 Parameters:
1802 angle: The angle (degrees) to rotate (add) by.
1803 Examples:
1804 scan.rotate_linpolphase(2.3)
1805 """
1806 varlist = vars()
[936]1807 self._math._rotate_linpolphase(self, angle)
[914]1808 self._add_history("rotate_linpolphase", varlist)
1809 print_log()
1810 return
[710]1811
[513]1812
[914]1813 def rotate_xyphase(self, angle):
1814 """
1815 Rotate the phase of the XY correlation. This is always done in situ
1816 in the data. So if you call this function more than once
1817 then each call rotates the phase further.
1818 Parameters:
1819 angle: The angle (degrees) to rotate (add) by.
1820 Examples:
1821 scan.rotate_xyphase(2.3)
1822 """
1823 varlist = vars()
[936]1824 self._math._rotate_xyphase(self, angle)
[914]1825 self._add_history("rotate_xyphase", varlist)
1826 print_log()
1827 return
1828
1829 def swap_linears(self):
1830 """
[1348]1831 Swap the linear polarisations XX and YY, or better the first two
     1832        polarisations, as this also works for circulars.
[914]1833 """
1834 varlist = vars()
[936]1835 self._math._swap_linears(self)
[914]1836 self._add_history("swap_linears", varlist)
1837 print_log()
1838 return
1839
1840 def invert_phase(self):
1841 """
1842 Invert the phase of the complex polarisation
1843 """
1844 varlist = vars()
[936]1845 self._math._invert_phase(self)
[914]1846 self._add_history("invert_phase", varlist)
1847 print_log()
1848 return
1849
[876]1850 def add(self, offset, insitu=None):
[513]1851 """
1852 Return a scan where all spectra have the offset added
1853 Parameters:
1854 offset: the offset
1855 insitu: if False a new scantable is returned.
1856 Otherwise, the scaling is done in-situ
1857 The default is taken from .asaprc (False)
1858 """
1859 if insitu is None: insitu = rcParams['insitu']
[876]1860 self._math._setinsitu(insitu)
[513]1861 varlist = vars()
[876]1862 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1863 s._add_history("add", varlist)
[876]1864 print_log()
1865 if insitu:
1866 self._assign(s)
1867 else:
[513]1868 return s
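    # A minimal usage sketch for add() (the name 'scan' is illustrative, not
    # defined here):
    #   shifted = scan.add(1.5)          # new scantable with +1.5 on every spectrum
    #   scan.add(1.5, insitu=True)       # or apply the offset in place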
1869
[1680]1870 def scale(self, factor, tsys=True, insitu=None):
[513]1871 """
     1872        Return a scan where all spectra are scaled by the given 'factor'
1873 Parameters:
[1677]1874            factor:      the scaling factor (float, or a 1D/2D list or numpy array of floats)
[513]1875 insitu: if False a new scantable is returned.
1876 Otherwise, the scaling is done in-situ
1877 The default is taken from .asaprc (False)
1878 tsys: if True (default) then apply the operation to Tsys
1879 as well as the data
1880 """
1881 if insitu is None: insitu = rcParams['insitu']
[876]1882 self._math._setinsitu(insitu)
[513]1883 varlist = vars()
[1680]1884 s = None
[1681]1885 import numpy
1886 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
1887 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
[1680]1888 from asapmath import _array2dOp
1889 s = _array2dOp( self.copy(), factor, "MUL", tsys )
[1677]1890 else:
[1681]1891 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
[1677]1892 else:
1893 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]1894 s._add_history("scale", varlist)
[876]1895 print_log()
1896 if insitu:
1897 self._assign(s)
1898 else:
[513]1899 return s
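    # A hedged usage sketch for scale() (variable names illustrative): the factor
    # may be a scalar, a per-channel 1D list/array, or a 2D list/array assumed to
    # hold one factor list per row (handled by the _array2dOp branch above).
    #   doubled = scan.scale(2.0)
    #   perchan = scan.scale([1.0] * scan.nchan())
    #   perrow  = scan.scale([[1.0] * scan.nchan()] * scan.nrow())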
1900
[1603]1901 def set_sourcetype(self, match, matchtype="pattern",
1902 sourcetype="reference"):
1903 """
     1904        Set the type of the source to be a source or reference scan
     1905        using the provided pattern.
1906 Parameters:
1907 match: a Unix style pattern, regular expression or selector
1908 matchtype: 'pattern' (default) UNIX style pattern or
1909 'regex' regular expression
1910 sourcetype: the type of the source to use (source/reference)
1911 """
1912 varlist = vars()
1913 basesel = self.get_selection()
1914 stype = -1
1915 if sourcetype.lower().startswith("r"):
1916 stype = 1
1917 elif sourcetype.lower().startswith("s"):
1918 stype = 0
1919 else:
     1920            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1921 if matchtype.lower().startswith("p"):
1922 matchtype = "pattern"
1923 elif matchtype.lower().startswith("r"):
1924 matchtype = "regex"
1925 else:
1926 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1927 sel = selector()
1928 if isinstance(match, selector):
1929 sel = match
1930 else:
1931 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1932 self.set_selection(basesel+sel)
1933 self._setsourcetype(stype)
1934 self.set_selection(basesel)
     1935        self._add_history("set_sourcetype", varlist)
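    # A hedged usage sketch (the '_R' suffix is only an example naming convention):
    #   scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")
    #   scan.set_sourcetype("^Orion.*", matchtype="regex", sourcetype="source")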
1936
[1631]1937 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[670]1938 """
     1939        This function allows you to build quotients automatically.
[1631]1940        It assumes the observation has the same number of
[670]1941        "ons" and "offs".
1942 Parameters:
[710]1943 preserve: you can preserve (default) the continuum or
1944 remove it. The equations used are
[670]1945 preserve: Output = Toff * (on/off) - Toff
[1070]1946 remove: Output = Toff * (on/off) - Ton
[1348]1947 mode: the on/off detection mode
1948 'paired' (default)
1949 identifies 'off' scans by the
1950 trailing '_R' (Mopra/Parkes) or
1951 '_e'/'_w' (Tid) and matches
1952 on/off pairs from the observing pattern
[1603]1953 'time'
1954 finds the closest off in time
[1348]1955
[670]1956 """
[1348]1957 modes = ["time", "paired"]
[670]1958 if not mode in modes:
[876]1959 msg = "please provide valid mode. Valid modes are %s" % (modes)
1960 raise ValueError(msg)
1961 varlist = vars()
[1348]1962 s = None
1963 if mode.lower() == "paired":
1964 basesel = self.get_selection()
[1356]1965 sel = selector()+basesel
1966 sel.set_query("SRCTYPE==1")
1967 self.set_selection(sel)
[1348]1968 offs = self.copy()
1969 sel.set_query("SRCTYPE==0")
[1356]1970 self.set_selection(sel)
[1348]1971 ons = self.copy()
1972 s = scantable(self._math._quotient(ons, offs, preserve))
1973 self.set_selection(basesel)
1974 elif mode.lower() == "time":
1975 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1976 s._add_history("auto_quotient", varlist)
[876]1977 print_log()
1978 return s
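    # A minimal usage sketch for auto_quotient():
    #   q = scan.auto_quotient()                             # 'paired' mode, continuum preserved
    #   q = scan.auto_quotient(preserve=False, mode='time')  # match the closest off in time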
[710]1979
[1145]1980 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1981 """
[1143]1982 Form a quotient using "off" beams when observing in "MX" mode.
1983 Parameters:
[1145]1984 mask: an optional mask to be used when weight == 'stddev'
[1143]1985 weight: How to average the off beams. Default is 'median'.
[1145]1986 preserve: you can preserve (default) the continuum or
1987 remove it. The equations used are
1988 preserve: Output = Toff * (on/off) - Toff
1989 remove: Output = Toff * (on/off) - Ton
[1217]1990 """
[1143]1991 if mask is None: mask = ()
[1141]1992 varlist = vars()
1993 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1994 preoff = scantable(self._math._mx_extract(self, 'off'))
1995 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1996 from asapmath import quotient
[1145]1997 q = quotient(on, off, preserve)
[1143]1998 q._add_history("mx_quotient", varlist)
[1145]1999 print_log()
[1217]2000 return q
[513]2001
[718]2002 def freq_switch(self, insitu=None):
2003 """
2004 Apply frequency switching to the data.
2005 Parameters:
2006 insitu: if False a new scantable is returned.
     2007                    Otherwise, the switching is done in-situ
2008 The default is taken from .asaprc (False)
2009 Example:
2010 none
2011 """
2012 if insitu is None: insitu = rcParams['insitu']
[876]2013 self._math._setinsitu(insitu)
[718]2014 varlist = vars()
[876]2015 s = scantable(self._math._freqswitch(self))
[1118]2016 s._add_history("freq_switch", varlist)
[876]2017 print_log()
2018 if insitu: self._assign(s)
2019 else: return s
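    # A minimal usage sketch, since the docstring example above is empty:
    #   fs = scan.freq_switch()          # returns a frequency-switched copy
    #   scan.freq_switch(insitu=True)    # or switch the data in place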
[718]2020
[780]2021 def recalc_azel(self):
2022 """
2023 Recalculate the azimuth and elevation for each position.
2024 Parameters:
2025 none
2026 Example:
2027 """
2028 varlist = vars()
[876]2029 self._recalcazel()
[780]2030 self._add_history("recalc_azel", varlist)
2031 print_log()
2032 return
2033
[513]2034 def __add__(self, other):
[1681]2035 """
2036 implicit on all axes and on Tsys
2037 """
2038 return self._operation( other, "ADD" )
[513]2039
2040 def __sub__(self, other):
2041 """
2042 implicit on all axes and on Tsys
2043 """
[1681]2044 return self._operation( other, 'SUB' )
[710]2045
[513]2046 def __mul__(self, other):
2047 """
2048 implicit on all axes and on Tsys
2049 """
[1681]2050 return self._operation( other, 'MUL' )
[513]2051
2052 def __div__(self, other):
2053 """
2054 implicit on all axes and on Tsys
2055 """
[1681]2056 return self._operation( other, 'DIV' )
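    # A hedged sketch of the operator overloads above: scalars, per-channel lists
    # and numpy arrays are all dispatched through _operation() (added in this
    # revision); 'scan' and 'other_scan' are illustrative names only.
    #   import numpy
    #   s1 = scan + 1.0                          # constant offset
    #   s2 = scan * ([2.0] * scan.nchan())       # per-channel factors
    #   s3 = scan / numpy.ones(scan.nchan())     # numpy.ndarray operand
    #   s4 = scan - other_scan                   # scantable with scantable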
[513]2057
[530]2058 def get_fit(self, row=0):
2059 """
2060 Print or return the stored fits for a row in the scantable
2061 Parameters:
2062 row: the row which the fit has been applied to.
2063 """
     2064        if row >= self.nrow():
2065 return
[976]2066 from asap.asapfit import asapfit
[530]2067 fit = asapfit(self._getfit(row))
[718]2068 if rcParams['verbose']:
[1612]2069 #print fit
[1614]2070 asaplog.push( '%s' %(fit) )
2071 print_log()
[530]2072 return
2073 else:
2074 return fit.as_dict()
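    # Usage note: with rcParams['verbose'] the stored fit is only printed;
    # otherwise a dictionary is returned, e.g.
    #   pars = scan.get_fit(0)    # dict describing the fit stored for row 0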
2075
[1603]2076 def flag_nans(self):
2077 """
2078 Utility function to flag NaN values in the scantable.
2079 """
2080 import numpy
2081 basesel = self.get_selection()
2082 for i in range(self.nrow()):
2083 sel = selector()+basesel
2084 sel.set_scans(self.getscan(i))
2085 sel.set_beams(self.getbeam(i))
2086 sel.set_ifs(self.getif(i))
2087 sel.set_polarisations(self.getpol(i))
2088 self.set_selection(sel)
2089 nans = numpy.isnan(self._getspectrum(0))
2090 if numpy.any(nans):
2091 bnans = [ bool(v) for v in nans]
2092 self.flag(bnans)
2093 self.set_selection(basesel)
2094
2095
[484]2096 def _add_history(self, funcname, parameters):
[1603]2097 if not rcParams['scantable.history']:
2098 return
[484]2099 # create date
2100 sep = "##"
2101 from datetime import datetime
2102 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2103 hist = dstr+sep
2104 hist += funcname+sep#cdate+sep
2105 if parameters.has_key('self'): del parameters['self']
[1118]2106 for k, v in parameters.iteritems():
[484]2107 if type(v) is dict:
[1118]2108 for k2, v2 in v.iteritems():
[484]2109 hist += k2
2110 hist += "="
[1118]2111 if isinstance(v2, scantable):
[484]2112 hist += 'scantable'
2113 elif k2 == 'mask':
[1118]2114 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2115 hist += str(self._zip_mask(v2))
2116 else:
2117 hist += str(v2)
[484]2118 else:
[513]2119 hist += str(v2)
[484]2120 else:
2121 hist += k
2122 hist += "="
[1118]2123 if isinstance(v, scantable):
[484]2124 hist += 'scantable'
2125 elif k == 'mask':
[1118]2126 if isinstance(v, list) or isinstance(v, tuple):
[513]2127 hist += str(self._zip_mask(v))
2128 else:
2129 hist += str(v)
[484]2130 else:
2131 hist += str(v)
2132 hist += sep
2133 hist = hist[:-2] # remove trailing '##'
2134 self._addhistory(hist)
2135
[710]2136
[484]2137 def _zip_mask(self, mask):
2138 mask = list(mask)
2139 i = 0
2140 segments = []
2141 while mask[i:].count(1):
2142 i += mask[i:].index(1)
2143 if mask[i:].count(0):
2144 j = i + mask[i:].index(0)
2145 else:
[710]2146 j = len(mask)
[1118]2147 segments.append([i, j])
[710]2148 i = j
[484]2149 return segments
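    # Illustrative note: _zip_mask compresses a boolean channel mask into a list
    # of [start, end) index segments, e.g.
    #   _zip_mask([0, 1, 1, 0, 1])  ->  [[1, 3], [4, 5]]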
[714]2150
[626]2151 def _get_ordinate_label(self):
2152 fu = "("+self.get_fluxunit()+")"
2153 import re
2154 lbl = "Intensity"
[1118]2155 if re.match(".K.", fu):
[626]2156 lbl = "Brightness Temperature "+ fu
[1118]2157 elif re.match(".Jy.", fu):
[626]2158 lbl = "Flux density "+ fu
2159 return lbl
[710]2160
[876]2161 def _check_ifs(self):
2162 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2163 nchans = filter(lambda t: t > 0, nchans)
[876]2164 return (sum(nchans)/len(nchans) == nchans[0])
[976]2165
[1496]2166 def _fill(self, names, unit, average, getpt):
[976]2167 import os
2168 from asap._asap import stfiller
2169 first = True
2170 fullnames = []
2171 for name in names:
2172 name = os.path.expandvars(name)
2173 name = os.path.expanduser(name)
2174 if not os.path.exists(name):
     2175                msg = "File '%s' does not exist" % (name)
2176 if rcParams['verbose']:
2177 asaplog.push(msg)
[1612]2178 #print asaplog.pop().strip()
[1614]2179 print_log( 'ERROR' )
[976]2180 return
2181 raise IOError(msg)
2182 fullnames.append(name)
2183 if average:
2184 asaplog.push('Auto averaging integrations')
[1079]2185 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2186 for name in fullnames:
[1073]2187 tbl = Scantable(stype)
2188 r = stfiller(tbl)
[1603]2189 rx = rcParams['scantable.reference']
2190 r._setreferenceexpr(rx)
[976]2191 msg = "Importing %s..." % (name)
[1118]2192 asaplog.push(msg, False)
[976]2193 print_log()
[1496]2194 r._open(name, -1, -1, getpt)
[976]2195 r._read()
2196 if average:
[1118]2197 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2198 if not first:
2199 tbl = self._math._merge([self, tbl])
2200 Scantable.__init__(self, tbl)
2201 r._close()
[1118]2202 del r, tbl
[976]2203 first = False
2204 if unit is not None:
2205 self.set_fluxunit(unit)
[1446]2206 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]2207
[1603]2208 def __getitem__(self, key):
2209 if key < 0:
2210 key += self.nrow()
2211 if key >= self.nrow():
2212 raise IndexError("Row index out of range.")
2213 return self._getspectrum(key)
2214
2215 def __setitem__(self, key, value):
2216 if key < 0:
2217 key += self.nrow()
2218 if key >= self.nrow():
2219 raise IndexError("Row index out of range.")
2220 if not hasattr(value, "__len__") or \
2221 len(value) > self.nchan(self.getif(key)):
2222 raise ValueError("Spectrum length doesn't match.")
2223 return self._setspectrum(value, key)
2224
2225 def __len__(self):
2226 return self.nrow()
2227
2228 def __iter__(self):
2229 for i in range(len(self)):
2230 yield self[i]
[1681]2231
2232 def _operation(self, other, opmode):
2233 varlist = vars()
2234 s = None
2235 import numpy
2236 if isinstance(other, scantable):
2237 s = scantable(self._math._binaryop(self.copy(), other, opmode))
2238 elif isinstance(other, float) or isinstance(other, int):
2239 if opmode == 'DIV' and float(other) == 0.0:
2240 raise ZeroDivisionError("Dividing by zero is not recommended")
2241 s = scantable(self._math._unaryop(self.copy(), other, opmode, False))
2242 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
2243 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
2244 from asapmath import _array2dOp
2245 s = _array2dOp( self.copy(), other, opmode, False )
2246 else:
2247 s = scantable(self._math._arrayop(self.copy(), other, opmode, False))
2248 else:
     2249            raise TypeError("Other input is not a scantable, a numeric value, or a list/numpy array")
     2250        opdic = {'ADD': '+', 'SUB': '-', 'MUL': '*', 'DIV': '/'}
2255 s._add_history("operator %s" % opdic[opmode], varlist)
2256 print_log()
2257 return s
2258
2259