source: branches/alma/python/scantable.py@ 1619

Last change on this file since 1619 was 1615, checked in by Takeshi Nakazato, 15 years ago

New Development: No

JIRA Issue: Yes CAS-729, CAS-1147

Ready to Release: Yes

Interface Changes: No

Description:

I have modified the way messages are logged in the _row_callback() method so that
the message is written directly to the casalogger rather than via a temporary file.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 80.6 KB
Rev  Line 
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[1496]14 def __init__(self, filename, average=None, unit=None, getpt=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[1522]32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
34 The default is False, which can make loading
35 the MS data faster in some cases.
[710]36 """
[976]37 if average is None:
[710]38 average = rcParams['scantable.autoaverage']
[1496]39 if getpt is None:
40 getpt = False
[1259]41 varlist = vars()
[876]42 from asap._asap import stmath
43 self._math = stmath()
44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
[181]46 else:
[1603]47 if isinstance(filename, str):# or \
48# (isinstance(filename, list) or isinstance(filename, tuple)) \
49# and isinstance(filename[-1], str):
[976]50 import os.path
51 filename = os.path.expandvars(filename)
52 filename = os.path.expanduser(filename)
53 if not os.path.exists(filename):
54 s = "File '%s' not found." % (filename)
[718]55 if rcParams['verbose']:
[976]56 asaplog.push(s)
[1612]57 #print asaplog.pop().strip()
[1614]58 print_log('ERROR')
[718]59 return
[976]60 raise IOError(s)
[1115]61 if os.path.isdir(filename) \
[1118]62 and not os.path.exists(filename+'/table.f1'):
[976]63 # crude check if asap table
64 if os.path.exists(filename+'/table.info'):
[1118]65 ondisk = rcParams['scantable.storage'] == 'disk'
66 Scantable.__init__(self, filename, ondisk)
[976]67 if unit is not None:
68 self.set_fluxunit(unit)
[1496]69 # do not reset to the default freqframe
70 #self.set_freqframe(rcParams['scantable.freqframe'])
[718]71 else:
[1118]72 msg = "The given file '%s'is not a valid " \
73 "asap table." % (filename)
[976]74 if rcParams['verbose']:
[1612]75 #print msg
[1614]76 asaplog.push( msg )
77 print_log( 'ERROR' )
[976]78 return
79 else:
80 raise IOError(msg)
[226]81 else:
[1496]82 self._fill([filename], unit, average, getpt)
[1118]83 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]84 and isinstance(filename[-1], str):
[1496]85 self._fill(filename, unit, average, getpt)
[1259]86 self._add_history("scantable", varlist)
[714]87 print_log()
[102]88
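    # Illustrative usage sketch (not part of the original source): constructing a
    # scantable from a disk file. The file name 'obs.rpf' is hypothetical; any
    # rpfits/sdfits/ms file or saved asap table would do.
    #     from asap import scantable
    #     s = scantable('obs.rpf', average=True, unit='K')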
[876]89 def save(self, name=None, format=None, overwrite=False):
[116]90 """
[1280]91 Store the scantable on disk. This can be an asap (aips++) Table,
92 SDFITS or MS2 format.
[116]93 Parameters:
[1093]94 name: the name of the output file. For format "ASCII"
95 this is the root file name (data in 'name'.txt
[497]96 and header in 'name'_header.txt)
[116]97 format: an optional file format. Default is ASAP.
[280]98 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]99 'SDFITS' (save as SDFITS file)
[200]100 'ASCII' (saves as ascii text file)
[226]101 'MS2' (saves as an aips++
102 MeasurementSet V2)
[1603]103 'FITS' (save as image FITS - not
104 readable by CLASS)
105 'CLASS' (save as FITS readable by CLASS)
[411]106 overwrite: whether to overwrite the file if it exists.
[256]107 The default (False) is to return with a warning
[411]108 without writing the output. USE WITH CARE.
[116]109 Example:
110 scan.save('myscan.asap')
[1118]111 scan.save('myscan.sdfits', 'SDFITS')
[116]112 """
[411]113 from os import path
[226]114 if format is None: format = rcParams['scantable.save']
[256]115 suffix = '.'+format.lower()
[1118]116 if name is None or name == "":
[256]117 name = 'scantable'+suffix
[718]118 msg = "No filename given. Using default name %s..." % name
119 asaplog.push(msg)
[411]120 name = path.expandvars(name)
[256]121 if path.isfile(name) or path.isdir(name):
122 if not overwrite:
[718]123 msg = "File %s exists." % name
124 if rcParams['verbose']:
[1612]125 #print msg
[1614]126 asaplog.push( msg )
127 print_log( 'ERROR' )
[718]128 return
129 else:
130 raise IOError(msg)
[451]131 format2 = format.upper()
132 if format2 == 'ASAP':
[116]133 self._save(name)
134 else:
[989]135 from asap._asap import stwriter as stw
[1118]136 writer = stw(format2)
137 writer.write(self, name)
[718]138 print_log()
[116]139 return
140
[102]141 def copy(self):
142 """
143 Return a copy of this scantable.
[1348]144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]146 Parameters:
[113]147 none
[102]148 Example:
149 copiedscan = scan.copy()
150 """
[876]151 sd = scantable(Scantable._copy(self))
[113]152 return sd
153
[1093]154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable with the specified scan number(s)
157 dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
[1612]166 #print "Please specify a scanno to drop from the scantable"
[1614]167 asaplog.push( 'Please specify a scanno to drop from the scantable' )
168 print_log( 'ERROR' )
[1093]169 return
170 else:
171 raise RuntimeError("No scan given")
172 try:
173 scanid = _to_list(scanid)
174 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
175 for sid in scanid: allscans.remove(sid)
[1118]176 if len(allscans) == 0:
177 raise ValueError("Can't remove all scans")
[1093]178 except ValueError:
179 if rcParams['verbose']:
[1612]180 #print "Couldn't find any match."
[1614]181 print_log()
182 asaplog.push( "Couldn't find any match." )
183 print_log( 'ERROR' )
[1093]184 return
185 else: raise
186 try:
187 bsel = self.get_selection()
188 sel = selector()
189 sel.set_scans(allscans)
190 self.set_selection(bsel+sel)
191 scopy = self._copy()
192 self.set_selection(bsel)
193 return scantable(scopy)
194 except RuntimeError:
[1118]195 if rcParams['verbose']:
[1612]196 #print "Couldn't find any match."
[1614]197 print_log()
198 asaplog.push( "Couldn't find any match." )
199 print_log( 'ERROR' )
[1118]200 else:
201 raise
[1093]202
203
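    # Illustrative usage sketch (not part of the original source): dropping scans
    # by number. The scan numbers are hypothetical.
    #     cleaned = scan.drop_scan([2, 3])   # new scantable without scans 2 and 3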
[102]204 def get_scan(self, scanid=None):
205 """
206 Return a specific scan (by scanno) or collection of scans (by
207 source name) in a new scantable.
[1348]208 Note:
209 See scantable.drop_scan() for the inverse operation.
[102]210 Parameters:
[513]211 scanid: a (list of) scanno or a source name, unix-style
212 patterns are accepted for source name matching, e.g.
213 '*_R' gets all 'ref' scans
[102]214 Example:
[513]215 # get all scans containing the source '323p459'
216 newscan = scan.get_scan('323p459')
217 # get all 'off' scans
218 refscans = scan.get_scan('*_R')
219 # get a subset of scans by scanno (as listed in scan.summary())
[1118]220 newscan = scan.get_scan([0, 2, 7, 10])
[102]221 """
222 if scanid is None:
[718]223 if rcParams['verbose']:
[1612]224 #print "Please specify a scan no or name to " \
225 # "retrieve from the scantable"
[1614]226 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
227 print_log( 'ERROR' )
[718]228 return
229 else:
230 raise RuntimeError("No scan given")
231
[102]232 try:
[946]233 bsel = self.get_selection()
234 sel = selector()
[102]235 if type(scanid) is str:
[946]236 sel.set_name(scanid)
237 self.set_selection(bsel+sel)
[876]238 scopy = self._copy()
[946]239 self.set_selection(bsel)
[876]240 return scantable(scopy)
[102]241 elif type(scanid) is int:
[946]242 sel.set_scans([scanid])
243 self.set_selection(bsel+sel)
[876]244 scopy = self._copy()
[946]245 self.set_selection(bsel)
[876]246 return scantable(scopy)
[381]247 elif type(scanid) is list:
[946]248 sel.set_scans(scanid)
249 self.set_selection(sel)
[876]250 scopy = self._copy()
[946]251 self.set_selection(bsel)
[876]252 return scantable(scopy)
[381]253 else:
[718]254 msg = "Illegal scanid type, use 'int' or 'list' if ints."
255 if rcParams['verbose']:
[1612]256 #print msg
[1614]257 asaplog.push( msg )
258 print_log( 'ERROR' )
[718]259 else:
260 raise TypeError(msg)
[102]261 except RuntimeError:
[1612]262 if rcParams['verbose']:
263 #print "Couldn't find any match."
[1614]264 print_log()
265 asaplog.push( "Couldn't find any match." )
266 print_log( 'ERROR' )
[718]267 else: raise
[102]268
269 def __str__(self):
[1118]270 return Scantable._summary(self, True)
[102]271
[976]272 def summary(self, filename=None):
[102]273 """
274 Print a summary of the contents of this scantable.
275 Parameters:
276 filename: the name of a file to write the output to
277 Default - no file output
[102]280 """
[976]281 info = Scantable._summary(self, True)
282 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]283 if filename is not None:
[256]284 if filename == "":
285 filename = 'scantable_summary.txt'
[415]286 from os.path import expandvars, isdir
[411]287 filename = expandvars(filename)
[415]288 if not isdir(filename):
[413]289 data = open(filename, 'w')
290 data.write(info)
291 data.close()
292 else:
[718]293 msg = "Illegal file name '%s'." % (filename)
294 if rcParams['verbose']:
[1612]295 #print msg
[1614]296 asaplog.push( msg )
297 print_log( 'ERROR' )
[718]298 else:
299 raise IOError(msg)
300 if rcParams['verbose']:
[794]301 try:
302 from IPython.genutils import page as pager
303 except ImportError:
304 from pydoc import pager
305 pager(info)
[718]306 else:
307 return info
[710]308
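    # Illustrative usage sketch (not part of the original source): print the
    # summary to the screen/pager, or write it to a (hypothetical) text file.
    #     scan.summary()
    #     scan.summary(filename='obs_summary.txt')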
[1603]309 def get_spectrum(self, rowno):
310 """Return the spectrum for the current row in the scantable as a list.
311 Parameters:
312 rowno: the row number to retrieve the spectrum from
313 """
314 return self._getspectrum(rowno)
[946]315
[1603]316 def get_mask(self, rowno):
317 """Return the mask for the current row in the scantable as a list.
318 Parameters:
319 rowno: the row number to retrieve the mask from
320 """
321 return self._getmask(rowno)
322
323 def set_spectrum(self, spec, rowno):
324 """Return the spectrum for the current row in the scantable as a list.
325 Parameters:
326 spec: the spectrum
327 rowno: the row number to set the spectrum for
328 """
329 assert(len(spec) == self.nchan())
330 return self._setspectrum(spec, rowno)
331
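    # Illustrative usage sketch (not part of the original source): reading a
    # spectrum, modifying it and writing it back to the same row. The scaling
    # factor is arbitrary.
    #     spec = scan.get_spectrum(0)
    #     spec = [2.0*v for v in spec]       # same length, so the nchan assertion holds
    #     scan.set_spectrum(spec, 0)
    #     msk = scan.get_mask(0)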
[946]332 def get_selection(self):
333 """
[1005]334 Get the selection object currently set on this scantable.
335 Parameters:
336 none
337 Example:
338 sel = scan.get_selection()
339 sel.set_ifs(0) # select IF 0
340 scan.set_selection(sel) # apply modified selection
[946]341 """
342 return selector(self._getselection())
343
[1005]344 def set_selection(self, selection=selector()):
[946]345 """
[1005]346 Select a subset of the data. All following operations on this scantable
347 are only applied to thi selection.
348 Parameters:
349 selection: a selector object (default unset the selection)
350 Examples:
351 sel = selector() # create a selection object
[1118]352 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]353 scan.set_selection(sel) # set the selection
354 scan.summary() # will only print summary of scanno 0 and 3
355 scan.set_selection() # unset the selection
[946]356 """
357 self._setselection(selection)
358
[1446]359 def get_row(self, row=0, insitu=None):
360 """
361 Select a row in the scantable.
362 Return a scantable with a single row.
363 Parameters:
364 row: row no of integration, default is 0.
365 insitu: if False a new scantable is returned.
366 Otherwise, the scaling is done in-situ
367 The default is taken from .asaprc (False)
368 """
369 if insitu is None: insitu = rcParams['insitu']
370 if not insitu:
371 workscan = self.copy()
372 else:
373 workscan = self
374 # Select a row
375 sel=selector()
376 sel.set_scans([workscan.getscan(row)])
377 sel.set_cycles([workscan.getcycle(row)])
378 sel.set_beams([workscan.getbeam(row)])
379 sel.set_ifs([workscan.getif(row)])
380 sel.set_polarisations([workscan.getpol(row)])
381 sel.set_name(workscan._getsourcename(row))
382 workscan.set_selection(sel)
383 if not workscan.nrow() == 1:
384 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
385 raise RuntimeError(msg)
386 del sel
387 if insitu:
388 self._assign(workscan)
389 else:
390 return workscan
391
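    # Illustrative usage sketch (not part of the original source): extracting a
    # single integration as its own scantable. The row number is hypothetical.
    #     onerow = scan.get_row(row=5, insitu=False)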
[876]392 def stats(self, stat='stddev', mask=None):
[102]393 """
[135]394 Determine the specified statistic of the current beam/if/pol
[102]395 Takes a 'mask' as an optional parameter to specify which
396 channels should be excluded.
397 Parameters:
[1517]398 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]399 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]400 mask: an optional mask specifying where the statistic
[102]401 should be determined.
402 Example:
[113]403 scan.set_unit('channel')
[1118]404 msk = scan.create_mask([100, 200], [500, 600])
[135]405 scan.stats(stat='mean', mask=msk)
[102]406 """
407 if mask == None:
[876]408 mask = []
[1118]409 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]410 if not self._check_ifs():
[1118]411 raise ValueError("Cannot apply mask as the IFs have different "
412 "number of channels. Please use setselection() "
413 "to select individual IFs")
[1530]414 rtnabc = False
415 if stat.lower().endswith('_abc'): rtnabc = True
416 getchan = False
417 if stat.lower().startswith('min') or stat.lower().startswith('max'):
[1517]418 chan = self._math._minmaxchan(self, mask, stat)
419 getchan = True
[1515]420 statvals = []
[1530]421 if not rtnabc: statvals = self._math._stats(self, mask, stat)
422
[876]423 out = ''
424 axes = []
425 for i in range(self.nrow()):
426 axis = []
427 axis.append(self.getscan(i))
428 axis.append(self.getbeam(i))
429 axis.append(self.getif(i))
430 axis.append(self.getpol(i))
431 axis.append(self.getcycle(i))
432 axes.append(axis)
433 tm = self._gettime(i)
434 src = self._getsourcename(i)
[1530]435 refstr = ''
436 statunit= ''
[1517]437 if getchan:
438 qx, qy = self.chan2data(rowno=i, chan=chan[i])
[1530]439 if rtnabc:
440 statvals.append(qx['value'])
441 refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
442 statunit= '['+qx['unit']+']'
443 else:
444 refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
445 #statunit= ' ['+qy['unit']+']'
[876]446 out += 'Scan[%d] (%s) ' % (axis[0], src)
447 out += 'Time[%s]:\n' % (tm)
448 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
449 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
450 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
[1530]451 out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
[876]452 out += "--------------------------------------------------\n"
[256]453
[876]454 if rcParams['verbose']:
[1615]455 import os
[1613]456 usr=os.environ['USER']
457 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
[1612]458 f=open(tmpfile,'w')
459 print >> f, "--------------------------------------------------"
460 print >> f, " ", stat, statunit
461 print >> f, "--------------------------------------------------"
462 print >> f, out
463 f.close()
464 f=open(tmpfile,'r')
465 x=f.readlines()
466 f.close()
467 for xx in x:
[1614]468 asaplog.push( xx )
469 print_log()
[1295]470 #else:
471 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
472 # 'axes' : axes,
473 # 'data': statvals}
474 return statvals
[102]475
[1517]476 def chan2data(self, rowno=0, chan=0):
[1515]477 """
[1517]478 Returns channel/frequency/velocity and spectral value
[1515]479 at an arbitrary row and channel in the scantable.
480 Parameters:
481 rowno: a row number in the scantable. Default is the
482 first row, i.e. rowno=0
[1517]483 chan: a channel in the scantable. Default is the first
[1515]484 channel, i.e. chan=0
485 """
[1517]486 if isinstance(rowno, int) and isinstance(chan, int):
[1530]487 qx = {'unit': self.get_unit(),
488 'value': self._getabcissa(rowno)[chan]}
[1517]489 qy = {'unit': self.get_fluxunit(),
490 'value': self._getspectrum(rowno)[chan]}
491 return qx, qy
[1515]492
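    # Illustrative usage sketch (not part of the original source): looking up the
    # abcissa and spectral value of one channel. Row/channel numbers are hypothetical.
    #     qx, qy = scan.chan2data(rowno=0, chan=128)
    #     print qx['value'], qx['unit'], qy['value'], qy['unit']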
[1118]493 def stddev(self, mask=None):
[135]494 """
495 Determine the standard deviation of the current beam/if/pol
496 Takes a 'mask' as an optional parameter to specify which
497 channels should be excluded.
498 Parameters:
499 mask: an optional mask specifying where the standard
500 deviation should be determined.
501
502 Example:
503 scan.set_unit('channel')
[1118]504 msk = scan.create_mask([100, 200], [500, 600])
[135]505 scan.stddev(mask=msk)
506 """
[1118]507 return self.stats(stat='stddev', mask=mask);
[135]508
[1003]509
[1259]510 def get_column_names(self):
[1003]511 """
512 Return a list of column names, which can be used for selection.
513 """
[1259]514 return list(Scantable.get_column_names(self))
[1003]515
[876]516 def get_tsys(self):
[113]517 """
518 Return the System temperatures.
519 Returns:
[876]520 a list of Tsys values for the current selection
[113]521 """
[256]522
[876]523 return self._row_callback(self._gettsys, "Tsys")
[256]524
[876]525 def _row_callback(self, callback, label):
526 axes = []
[1118]527 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]528 out = ""
[1118]529 outvec = []
[876]530 for i in range(self.nrow()):
531 axis = []
532 axis.append(self.getscan(i))
533 axis.append(self.getbeam(i))
534 axis.append(self.getif(i))
535 axis.append(self.getpol(i))
536 axis.append(self.getcycle(i))
537 axes.append(axis)
538 tm = self._gettime(i)
539 src = self._getsourcename(i)
540 out += 'Scan[%d] (%s) ' % (axis[0], src)
541 out += 'Time[%s]:\n' % (tm)
542 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
543 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
544 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
545 outvec.append(callback(i))
546 out += '= %3.3f\n' % (outvec[i])
547 out += "--------------------------------------------------\n"
548 if rcParams['verbose']:
[1615]549 asaplog.push("--------------------------------------------------")
550 asaplog.push(" %s" % (label))
551 asaplog.push("--------------------------------------------------")
552 asaplog.push(out)
[1614]553 print_log()
[1175]554 # disabled because the vector seems more useful
555 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
556 return outvec
[256]557
[1070]558 def _get_column(self, callback, row=-1):
559 """
560 """
561 if row == -1:
562 return [callback(i) for i in range(self.nrow())]
563 else:
564 if 0 <= row < self.nrow():
565 return callback(row)
[256]566
[1070]567
[1348]568 def get_time(self, row=-1, asdatetime=False):
[113]569 """
570 Get a list of time stamps for the observations.
[1348]571 Return a string (or a datetime object if asdatetime=True) for each integration time stamp in the scantable.
[113]572 Parameters:
[1348]573 row: row no of integration. Default -1 return all rows
574 asdatetime: return values as datetime objects rather than strings
[113]575 Example:
576 none
577 """
[1175]578 from time import strptime
579 from datetime import datetime
[1457]580 times = self._get_column(self._gettime, row)
[1348]581 if not asdatetime:
[1603]582 return times
[1175]583 format = "%Y/%m/%d/%H:%M:%S"
584 if isinstance(times, list):
585 return [datetime(*strptime(i, format)[:6]) for i in times]
586 else:
587 return datetime(*strptime(times, format)[:6])
[102]588
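    # Illustrative usage sketch (not part of the original source): time stamps as
    # strings or as datetime objects.
    #     tstrings = scan.get_time()                  # all rows, as strings
    #     t0 = scan.get_time(row=0, asdatetime=True)  # first row, as datetime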
[1348]589
590 def get_inttime(self, row=-1):
591 """
592 Get a list of integration times for the observations.
593 Return a time in seconds for each integration in the scantable.
594 Parameters:
595 row: row no of integration. Default -1 return all rows.
596 Example:
597 none
598 """
599 return self._get_column(self._getinttime, row)
600
601
[714]602 def get_sourcename(self, row=-1):
603 """
[794]604 Get a list of source names for the observations.
[714]605 Return a string for each integration in the scantable.
606 Parameters:
[1348]607 row: row no of integration. Default -1 return all rows.
[714]608 Example:
609 none
610 """
[1070]611 return self._get_column(self._getsourcename, row)
[714]612
[794]613 def get_elevation(self, row=-1):
614 """
615 Get a list of elevations for the observations.
616 Return a float for each integration in the scantable.
617 Parameters:
[1348]618 row: row no of integration. Default -1 return all rows.
[794]619 Example:
620 none
621 """
[1070]622 return self._get_column(self._getelevation, row)
[794]623
624 def get_azimuth(self, row=-1):
625 """
626 Get a list of azimuths for the observations.
627 Return a float for each integration in the scantable.
628 Parameters:
[1348]629 row: row no of integration. Default -1 return all rows.
[794]630 Example:
631 none
632 """
[1070]633 return self._get_column(self._getazimuth, row)
[794]634
635 def get_parangle(self, row=-1):
636 """
637 Get a list of parallactic angles for the observations.
638 Return a float for each integration in the scantable.
639 Parameters:
[1348]640 row: row no of integration. Default -1 return all rows.
[794]641 Example:
642 none
643 """
[1070]644 return self._get_column(self._getparangle, row)
[794]645
[1070]646 def get_direction(self, row=-1):
647 """
648 Get a list of Positions on the sky (direction) for the observations.
649 Return a float for each integration in the scantable.
650 Parameters:
651 row: row no of integration. Default -1 return all rows
652 Example:
653 none
654 """
655 return self._get_column(self._getdirection, row)
656
[1389]657 def get_directionval(self, row=-1):
658 """
659 Get a list of Positions on the sky (direction) for the observations.
660 Return a float for each integration in the scantable.
661 Parameters:
662 row: row no of integration. Default -1 return all rows
663 Example:
664 none
665 """
666 return self._get_column(self._getdirectionvec, row)
667
[102]668 def set_unit(self, unit='channel'):
669 """
670 Set the unit for all following operations on this scantable
671 Parameters:
672 unit: optional unit, default is 'channel'
[1118]673 one of '*Hz', 'km/s', 'channel', ''
[102]674 """
[484]675 varlist = vars()
[1118]676 if unit in ['', 'pixel', 'channel']:
[113]677 unit = ''
678 inf = list(self._getcoordinfo())
679 inf[0] = unit
680 self._setcoordinfo(inf)
[1118]681 self._add_history("set_unit", varlist)
[113]682
[484]683 def set_instrument(self, instr):
[358]684 """
[1348]685 Set the instrument for subsequent processing.
[358]686 Parameters:
[710]687 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]688 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]689 """
690 self._setInstrument(instr)
[1118]691 self._add_history("set_instument", vars())
[718]692 print_log()
[358]693
[1190]694 def set_feedtype(self, feedtype):
695 """
696 Overwrite the feed type, which might not be set correctly.
697 Parameters:
698 feedtype: 'linear' or 'circular'
699 """
700 self._setfeedtype(feedtype)
701 self._add_history("set_feedtype", vars())
702 print_log()
703
[276]704 def set_doppler(self, doppler='RADIO'):
705 """
706 Set the doppler for all following operations on this scantable.
707 Parameters:
708 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
709 """
[484]710 varlist = vars()
[276]711 inf = list(self._getcoordinfo())
712 inf[2] = doppler
713 self._setcoordinfo(inf)
[1118]714 self._add_history("set_doppler", vars())
[718]715 print_log()
[710]716
[226]717 def set_freqframe(self, frame=None):
[113]718 """
719 Set the frame type of the Spectral Axis.
720 Parameters:
[591]721 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]722 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
723 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]724 Examples:
725 scan.set_freqframe('BARY')
726 """
[484]727 if frame is None: frame = rcParams['scantable.freqframe']
728 varlist = vars()
[1118]729 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
730 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]731
[989]732 if frame in valid:
[113]733 inf = list(self._getcoordinfo())
734 inf[1] = frame
735 self._setcoordinfo(inf)
[1118]736 self._add_history("set_freqframe", varlist)
[102]737 else:
[1118]738 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]739 if rcParams['verbose']:
[1612]740 #print msg
[1614]741 asaplog.push( msg )
742 print_log( 'ERROR' )
[718]743 else:
744 raise TypeError(msg)
745 print_log()
[710]746
[989]747 def set_dirframe(self, frame=""):
748 """
749 Set the frame type of the Direction on the sky.
750 Parameters:
751 frame: an optional frame type, default ''. Valid frames are:
752 'J2000', 'B1950', 'GALACTIC'
753 Examples:
754 scan.set_dirframe('GALACTIC')
755 """
756 varlist = vars()
757 try:
758 Scantable.set_dirframe(self, frame)
[1118]759 except RuntimeError, msg:
[989]760 if rcParams['verbose']:
[1612]761 #print msg
[1614]762 print_log()
763 asaplog.push( msg )
764 print_log( 'ERROR' )
[989]765 else:
766 raise
[1118]767 self._add_history("set_dirframe", varlist)
[989]768
[113]769 def get_unit(self):
770 """
771 Get the default unit set in this scantable
772 Returns:
773 A unit string
774 """
775 inf = self._getcoordinfo()
776 unit = inf[0]
777 if unit == '': unit = 'channel'
778 return unit
[102]779
[158]780 def get_abcissa(self, rowno=0):
[102]781 """
[158]782 Get the abcissa in the current coordinate setup for the currently
[113]783 selected Beam/IF/Pol
784 Parameters:
[226]785 rowno: an optional row number in the scantable. Default is the
786 first row, i.e. rowno=0
[113]787 Returns:
[1348]788 The abcissa values and the format string (as a tuple)
[113]789 """
[256]790 abc = self._getabcissa(rowno)
[710]791 lbl = self._getabcissalabel(rowno)
[718]792 print_log()
[158]793 return abc, lbl
[113]794
[1401]795 def flag(self, mask=None, unflag=False):
[1001]796 """
797 Flag the selected data using an optional channel mask.
798 Parameters:
799 mask: an optional channel mask, created with create_mask. Default
800 (no mask) is all channels.
[1401]801 unflag: if True, unflag the data
[1001]802 """
803 varlist = vars()
[1118]804 if mask is None:
805 mask = []
[1001]806 try:
[1401]807 self._flag(mask, unflag)
[1118]808 except RuntimeError, msg:
[1001]809 if rcParams['verbose']:
[1612]810 #print msg
[1614]811 print_log()
812 asaplog.push( msg )
813 print_log( 'ERROR' )
[1001]814 return
815 else: raise
816 self._add_history("flag", varlist)
817
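    # Illustrative usage sketch (not part of the original source): flagging a
    # channel range and undoing it again. The channel numbers are hypothetical.
    #     msk = scan.create_mask([400, 500])
    #     scan.flag(mask=msk)                # flag channels 400-500
    #     scan.flag(mask=msk, unflag=True)   # unflag them again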
[1203]818 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]819 """
820 Flag the data in 'lag' space by providing a frequency to remove.
821 Flagged data in the scantable gets set to 0.0 before the fft.
822 No taper is applied.
823 Parameters:
[1348]824 frequency: the frequency (really a period within the bandwidth)
825 to remove
826 width: the width of the frequency to remove, to remove a
[1603]827 range of frequencies around the centre.
[1203]828 unit: the frequency unit (default "GHz")
829 Notes:
[1348]830 It is recommended to flag edges of the band or strong
831 signals beforehand.
[1192]832 """
833 if insitu is None: insitu = rcParams['insitu']
834 self._math._setinsitu(insitu)
835 varlist = vars()
[1370]836 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]837 if not base.has_key(unit):
838 raise ValueError("%s is not a valid unit." % unit)
839 try:
[1200]840 s = scantable(self._math._lag_flag(self, frequency*base[unit],
841 width*base[unit]))
[1192]842 except RuntimeError, msg:
843 if rcParams['verbose']:
[1612]844 #print msg
[1614]845 print_log()
846 asaplog.push( msg )
847 print_log( 'ERROR' )
[1192]848 return
849 else: raise
850 s._add_history("lag_flag", varlist)
851 print_log()
852 if insitu:
853 self._assign(s)
854 else:
855 return s
[1001]856
[1192]857
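    # Illustrative usage sketch (not part of the original source): removing a
    # ripple-like feature in lag space. The frequency and width values are hypothetical.
    #     cleaned = scan.lag_flag(frequency=5.0, width=0.1, unit="GHz", insitu=False)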
[113]858 def create_mask(self, *args, **kwargs):
859 """
[1118]860 Compute and return a mask based on [min, max] windows.
[189]861 The specified windows are to be INCLUDED, when the mask is
[113]862 applied.
[102]863 Parameters:
[1118]864 [min, max], [min2, max2], ...
[1024]865 Pairs of start/end points (inclusive) specifying the regions
[102]866 to be masked
[189]867 invert: optional argument. If specified as True,
868 return an inverted mask, i.e. the regions
869 specified are EXCLUDED
[513]870 row: create the mask using the specified row for
871 unit conversions, default is row=0
872 only necessary if frequency varies over rows.
[102]873 Example:
[113]874 scan.set_unit('channel')
875 a)
[1118]876 msk = scan.create_mask([400, 500], [800, 900])
[189]877 # masks everything outside 400 and 500
[113]878 # and 800 and 900 in the unit 'channel'
879
880 b)
[1118]881 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]882 # masks the regions between 400 and 500
[113]883 # and 800 and 900 in the unit 'channel'
[1024]884 c)
885 mask only channel 400
[1118]886 msk = scan.create_mask([400, 400])
[102]887 """
[513]888 row = 0
889 if kwargs.has_key("row"):
890 row = kwargs.get("row")
891 data = self._getabcissa(row)
[113]892 u = self._getcoordinfo()[0]
[718]893 if rcParams['verbose']:
[113]894 if u == "": u = "channel"
[718]895 msg = "The current mask window unit is %s" % u
[1118]896 i = self._check_ifs()
897 if not i:
[876]898 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]899 asaplog.push(msg)
[102]900 n = self.nchan()
[1295]901 msk = _n_bools(n, False)
[710]902 # test if args is a 'list' or a 'normal *args - UGLY!!!
903
[1118]904 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
905 and args or args[0]
[710]906 for window in ws:
[102]907 if (len(window) != 2 or window[0] > window[1] ):
[1118]908 raise TypeError("A window needs to be defined as [min, max]")
[102]909 for i in range(n):
[1024]910 if data[i] >= window[0] and data[i] <= window[1]:
[1295]911 msk[i] = True
[113]912 if kwargs.has_key('invert'):
913 if kwargs.get('invert'):
[1295]914 msk = mask_not(msk)
[718]915 print_log()
[102]916 return msk
[710]917
[1446]918 def get_masklist(self, mask=None, row=0):
[256]919 """
[1446]920 Compute and return a list of mask windows, [min, max].
921 Parameters:
922 mask: channel mask, created with create_mask.
923 row: calculate the masklist using the specified row
924 for unit conversions, default is row=0
925 only necessary if frequency varies over rows.
926 Returns:
927 [min, max], [min2, max2], ...
928 Pairs of start/end points (inclusive) specifying
929 the masked regions
930 """
931 if not (isinstance(mask,list) or isinstance(mask, tuple)):
932 raise TypeError("The mask should be list or tuple.")
933 if len(mask) < 2:
934 raise TypeError("The mask elements should be > 1")
935 if self.nchan() != len(mask):
936 msg = "Number of channels in scantable != number of mask elements"
937 raise TypeError(msg)
938 data = self._getabcissa(row)
939 u = self._getcoordinfo()[0]
940 if rcParams['verbose']:
941 if u == "": u = "channel"
942 msg = "The current mask window unit is %s" % u
943 i = self._check_ifs()
944 if not i:
945 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
946 asaplog.push(msg)
947 masklist=[]
948 ist, ien = None, None
949 ist, ien=self.get_mask_indices(mask)
950 if ist is not None and ien is not None:
951 for i in xrange(len(ist)):
952 range=[data[ist[i]],data[ien[i]]]
953 range.sort()
954 masklist.append([range[0],range[1]])
955 return masklist
956
957 def get_mask_indices(self, mask=None):
958 """
959 Compute and Return lists of mask start indices and mask end indices.
960 Parameters:
961 mask: channel mask, created with create_mask.
962 Returns:
963 List of mask start indices and that of mask end indices,
964 i.e., [istart1,istart2,....], [iend1,iend2,....].
965 """
966 if not (isinstance(mask,list) or isinstance(mask, tuple)):
967 raise TypeError("The mask should be list or tuple.")
968 if len(mask) < 2:
969 raise TypeError("The mask elements should be > 1")
970 istart=[]
971 iend=[]
972 if mask[0]: istart.append(0)
973 for i in range(len(mask)-1):
974 if not mask[i] and mask[i+1]:
975 istart.append(i+1)
976 elif mask[i] and not mask[i+1]:
977 iend.append(i)
978 if mask[len(mask)-1]: iend.append(len(mask)-1)
979 if len(istart) != len(iend):
980 raise RuntimeError("Numbers of mask start != mask end.")
981 for i in range(len(istart)):
982 if istart[i] > iend[i]:
983 raise RuntimeError("Mask start index > mask end index")
984 break
985 return istart,iend
986
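    # Illustrative usage sketch (not part of the original source): converting a
    # channel mask back into [min, max] windows and start/end indices. Channel
    # numbers are hypothetical.
    #     msk = scan.create_mask([400, 500], [800, 900])
    #     windows = scan.get_masklist(msk)        # e.g. [[min1, max1], [min2, max2]]
    #     ist, ien = scan.get_mask_indices(msk)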
987# def get_restfreqs(self):
988# """
989# Get the restfrequency(s) stored in this scantable.
990# The return value(s) are always of unit 'Hz'
991# Parameters:
992# none
993# Returns:
994# a list of doubles
995# """
996# return list(self._getrestfreqs())
997
998 def get_restfreqs(self, ids=None):
999 """
[256]1000 Get the restfrequency(s) stored in this scantable.
1001 The return value(s) are always of unit 'Hz'
1002 Parameters:
[1446]1003 ids: (optional) a list of MOLECULE_IDs for which the
1004 restfrequency(s) are to be retrieved
[256]1005 Returns:
[1446]1006 dictionary containing ids and a list of doubles for each id
[256]1007 """
[1446]1008 if ids is None:
1009 rfreqs={}
1010 idlist = self.getmolnos()
1011 for i in idlist:
1012 rfreqs[i]=list(self._getrestfreqs(i))
1013 return rfreqs
1014 else:
1015 if type(ids)==list or type(ids)==tuple:
1016 rfreqs={}
1017 for i in ids:
1018 rfreqs[i]=list(self._getrestfreqs(i))
1019 return rfreqs
1020 else:
1021 return list(self._getrestfreqs(ids))
1022 #return list(self._getrestfreqs(ids))
[102]1023
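    # Illustrative usage sketch (not part of the original source): retrieving rest
    # frequencies for all MOLECULE_IDs or for a single (hypothetical) id.
    #     allrf = scan.get_restfreqs()      # dict of {id: [freqs in Hz]}
    #     rf0 = scan.get_restfreqs(ids=0)   # list of doubles for id 0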
[931]1024 def set_restfreqs(self, freqs=None, unit='Hz'):
1025 """
[1446]1026 ********NEED TO BE UPDATED begin************
[931]1027 Set or replace the specified restfrequency.
1028 If the 'freqs' argument holds a scalar,
1029 then that rest frequency will be applied to all the selected
1030 data. If the 'freqs' argument holds
1031 a vector, then it MUST be of equal or smaller length than
1032 the number of IFs (and the available restfrequencies will be
1033 replaced by this vector). In this case, *all* data have
1034 the restfrequency set per IF according
1035 to the corresponding value you give in the 'freqs' vector.
[1118]1036 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1037 IF 1 gets restfreq 2e9.
[1446]1038 ********NEED TO BE UPDATED end************
[1603]1039 You can also specify the frequencies via a linecatalog.
[1153]1040
[931]1041 Parameters:
1042 freqs: list of rest frequency values or string identifiers
1043 unit: unit for rest frequency (default 'Hz')
[402]1044
[931]1045 Example:
[1446]1046 # set the given restfrequency for the all currently selected IFs
[931]1047 scan.set_restfreqs(freqs=1.4e9)
[1446]1048 # set multiple restfrequencies to all the selected data
1049 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1050 # If the number of IFs in the data is >= 2 the IF0 gets the first
1051 # value IF1 the second... NOTE that freqs needs to be
1052 # specified in list of list (e.g. [[],[],...] ).
1053 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]1054 #set the given restfrequency for the whole table (by name)
1055 scan.set_restfreqs(freqs="OH1667")
[391]1056
[931]1057 Note:
1058 To do more sophisticated restfrequency setting, e.g. on a
1059 source and IF basis, use scantable.set_selection() before using
1060 this function.
1061 # provided your scantable is called scan
1062 selection = selector()
1063 selection.set_name("ORION*")
1064 selection.set_ifs([1])
1065 scan.set_selection(selection)
1066 scan.set_restfreqs(freqs=86.6e9)
1067
1068 """
1069 varlist = vars()
[1157]1070 from asap import linecatalog
1071 # simple value
[1118]1072 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]1073 # TT mod
1074 #self._setrestfreqs(freqs, "",unit)
1075 self._setrestfreqs([freqs], [""],unit)
[1157]1076 # list of values
[1118]1077 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1078 # list values are scalars
[1118]1079 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]1080 self._setrestfreqs(freqs, [""],unit)
[1157]1081 # list values are dicts of (value, name)
1082 elif isinstance(freqs[-1], dict):
[1446]1083 #sel = selector()
1084 #savesel = self._getselection()
1085 #iflist = self.getifnos()
1086 #for i in xrange(len(freqs)):
1087 # sel.set_ifs(iflist[i])
1088 # self._setselection(sel)
1089 # self._setrestfreqs(freqs[i], "",unit)
1090 #self._setselection(savesel)
1091 self._setrestfreqs(freqs["value"],
1092 freqs["name"], "MHz")
1093 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1094 sel = selector()
1095 savesel = self._getselection()
[1322]1096 iflist = self.getifnos()
[1446]1097 if len(freqs)>len(iflist):
1098 raise ValueError("number of elements in list of list exeeds the current IF selections")
[1157]1099 for i in xrange(len(freqs)):
[1322]1100 sel.set_ifs(iflist[i])
[1259]1101 self._setselection(sel)
[1157]1102 self._setrestfreqs(freqs[i]["value"],
1103 freqs[i]["name"], "MHz")
1104 self._setselection(savesel)
1105 # freqs are to be taken from a linecatalog
[1153]1106 elif isinstance(freqs, linecatalog):
1107 sel = selector()
1108 savesel = self._getselection()
 iflist = self.getifnos() # fetch IF numbers here; otherwise 'iflist' is undefined in this branch
1109 for i in xrange(freqs.nrow()):
[1322]1110 sel.set_ifs(iflist[i])
[1153]1111 self._setselection(sel)
1112 self._setrestfreqs(freqs.get_frequency(i),
1113 freqs.get_name(i), "MHz")
1114 # ensure that we are not iterating past nIF
1115 if i == self.nif()-1: break
1116 self._setselection(savesel)
[931]1117 else:
1118 return
1119 self._add_history("set_restfreqs", varlist)
1120
[1360]1121 def shift_refpix(self, delta):
1122 """
1123 Shift the reference pixel of the Spectra Coordinate by an
1124 integer amount.
1125 Parameters:
1126 delta: the amount to shift by
1127 Note:
1128 Be careful using this with broadband data.
1129 """
1130 Scantable.shift(self, delta)
[931]1131
[1259]1132 def history(self, filename=None):
1133 """
1134 Print the history. Optionally to a file.
[1348]1135 Parameters:
1136 filename: The name of the file to save the history to.
[1259]1137 """
[484]1138 hist = list(self._gethistory())
[794]1139 out = "-"*80
[484]1140 for h in hist:
[489]1141 if h.startswith("---"):
[794]1142 out += "\n"+h
[489]1143 else:
1144 items = h.split("##")
1145 date = items[0]
1146 func = items[1]
1147 items = items[2:]
[794]1148 out += "\n"+date+"\n"
1149 out += "Function: %s\n Parameters:" % (func)
[489]1150 for i in items:
1151 s = i.split("=")
[1118]1152 out += "\n %s = %s" % (s[0], s[1])
[794]1153 out += "\n"+"-"*80
[1259]1154 if filename is not None:
1155 if filename == "":
1156 filename = 'scantable_history.txt'
1157 import os
1158 filename = os.path.expandvars(os.path.expanduser(filename))
1159 if not os.path.isdir(filename):
1160 data = open(filename, 'w')
1161 data.write(out)
1162 data.close()
1163 else:
1164 msg = "Illegal file name '%s'." % (filename)
1165 if rcParams['verbose']:
[1612]1166 #print msg
[1614]1167 asaplog.push( msg )
1168 print_log( 'ERROR' )
[1259]1169 else:
1170 raise IOError(msg)
1171 if rcParams['verbose']:
1172 try:
1173 from IPython.genutils import page as pager
1174 except ImportError:
1175 from pydoc import pager
1176 pager(out)
1177 else:
1178 return out
[484]1179 return
[513]1180 #
1181 # Maths business
1182 #
1183
[931]1184 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1185 """
[1070]1186 Return the (time) weighted average of a scan.
[513]1187 Note:
[1070]1188 in channels only - align if necessary
[513]1189 Parameters:
1190 mask: an optional mask (only used for 'var' and 'tsys'
1191 weighting)
[558]1192 scanav: True averages each scan separately
1193 False (default) averages all scans together,
[1099]1194 weight: Weighting scheme.
1195 'none' (mean no weight)
1196 'var' (1/var(spec) weighted)
1197 'tsys' (1/Tsys**2 weighted)
1198 'tint' (integration time weighted)
1199 'tintsys' (Tint/Tsys**2)
1200 'median' ( median averaging)
[535]1201 The default is 'tint'
[931]1202 align: align the spectra in velocity before averaging. It takes
1203 the time of the first spectrum as reference time.
[513]1204 Example:
1205 # time average the scantable without using a mask
[710]1206 newscan = scan.average_time()
[513]1207 """
1208 varlist = vars()
[976]1209 if weight is None: weight = 'TINT'
[513]1210 if mask is None: mask = ()
[1099]1211 if scanav: scanav = "SCAN"
1212 else: scanav = "NONE"
[1118]1213 scan = (self, )
[989]1214 try:
[1118]1215 if align:
1216 scan = (self.freq_align(insitu=False), )
1217 s = None
1218 if weight.upper() == 'MEDIAN':
1219 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1220 scanav))
1221 else:
1222 s = scantable(self._math._average(scan, mask, weight.upper(),
1223 scanav))
1224 except RuntimeError, msg:
[989]1225 if rcParams['verbose']:
[1612]1226 #print msg
[1614]1227 print_log()
1228 asaplog.push( msg )
1229 print_log( 'ERROR' )
[989]1230 return
1231 else: raise
[1099]1232 s._add_history("average_time", varlist)
[718]1233 print_log()
[513]1234 return s
[710]1235
[876]1236 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1237 """
1238 Return a scan where all spectra are converted to either
1239 Jansky or Kelvin depending upon the flux units of the scan table.
1240 By default the function tries to look the values up internally.
1241 If it can't find them (or if you want to over-ride), you must
1242 specify EITHER jyperk OR eta (and D which it will try to look up
1243 also if you don't set it). jyperk takes precedence if you set both.
1244 Parameters:
1245 jyperk: the Jy / K conversion factor
1246 eta: the aperture efficiency
1247 d: the geometric diameter (metres)
1248 insitu: if False a new scantable is returned.
1249 Otherwise, the scaling is done in-situ
1250 The default is taken from .asaprc (False)
1251 """
1252 if insitu is None: insitu = rcParams['insitu']
[876]1253 self._math._setinsitu(insitu)
[513]1254 varlist = vars()
1255 if jyperk is None: jyperk = -1.0
1256 if d is None: d = -1.0
1257 if eta is None: eta = -1.0
[876]1258 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1259 s._add_history("convert_flux", varlist)
1260 print_log()
1261 if insitu: self._assign(s)
1262 else: return s
[513]1263
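    # Illustrative usage sketch (not part of the original source): converting
    # between K and Jy with an explicit (hypothetical) Jy/K factor.
    #     sjy = scan.convert_flux(jyperk=12.5, insitu=False)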
[876]1264 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1265 """
1266 Return a scan after applying a gain-elevation correction.
1267 The correction can be made via either a polynomial or a
1268 table-based interpolation (and extrapolation if necessary).
1269 You specify polynomial coefficients, an ascii table or neither.
1270 If you specify neither, then a polynomial correction will be made
1271 with built in coefficients known for certain telescopes (an error
1272 will occur if the instrument is not known).
1273 The data and Tsys are *divided* by the scaling factors.
1274 Parameters:
1275 poly: Polynomial coefficients (default None) to compute a
1276 gain-elevation correction as a function of
1277 elevation (in degrees).
1278 filename: The name of an ascii file holding correction factors.
1279 The first row of the ascii file must give the column
1280 names and these MUST include columns
1281 "ELEVATION" (degrees) and "FACTOR" (multiply data
1282 by this) somewhere.
1283 The second row must give the data type of the
1284 column. Use 'R' for Real and 'I' for Integer.
1285 An example file would be
1286 (actual factors are arbitrary) :
1287
1288 TIME ELEVATION FACTOR
1289 R R R
1290 0.1 0 0.8
1291 0.2 20 0.85
1292 0.3 40 0.9
1293 0.4 60 0.85
1294 0.5 80 0.8
1295 0.6 90 0.75
1296 method: Interpolation method when correcting from a table.
1297 Values are "nearest", "linear" (default), "cubic"
1298 and "spline"
1299 insitu: if False a new scantable is returned.
1300 Otherwise, the scaling is done in-situ
1301 The default is taken from .asaprc (False)
1302 """
1303
1304 if insitu is None: insitu = rcParams['insitu']
[876]1305 self._math._setinsitu(insitu)
[513]1306 varlist = vars()
1307 if poly is None:
[1118]1308 poly = ()
[513]1309 from os.path import expandvars
1310 filename = expandvars(filename)
[876]1311 s = scantable(self._math._gainel(self, poly, filename, method))
1312 s._add_history("gain_el", varlist)
1313 print_log()
1314 if insitu: self._assign(s)
1315 else: return s
[710]1316
[931]1317 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1318 """
1319 Return a scan where all rows have been aligned in frequency/velocity.
1320 The alignment frequency frame (e.g. LSRK) is that set by function
1321 set_freqframe.
1322 Parameters:
1323 reftime: reference time to align at. By default, the time of
1324 the first row of data is used.
1325 method: Interpolation method for regridding the spectra.
1326 Choose from "nearest", "linear", "cubic" (default)
1327 and "spline"
1328 insitu: if False a new scantable is returned.
1329 Otherwise, the scaling is done in-situ
1330 The default is taken from .asaprc (False)
1331 """
[931]1332 if insitu is None: insitu = rcParams["insitu"]
[876]1333 self._math._setinsitu(insitu)
[513]1334 varlist = vars()
[931]1335 if reftime is None: reftime = ""
1336 s = scantable(self._math._freq_align(self, reftime, method))
[876]1337 s._add_history("freq_align", varlist)
1338 print_log()
1339 if insitu: self._assign(s)
1340 else: return s
[513]1341
[876]1342 def opacity(self, tau, insitu=None):
[513]1343 """
1344 Apply an opacity correction. The data
1345 and Tsys are multiplied by the correction factor.
1346 Parameters:
1347 tau: Opacity from which the correction factor is
1348 exp(tau*ZD)
1349 where ZD is the zenith-distance
1350 insitu: if False a new scantable is returned.
1351 Otherwise, the scaling is done in-situ
1352 The default is taken from .asaprc (False)
1353 """
1354 if insitu is None: insitu = rcParams['insitu']
[876]1355 self._math._setinsitu(insitu)
[513]1356 varlist = vars()
[876]1357 s = scantable(self._math._opacity(self, tau))
1358 s._add_history("opacity", varlist)
1359 print_log()
1360 if insitu: self._assign(s)
1361 else: return s
[513]1362
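    # Illustrative usage sketch (not part of the original source): applying an
    # opacity correction with a hypothetical zenith opacity.
    #     corrected = scan.opacity(tau=0.05, insitu=False)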
1363 def bin(self, width=5, insitu=None):
1364 """
1365 Return a scan where all spectra have been binned up.
[1348]1366 Parameters:
[513]1367 width: The bin width (default=5) in pixels
1368 insitu: if False a new scantable is returned.
1369 Otherwise, the scaling is done in-situ
1370 The default is taken from .asaprc (False)
1371 """
1372 if insitu is None: insitu = rcParams['insitu']
[876]1373 self._math._setinsitu(insitu)
[513]1374 varlist = vars()
[876]1375 s = scantable(self._math._bin(self, width))
[1118]1376 s._add_history("bin", varlist)
[876]1377 print_log()
1378 if insitu: self._assign(s)
1379 else: return s
[513]1380
[710]1381
[513]1382 def resample(self, width=5, method='cubic', insitu=None):
1383 """
[1348]1384 Return a scan where all spectra have been resampled (regridded).
1385
1386 Parameters:
[513]1387 width: The bin width (default=5) in pixels
1388 method: Interpolation method for the resampling.
1389 Values are "nearest", "linear", "cubic" (default)
1390 and "spline"
1391 insitu: if False a new scantable is returned.
1392 Otherwise, the scaling is done in-situ
1393 The default is taken from .asaprc (False)
1394 """
1395 if insitu is None: insitu = rcParams['insitu']
[876]1396 self._math._setinsitu(insitu)
[513]1397 varlist = vars()
[876]1398 s = scantable(self._math._resample(self, method, width))
[1118]1399 s._add_history("resample", varlist)
[876]1400 print_log()
1401 if insitu: self._assign(s)
1402 else: return s
[513]1403
1404
[946]1405 def average_pol(self, mask=None, weight='none'):
1406 """
1407 Average the Polarisations together.
1408 Parameters:
1409 mask: An optional mask defining the region, where the
1410 averaging will be applied. The output will have all
1411 specified points masked.
1412 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1413 weighted), or 'tsys' (1/Tsys**2 weighted)
1414 """
1415 varlist = vars()
1416 if mask is None:
1417 mask = ()
[1010]1418 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1419 s._add_history("average_pol", varlist)
[946]1420 print_log()
[992]1421 return s
[513]1422
[1145]1423 def average_beam(self, mask=None, weight='none'):
1424 """
1425 Average the Beams together.
1426 Parameters:
1427 mask: An optional mask defining the region, where the
1428 averaging will be applied. The output will have all
1429 specified points masked.
1430 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1431 weighted), or 'tsys' (1/Tsys**2 weighted)
1432 """
1433 varlist = vars()
1434 if mask is None:
1435 mask = ()
1436 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1437 s._add_history("average_beam", varlist)
1438 print_log()
1439 return s
1440
[992]1441 def convert_pol(self, poltype=None):
1442 """
1443 Convert the data to a different polarisation type.
1444 Parameters:
1445 poltype: The new polarisation type. Valid types are:
1446 "linear", "stokes" and "circular"
1447 """
1448 varlist = vars()
1449 try:
1450 s = scantable(self._math._convertpol(self, poltype))
[1118]1451 except RuntimeError, msg:
[992]1452 if rcParams['verbose']:
[1612]1453 #print msg
[1614]1454 print_log()
1455 asaplog.push( msg )
1456 print_log( 'ERROR' )
[1118]1457 return
[992]1458 else:
1459 raise
[1118]1460 s._add_history("convert_pol", varlist)
[992]1461 print_log()
1462 return s
1463
[876]1464 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1465 """
1466 Smooth the spectrum by the specified kernel (conserving flux).
1467 Parameters:
1468 kernel: The type of smoothing kernel. Select from
[1373]1469 'hanning' (default), 'gaussian', 'boxcar' and
1470 'rmedian'
[513]1471 width: The width of the kernel in pixels. For hanning this is
1472 ignored otherwise it defauls to 5 pixels.
1473 For 'gaussian' it is the Full Width Half
1474 Maximum. For 'boxcar' it is the full width.
[1373]1475 For 'rmedian' it is the half width.
[513]1476 insitu: if False a new scantable is returned.
1477 Otherwise, the scaling is done in-situ
1478 The default is taken from .asaprc (False)
1479 Example:
1480 none
1481 """
1482 if insitu is None: insitu = rcParams['insitu']
[876]1483 self._math._setinsitu(insitu)
[513]1484 varlist = vars()
[1118]1485 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1486 s._add_history("smooth", varlist)
1487 print_log()
1488 if insitu: self._assign(s)
1489 else: return s
[513]1490
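    # Illustrative usage sketch (not part of the original source): boxcar smoothing
    # with a hypothetical kernel width.
    #     smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)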
[876]1491
[1389]1492 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1493 """
1494 Return a scan which has been baselined (all rows) by a polynomial.
1495 Parameters:
[794]1496 mask: an optional mask
1497 order: the order of the polynomial (default is 0)
[1061]1498 plot: plot the fit and the residual. In this each
1499 indivual fit has to be approved, by typing 'y'
1500 or 'n'
[1389]1501 uselin: use linear polynomial fit
[794]1502 insitu: if False a new scantable is returned.
1503 Otherwise, the scaling is done in-situ
1504 The default is taken from .asaprc (False)
[513]1505 Example:
1506 # return a scan baselined by a third order polynomial,
1507 # not using a mask
1508 bscan = scan.poly_baseline(order=3)
[579]1509 """
[513]1510 if insitu is None: insitu = rcParams['insitu']
1511 varlist = vars()
1512 if mask is None:
[1295]1513 mask = [True for i in xrange(self.nchan(-1))]
[513]1514 from asap.asapfitter import fitter
[1217]1515 try:
1516 f = fitter()
1517 f.set_scan(self, mask)
[1389]1518 if uselin:
1519 f.set_function(lpoly=order)
1520 else:
1521 f.set_function(poly=order)
[1217]1522 s = f.auto_fit(insitu, plot=plot)
[1446]1523 # Save parameters of baseline fits as a class attribute.
1524 # NOTICE: It does not reflect changes in scantable!
1525 self.blpars = f.blpars
[1217]1526 s._add_history("poly_baseline", varlist)
1527 print_log()
1528 if insitu: self._assign(s)
1529 else: return s
1530 except RuntimeError:
1531 msg = "The fit failed, possibly because it didn't converge."
1532 if rcParams['verbose']:
[1612]1533 #print msg
[1614]1534 print_log()
1535 asaplog.push( msg )
1536 print_log( 'ERROR' )
[1217]1537 return
1538 else:
1539 raise RuntimeError(msg)
[513]1540
[1217]1541
[1118]1542 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1543 threshold=3, chan_avg_limit=1, plot=False,
1544 insitu=None):
[880]1545 """
1546 Return a scan which has been baselined (all rows) by a polynomial.
1547 Spectral lines are detected first using linefinder and masked out
1548 to avoid them affecting the baseline solution.
1549
1550 Parameters:
1551 mask: an optional mask retrieved from scantable
1552 edge: an optional number of channels to drop at
1553 the edge of spectrum. If only one value is
1554 specified, the same number will be dropped from
1555 both sides of the spectrum. Default is to keep
[907]1556 all channels. Nested tuples represent individual
[976]1557 edge selection for different IFs (the number of spectral
1558 channels can differ between IFs)
[880]1559 order: the order of the polynomial (default is 0)
1560 threshold: the threshold used by line finder. It is better to
1561 keep it large as only strong lines affect the
1562 baseline solution.
[1280]1563 chan_avg_limit:
1564 a maximum number of consecutive spectral channels to
1565 average during the search of weak and broad lines.
1566 The default is no averaging (and no search for weak
1567 lines). If such lines can affect the fitted baseline
1568 (e.g. a high order polynomial is fitted), increase this
1569 parameter (usually values up to 8 are reasonable). Most
1570 users of this method should find the default value
1571 sufficient.
[1061]1572 plot: plot the fit and the residual. In this mode each
1573 individual fit has to be approved, by typing 'y'
1574 or 'n'
[880]1575 insitu: if False a new scantable is returned.
1576 Otherwise, the scaling is done in-situ
1577 The default is taken from .asaprc (False)
1578
1579 Example:
1580 scan2=scan.auto_poly_baseline(order=7)
1581 """
1582 if insitu is None: insitu = rcParams['insitu']
1583 varlist = vars()
1584 from asap.asapfitter import fitter
1585 from asap.asaplinefind import linefinder
1586 from asap import _is_sequence_or_number as _is_valid
1587
[976]1588 # check whether edge is set up for each IF individually
[1118]1589 individualedge = False;
1590 if len(edge) > 1:
1591 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1592 individualedge = True;
[907]1593
[1118]1594 if not _is_valid(edge, int) and not individualedge:
[909]1595 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1596 pair of integers specified as a tuple. Nested tuples are allowed \
1597 to make individual selection for different IFs."
[919]1598
[1118]1599 curedge = (0, 0)
1600 if individualedge:
1601 for edgepar in edge:
1602 if not _is_valid(edgepar, int):
1603 raise ValueError, "Each element of the 'edge' tuple has \
1604 to be a pair of integers or an integer."
[907]1605 else:
[1118]1606 curedge = edge;
[880]1607
1608 # setup fitter
1609 f = fitter()
1610 f.set_function(poly=order)
1611
1612 # setup line finder
[1118]1613 fl = linefinder()
[1268]1614 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1615
1616 if not insitu:
[1118]1617 workscan = self.copy()
[880]1618 else:
[1118]1619 workscan = self
[880]1620
[907]1621 fl.set_scan(workscan)
1622
[1118]1623 rows = range(workscan.nrow())
[1446]1624 # Save parameters of baseline fits & masklists as a class attribute.
1625 # NOTICE: It does not reflect changes in scantable!
1626 if len(rows) > 0:
1627 self.blpars=[]
1628 self.masklists=[]
[880]1629 asaplog.push("Processing:")
1630 for r in rows:
[1118]1631 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1632 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1633 workscan.getpol(r), workscan.getcycle(r))
[880]1634 asaplog.push(msg, False)
[907]1635
[976]1636 # figure out edge parameter
[1118]1637 if individualedge:
1638 if workscan.getif(r) >= len(edge):
1639 raise RuntimeError, "Number of edge elements appear to " \
1640 "be less than the number of IFs"
1641 curedge = edge[workscan.getif(r)]
[919]1642
[976]1643 # setup line finder
[1118]1644 fl.find_lines(r, mask, curedge)
[1446]1645 outmask=fl.get_mask()
[880]1646 f.set_scan(workscan, fl.get_mask())
1647 f.x = workscan._getabcissa(r)
1648 f.y = workscan._getspectrum(r)
1649 f.data = None
1650 f.fit()
[1446]1651
1652 # Show mask list
1653 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1654 msg = "mask range: "+str(masklist)
1655 asaplog.push(msg, False)
1656
1657 fpar = f.get_parameters()
[1061]1658 if plot:
1659 f.plot(residual=True)
1660 x = raw_input("Accept fit ( [y]/n ): ")
1661 if x.upper() == 'N':
[1446]1662 self.blpars.append(None)
1663 self.masklists.append(None)
[1061]1664 continue
[880]1665 workscan._setspectrum(f.fitter.getresidual(), r)
[1446]1666 self.blpars.append(fpar)
1667 self.masklists.append(masklist)
[1061]1668 if plot:
1669 f._p.unmap()
1670 f._p = None
1671 workscan._add_history("auto_poly_baseline", varlist)
[880]1672 if insitu:
1673 self._assign(workscan)
1674 else:
1675 return workscan
1676
[914]1677 def rotate_linpolphase(self, angle):
1678 """
1679 Rotate the phase of the complex polarization O=Q+iU correlation.
1680 This is always done in situ in the raw data. So if you call this
1681 function more than once then each call rotates the phase further.
1682 Parameters:
1683 angle: The angle (degrees) to rotate (add) by.
1684 Examples:
1685 scan.rotate_linpolphase(2.3)
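# hedged illustration: calls accumulate, so adding this second call
# amounts to a total rotation of 5.0 degrees
scan.rotate_linpolphase(2.7)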
1686 """
1687 varlist = vars()
[936]1688 self._math._rotate_linpolphase(self, angle)
[914]1689 self._add_history("rotate_linpolphase", varlist)
1690 print_log()
1691 return
[710]1692
[513]1693
[914]1694 def rotate_xyphase(self, angle):
1695 """
1696 Rotate the phase of the XY correlation. This is always done in situ
1697 in the data. So if you call this function more than once
1698 then each call rotates the phase further.
1699 Parameters:
1700 angle: The angle (degrees) to rotate (add) by.
1701 Examples:
1702 scan.rotate_xyphase(2.3)
1703 """
1704 varlist = vars()
[936]1705 self._math._rotate_xyphase(self, angle)
[914]1706 self._add_history("rotate_xyphase", varlist)
1707 print_log()
1708 return
1709
1710 def swap_linears(self):
1711 """
[1348]1712 Swap the linear polarisations XX and YY, or more precisely the
1713 first two polarisations, as this also works for circulars.
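Example (a hedged sketch; any scantable with two polarisations will do):
scan.swap_linears()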
[914]1714 """
1715 varlist = vars()
[936]1716 self._math._swap_linears(self)
[914]1717 self._add_history("swap_linears", varlist)
1718 print_log()
1719 return
1720
1721 def invert_phase(self):
1722 """
1723 Invert the phase of the complex polarisation
1724 """
1725 varlist = vars()
[936]1726 self._math._invert_phase(self)
[914]1727 self._add_history("invert_phase", varlist)
1728 print_log()
1729 return
1730
[876]1731 def add(self, offset, insitu=None):
[513]1732 """
1733 Return a scan where all spectra have the offset added
1734 Parameters:
1735 offset: the offset
1736 insitu: if False a new scantable is returned.
1737 Otherwise, the scaling is done in-situ
1738 The default is taken from .asaprc (False)
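Example (hedged sketch; the offset value is arbitrary):
scan2 = scan.add(0.5, insitu=False)  # return a new, offset scantable
scan.add(0.5)                        # honours the .asaprc default for insitu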
1739 """
1740 if insitu is None: insitu = rcParams['insitu']
[876]1741 self._math._setinsitu(insitu)
[513]1742 varlist = vars()
[876]1743 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1744 s._add_history("add", varlist)
[876]1745 print_log()
1746 if insitu:
1747 self._assign(s)
1748 else:
[513]1749 return s
1750
[1308]1751 def scale(self, factor, tsys=True, insitu=None):
[513]1752 """
1753 Return a scan where all spectra are scaled by the given 'factor'
1754 Parameters:
1755 factor: the scaling factor
1756 insitu: if False a new scantable is returned.
1757 Otherwise, the scaling is done in-situ
1758 The default is taken from .asaprc (False)
1759 tsys: if True (default) then apply the operation to Tsys
1760 as well as the data
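Example (hedged sketch; the factor is arbitrary):
scan2 = scan.scale(2.0, insitu=False)              # scale data and Tsys
scan3 = scan.scale(2.0, tsys=False, insitu=False)  # leave Tsys untouched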
1761 """
1762 if insitu is None: insitu = rcParams['insitu']
[876]1763 self._math._setinsitu(insitu)
[513]1764 varlist = vars()
[876]1765 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1766 s._add_history("scale", varlist)
[876]1767 print_log()
1768 if insitu:
1769 self._assign(s)
1770 else:
[513]1771 return s
1772
[1603]1773 def set_sourcetype(self, match, matchtype="pattern",
1774 sourcetype="reference"):
1775 """
1776 Set the type of the source to be a source or reference scan
1777 using the provided pattern:
1778 Parameters:
1779 match: a Unix style pattern, regular expression or selector
1780 matchtype: 'pattern' (default) UNIX style pattern or
1781 'regex' regular expression
1782 sourcetype: the type of the source to use (source/reference)
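Example (hedged sketch; the '*_R' naming convention is an assumption
about the dataset, not something this method requires):
scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")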
1783 """
1784 varlist = vars()
1785 basesel = self.get_selection()
1786 stype = -1
1787 if sourcetype.lower().startswith("r"):
1788 stype = 1
1789 elif sourcetype.lower().startswith("s"):
1790 stype = 0
1791 else:
1792 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1793 if matchtype.lower().startswith("p"):
1794 matchtype = "pattern"
1795 elif matchtype.lower().startswith("r"):
1796 matchtype = "regex"
1797 else:
1798 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1799 sel = selector()
1800 if isinstance(match, selector):
1801 sel = match
1802 else:
1803 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1804 self.set_selection(basesel+sel)
1805 self._setsourcetype(stype)
1806 self.set_selection(basesel)
1807 self._add_history("set_sourcetype", varlist)
1808
[1348]1809 def auto_quotient(self, preserve=True, mode='paired'):
[670]1810 """
1811 This function allows one to build quotients automatically.
1812 It assumes the observation to have the same number of
1813 "ons" and "offs".
1814 Parameters:
[710]1815 preserve: you can preserve (default) the continuum or
1816 remove it. The equations used are
[670]1817 preserve: Output = Toff * (on/off) - Toff
[1070]1818 remove: Output = Toff * (on/off) - Ton
[1348]1819 mode: the on/off detection mode
1820 'paired' (default)
1821 identifies 'off' scans by the
1822 trailing '_R' (Mopra/Parkes) or
1823 '_e'/'_w' (Tid) and matches
1824 on/off pairs from the observing pattern
[1603]1825 'time'
1826 finds the closest off in time
[1348]1827
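Example (hedged sketch; assumes the scantable holds matching
on/off scans):
q = scan.auto_quotient()                             # 'paired' detection
q2 = scan.auto_quotient(mode='time', preserve=False)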
[670]1828 """
[1348]1829 modes = ["time", "paired"]
[670]1830 if not mode in modes:
[876]1831 msg = "please provide valid mode. Valid modes are %s" % (modes)
1832 raise ValueError(msg)
1833 varlist = vars()
[1348]1834 s = None
1835 if mode.lower() == "paired":
1836 basesel = self.get_selection()
[1356]1837 sel = selector()+basesel
1838 sel.set_query("SRCTYPE==1")
1839 self.set_selection(sel)
[1348]1840 offs = self.copy()
1841 sel.set_query("SRCTYPE==0")
[1356]1842 self.set_selection(sel)
[1348]1843 ons = self.copy()
1844 s = scantable(self._math._quotient(ons, offs, preserve))
1845 self.set_selection(basesel)
1846 elif mode.lower() == "time":
1847 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1848 s._add_history("auto_quotient", varlist)
[876]1849 print_log()
1850 return s
[710]1851
[1145]1852 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1853 """
[1143]1854 Form a quotient using "off" beams when observing in "MX" mode.
1855 Parameters:
[1145]1856 mask: an optional mask to be used when weight == 'stddev'
[1143]1857 weight: How to average the off beams. Default is 'median'.
[1145]1858 preserve: you can preserve (default) the continuum or
1859 remove it. The equations used are
1860 preserve: Output = Toff * (on/off) - Toff
1861 remove: Output = Toff * (on/off) - Ton
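Example (hedged sketch for an MX-mode observation):
q = scan.mx_quotient(weight='median', preserve=True)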
[1217]1862 """
[1143]1863 if mask is None: mask = ()
[1141]1864 varlist = vars()
1865 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1866 preoff = scantable(self._math._mx_extract(self, 'off'))
1867 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1868 from asapmath import quotient
[1145]1869 q = quotient(on, off, preserve)
[1143]1870 q._add_history("mx_quotient", varlist)
[1145]1871 print_log()
[1217]1872 return q
[513]1873
[718]1874 def freq_switch(self, insitu=None):
1875 """
1876 Apply frequency switching to the data.
1877 Parameters:
1878 insitu: if False a new scantable is returned.
1879 Otherwise, the switching is done in-situ
1880 The default is taken from .asaprc (False)
1881 Example:
1882 none
1883 """
1884 if insitu is None: insitu = rcParams['insitu']
[876]1885 self._math._setinsitu(insitu)
[718]1886 varlist = vars()
[876]1887 s = scantable(self._math._freqswitch(self))
[1118]1888 s._add_history("freq_switch", varlist)
[876]1889 print_log()
1890 if insitu: self._assign(s)
1891 else: return s
[718]1892
[780]1893 def recalc_azel(self):
1894 """
1895 Recalculate the azimuth and elevation for each position.
1896 Parameters:
1897 none
1898 Example:
1899 """
1900 varlist = vars()
[876]1901 self._recalcazel()
[780]1902 self._add_history("recalc_azel", varlist)
1903 print_log()
1904 return
1905
[513]1906 def __add__(self, other):
1907 varlist = vars()
1908 s = None
1909 if isinstance(other, scantable):
[1308]1910 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1911 elif isinstance(other, float):
[876]1912 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1913 else:
[718]1914 raise TypeError("Other input is not a scantable or float value")
[513]1915 s._add_history("operator +", varlist)
[718]1916 print_log()
[513]1917 return s
1918
1919 def __sub__(self, other):
1920 """
1921 implicit on all axes and on Tsys
1922 """
1923 varlist = vars()
1924 s = None
1925 if isinstance(other, scantable):
[1308]1926 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1927 elif isinstance(other, float):
[876]1928 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1929 else:
[718]1930 raise TypeError("Other input is not a scantable or float value")
[513]1931 s._add_history("operator -", varlist)
[718]1932 print_log()
[513]1933 return s
[710]1934
[513]1935 def __mul__(self, other):
1936 """
1937 implicit on all axes and on Tsys
1938 """
1939 varlist = vars()
1940 s = None
1941 if isinstance(other, scantable):
[1308]1942 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1943 elif isinstance(other, float):
[876]1944 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1945 else:
[718]1946 raise TypeError("Other input is not a scantable or float value")
[513]1947 s._add_history("operator *", varlist)
[718]1948 print_log()
[513]1949 return s
1950
[710]1951
[513]1952 def __div__(self, other):
1953 """
1954 implicit on all axes and on Tsys
1955 """
1956 varlist = vars()
1957 s = None
1958 if isinstance(other, scantable):
[1308]1959 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1960 elif isinstance(other, float):
1961 if other == 0.0:
[718]1962 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1963 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1964 else:
[718]1965 raise TypeError("Other input is not a scantable or float value")
[513]1966 s._add_history("operator /", varlist)
[718]1967 print_log()
[513]1968 return s
1969
[530]1970 def get_fit(self, row=0):
1971 """
1972 Print or return the stored fits for a row in the scantable
1973 Parameters:
1974 row: the row to which the fit has been applied.
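Example (hedged sketch; only meaningful after a fit has been stored
for that row):
fitdict = scan.get_fit(row=0)  # a dict unless rcParams['verbose'] prints it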
1975 """
1976 if row >= self.nrow():
1977 return
[976]1978 from asap.asapfit import asapfit
[530]1979 fit = asapfit(self._getfit(row))
[718]1980 if rcParams['verbose']:
[1612]1981 #print fit
[1614]1982 asaplog.push( '%s' %(fit) )
1983 print_log()
[530]1984 return
1985 else:
1986 return fit.as_dict()
1987
[1603]1988 def flag_nans(self):
1989 """
1990 Utility function to flag NaN values in the scantable.
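Example (hedged sketch):
scan.flag_nans()   # flag NaN channels; the original selection is restored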
1991 """
1992 import numpy
1993 basesel = self.get_selection()
1994 for i in range(self.nrow()):
1995 sel = selector()+basesel
1996 sel.set_scans(self.getscan(i))
1997 sel.set_beams(self.getbeam(i))
1998 sel.set_ifs(self.getif(i))
1999 sel.set_polarisations(self.getpol(i))
2000 self.set_selection(sel)
2001 nans = numpy.isnan(self._getspectrum(0))
2002 if numpy.any(nans):
2003 bnans = [ bool(v) for v in nans]
2004 self.flag(bnans)
2005 self.set_selection(basesel)
2006
2007
[484]2008 def _add_history(self, funcname, parameters):
[1603]2009 if not rcParams['scantable.history']:
2010 return
[484]2011 # create date
2012 sep = "##"
2013 from datetime import datetime
2014 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2015 hist = dstr+sep
2016 hist += funcname+sep#cdate+sep
2017 if parameters.has_key('self'): del parameters['self']
[1118]2018 for k, v in parameters.iteritems():
[484]2019 if type(v) is dict:
[1118]2020 for k2, v2 in v.iteritems():
[484]2021 hist += k2
2022 hist += "="
[1118]2023 if isinstance(v2, scantable):
[484]2024 hist += 'scantable'
2025 elif k2 == 'mask':
[1118]2026 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2027 hist += str(self._zip_mask(v2))
2028 else:
2029 hist += str(v2)
[484]2030 else:
[513]2031 hist += str(v2)
[484]2032 else:
2033 hist += k
2034 hist += "="
[1118]2035 if isinstance(v, scantable):
[484]2036 hist += 'scantable'
2037 elif k == 'mask':
[1118]2038 if isinstance(v, list) or isinstance(v, tuple):
[513]2039 hist += str(self._zip_mask(v))
2040 else:
2041 hist += str(v)
[484]2042 else:
2043 hist += str(v)
2044 hist += sep
2045 hist = hist[:-2] # remove trailing '##'
2046 self._addhistory(hist)
2047
[710]2048
[484]2049 def _zip_mask(self, mask):
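# Compress a boolean channel mask into [start, end) index segments;
# an illustrative example: [0, 1, 1, 0, 1] -> [[1, 3], [4, 5]].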
2050 mask = list(mask)
2051 i = 0
2052 segments = []
2053 while mask[i:].count(1):
2054 i += mask[i:].index(1)
2055 if mask[i:].count(0):
2056 j = i + mask[i:].index(0)
2057 else:
[710]2058 j = len(mask)
[1118]2059 segments.append([i, j])
[710]2060 i = j
[484]2061 return segments
[714]2062
[626]2063 def _get_ordinate_label(self):
2064 fu = "("+self.get_fluxunit()+")"
2065 import re
2066 lbl = "Intensity"
[1118]2067 if re.match(".K.", fu):
[626]2068 lbl = "Brightness Temperature "+ fu
[1118]2069 elif re.match(".Jy.", fu):
[626]2070 lbl = "Flux density "+ fu
2071 return lbl
[710]2072
[876]2073 def _check_ifs(self):
2074 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2075 nchans = filter(lambda t: t > 0, nchans)
[876]2076 return (sum(nchans)/len(nchans) == nchans[0])
[976]2077
[1496]2078 def _fill(self, names, unit, average, getpt):
[976]2079 import os
2080 from asap._asap import stfiller
2081 first = True
2082 fullnames = []
2083 for name in names:
2084 name = os.path.expandvars(name)
2085 name = os.path.expanduser(name)
2086 if not os.path.exists(name):
2087 msg = "File '%s' does not exists" % (name)
2088 if rcParams['verbose']:
2089 asaplog.push(msg)
[1612]2090 #print asaplog.pop().strip()
[1614]2091 print_log( 'ERROR' )
[976]2092 return
2093 raise IOError(msg)
2094 fullnames.append(name)
2095 if average:
2096 asaplog.push('Auto averaging integrations')
[1079]2097 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2098 for name in fullnames:
[1073]2099 tbl = Scantable(stype)
2100 r = stfiller(tbl)
[1603]2101 rx = rcParams['scantable.reference']
2102 r._setreferenceexpr(rx)
[976]2103 msg = "Importing %s..." % (name)
[1118]2104 asaplog.push(msg, False)
[976]2105 print_log()
[1496]2106 r._open(name, -1, -1, getpt)
[976]2107 r._read()
2108 if average:
[1118]2109 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2110 if not first:
2111 tbl = self._math._merge([self, tbl])
2112 Scantable.__init__(self, tbl)
2113 r._close()
[1118]2114 del r, tbl
[976]2115 first = False
2116 if unit is not None:
2117 self.set_fluxunit(unit)
[1446]2118 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]2119
[1603]2120 def __getitem__(self, key):
2121 if key < 0:
2122 key += self.nrow()
2123 if key >= self.nrow():
2124 raise IndexError("Row index out of range.")
2125 return self._getspectrum(key)
2126
2127 def __setitem__(self, key, value):
2128 if key < 0:
2129 key += self.nrow()
2130 if key >= self.nrow():
2131 raise IndexError("Row index out of range.")
2132 if not hasattr(value, "__len__") or \
2133 len(value) > self.nchan(self.getif(key)):
2134 raise ValueError("Spectrum length doesn't match.")
2135 return self._setspectrum(value, key)
2136
2137 def __len__(self):
2138 return self.nrow()
2139
2140 def __iter__(self):
2141 for i in range(len(self)):
2142 yield self[i]
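# Hedged usage sketch for the sequence protocol above ('scan' being a
# hypothetical scantable instance):
#   spec = scan[0]     # spectrum of the first row
#   scan[0] = spec     # write it back
#   for s in scan:     # iterate over all rows
#       print len(s)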