source: branches/alma/python/scantable.py@ 1647

Last change on this file since 1647 was 1637, checked in by WataruKawasaki, 15 years ago

New Development: No

JIRA Issue: Yes (CAS-1434)

Ready to Release: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s):

Description: modified poly_baseline() and auto_poly_baseline() so that FLAGTRA info is properly used in baseline fitting.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 82.7 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[1496]14 def __init__(self, filename, average=None, unit=None, getpt=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[1522]32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
34 The default is False, which makes loading
35 the MS data faster in some cases.
[710]36 """
[976]37 if average is None:
[710]38 average = rcParams['scantable.autoaverage']
[1496]39 if getpt is None:
40 getpt = False
[1259]41 varlist = vars()
[876]42 from asap._asap import stmath
43 self._math = stmath()
44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
[181]46 else:
[1603]47 if isinstance(filename, str):# or \
48# (isinstance(filename, list) or isinstance(filename, tuple)) \
49# and isinstance(filename[-1], str):
[976]50 import os.path
51 filename = os.path.expandvars(filename)
52 filename = os.path.expanduser(filename)
53 if not os.path.exists(filename):
54 s = "File '%s' not found." % (filename)
[718]55 if rcParams['verbose']:
[976]56 asaplog.push(s)
[1612]57 #print asaplog.pop().strip()
[1614]58 print_log('ERROR')
[718]59 return
[976]60 raise IOError(s)
[1115]61 if os.path.isdir(filename) \
[1118]62 and not os.path.exists(filename+'/table.f1'):
[976]63 # crude check if asap table
64 if os.path.exists(filename+'/table.info'):
[1118]65 ondisk = rcParams['scantable.storage'] == 'disk'
66 Scantable.__init__(self, filename, ondisk)
[976]67 if unit is not None:
68 self.set_fluxunit(unit)
[1496]69 # do not reset to the default freqframe
70 #self.set_freqframe(rcParams['scantable.freqframe'])
[718]71 else:
[1118]72 msg = "The given file '%s'is not a valid " \
73 "asap table." % (filename)
[976]74 if rcParams['verbose']:
[1612]75 #print msg
[1614]76 asaplog.push( msg )
77 print_log( 'ERROR' )
[976]78 return
79 else:
80 raise IOError(msg)
[226]81 else:
[1496]82 self._fill([filename], unit, average, getpt)
[1118]83 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]84 and isinstance(filename[-1], str):
[1496]85 self._fill(filename, unit, average, getpt)
[1259]86 self._add_history("scantable", varlist)
[714]87 print_log()
[102]88
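# Editorial usage sketch (not part of the original file; file names and
# options are hypothetical):
#   from asap import scantable
#   scan = scantable('mydata.rpf', average=True)   # read file, average integrations per scan
#   ms_scan = scantable('obs.ms', getpt=True)      # MeasurementSet input, also fill pointing data
#   print scan                                     # prints the summary via Scantable._summary()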
[876]89 def save(self, name=None, format=None, overwrite=False):
[116]90 """
[1280]91 Store the scantable on disk. This can be an asap (aips++) Table,
92 SDFITS or MS2 format.
[116]93 Parameters:
[1093]94 name: the name of the outputfile. For format "ASCII"
95 this is the root file name (data in 'name'.txt
[497]96 and header in 'name'_header.txt)
[116]97 format: an optional file format. Default is ASAP.
[280]98 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]99 'SDFITS' (save as SDFITS file)
[200]100 'ASCII' (saves as ascii text file)
[226]101 'MS2' (saves as an aips++
102 MeasurementSet V2)
[1603]103 'FITS' (save as image FITS - not
104 readable by class)
105 'CLASS' (save as FITS readable by CLASS)
[411]106 overwrite: If the file should be overwritten if it exists.
[256]107 The default False is to return with warning
[411]108 without writing the output. USE WITH CARE.
[116]109 Example:
110 scan.save('myscan.asap')
[1118]111 scan.save('myscan.sdfits', 'SDFITS')
[116]112 """
[411]113 from os import path
[226]114 if format is None: format = rcParams['scantable.save']
[256]115 suffix = '.'+format.lower()
[1118]116 if name is None or name == "":
[256]117 name = 'scantable'+suffix
[718]118 msg = "No filename given. Using default name %s..." % name
119 asaplog.push(msg)
[411]120 name = path.expandvars(name)
[256]121 if path.isfile(name) or path.isdir(name):
122 if not overwrite:
[718]123 msg = "File %s exists." % name
124 if rcParams['verbose']:
[1612]125 #print msg
[1614]126 asaplog.push( msg )
127 print_log( 'ERROR' )
[718]128 return
129 else:
130 raise IOError(msg)
[451]131 format2 = format.upper()
132 if format2 == 'ASAP':
[116]133 self._save(name)
134 else:
[989]135 from asap._asap import stwriter as stw
[1118]136 writer = stw(format2)
137 writer.write(self, name)
[718]138 print_log()
[116]139 return
140
[102]141 def copy(self):
142 """
143 Return a copy of this scantable.
[1348]144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]146 Parameters:
[113]147 none
[102]148 Example:
149 copiedscan = scan.copy()
150 """
[876]151 sd = scantable(Scantable._copy(self))
[113]152 return sd
153
[1093]154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable where the specified scan number(s) has(have)
157 been dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
[1612]166 #print "Please specify a scanno to drop from the scantable"
[1614]167 asaplog.push( 'Please specify a scanno to drop from the scantable' )
168 print_log( 'ERROR' )
[1093]169 return
170 else:
171 raise RuntimeError("No scan given")
172 try:
173 scanid = _to_list(scanid)
174 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
175 for sid in scanid: allscans.remove(sid)
[1118]176 if len(allscans) == 0:
177 raise ValueError("Can't remove all scans")
[1093]178 except ValueError:
179 if rcParams['verbose']:
[1612]180 #print "Couldn't find any match."
[1614]181 print_log()
182 asaplog.push( "Couldn't find any match." )
183 print_log( 'ERROR' )
[1093]184 return
185 else: raise
186 try:
187 bsel = self.get_selection()
188 sel = selector()
189 sel.set_scans(allscans)
190 self.set_selection(bsel+sel)
191 scopy = self._copy()
192 self.set_selection(bsel)
193 return scantable(scopy)
194 except RuntimeError:
[1118]195 if rcParams['verbose']:
[1612]196 #print "Couldn't find any match."
[1614]197 print_log()
198 asaplog.push( "Couldn't find any match." )
199 print_log( 'ERROR' )
[1118]200 else:
201 raise
[1093]202
203
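# Editorial usage sketch (scan numbers are hypothetical), assuming a
# scantable instance 'scan':
#   cleaned = scan.drop_scan([1, 4])   # new scantable without SCANNO 1 and 4
#   cleaned = scan.drop_scan(0)        # a single scan number also works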
[102]204 def get_scan(self, scanid=None):
205 """
206 Return a specific scan (by scanno) or collection of scans (by
207 source name) in a new scantable.
[1348]208 Note:
209 See scantable.drop_scan() for the inverse operation.
[102]210 Parameters:
[513]211 scanid: a (list of) scanno or a source name, unix-style
212 patterns are accepted for source name matching, e.g.
213 '*_R' gets all 'ref' scans
[102]214 Example:
[513]215 # get all scans containing the source '323p459'
216 newscan = scan.get_scan('323p459')
217 # get all 'off' scans
218 refscans = scan.get_scan('*_R')
219 # get a subset of scans by scanno (as listed in scan.summary())
[1118]220 newscan = scan.get_scan([0, 2, 7, 10])
[102]221 """
222 if scanid is None:
[718]223 if rcParams['verbose']:
[1612]224 #print "Please specify a scan no or name to " \
225 # "retrieve from the scantable"
[1614]226 asaplog.push( 'Please specify a scan no or name to retrieve from the scantable' )
227 print_log( 'ERROR' )
[718]228 return
229 else:
230 raise RuntimeError("No scan given")
231
[102]232 try:
[946]233 bsel = self.get_selection()
234 sel = selector()
[102]235 if type(scanid) is str:
[946]236 sel.set_name(scanid)
237 self.set_selection(bsel+sel)
[876]238 scopy = self._copy()
[946]239 self.set_selection(bsel)
[876]240 return scantable(scopy)
[102]241 elif type(scanid) is int:
[946]242 sel.set_scans([scanid])
243 self.set_selection(bsel+sel)
[876]244 scopy = self._copy()
[946]245 self.set_selection(bsel)
[876]246 return scantable(scopy)
[381]247 elif type(scanid) is list:
[946]248 sel.set_scans(scanid)
249 self.set_selection(sel)
[876]250 scopy = self._copy()
[946]251 self.set_selection(bsel)
[876]252 return scantable(scopy)
[381]253 else:
[718]254 msg = "Illegal scanid type, use 'int' or 'list' if ints."
255 if rcParams['verbose']:
[1612]256 #print msg
[1614]257 asaplog.push( msg )
258 print_log( 'ERROR' )
[718]259 else:
260 raise TypeError(msg)
[102]261 except RuntimeError:
[1612]262 if rcParams['verbose']:
263 #print "Couldn't find any match."
[1614]264 print_log()
265 asaplog.push( "Couldn't find any match." )
266 print_log( 'ERROR' )
[718]267 else: raise
[102]268
269 def __str__(self):
[1118]270 return Scantable._summary(self, True)
[102]271
[976]272 def summary(self, filename=None):
[102]273 """
274 Print a summary of the contents of this scantable.
275 Parameters:
276 filename: the name of a file to write the output to
277 Default - no file output
[381]278 verbose: print extra info such as the frequency table
279 The default (False) is taken from .asaprc
[102]280 """
[976]281 info = Scantable._summary(self, True)
282 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]283 if filename is not None:
[256]284 if filename == "":
285 filename = 'scantable_summary.txt'
[415]286 from os.path import expandvars, isdir
[411]287 filename = expandvars(filename)
[415]288 if not isdir(filename):
[413]289 data = open(filename, 'w')
290 data.write(info)
291 data.close()
292 else:
[718]293 msg = "Illegal file name '%s'." % (filename)
294 if rcParams['verbose']:
[1612]295 #print msg
[1614]296 asaplog.push( msg )
297 print_log( 'ERROR' )
[718]298 else:
299 raise IOError(msg)
300 if rcParams['verbose']:
[794]301 try:
302 from IPython.genutils import page as pager
303 except ImportError:
304 from pydoc import pager
305 pager(info)
[718]306 else:
307 return info
[710]308
[1603]309 def get_spectrum(self, rowno):
310 """Return the spectrum for the current row in the scantable as a list.
311 Parameters:
312 rowno: the row number to retrieve the spectrum from
313 """
314 return self._getspectrum(rowno)
[946]315
[1603]316 def get_mask(self, rowno):
317 """Return the mask for the current row in the scantable as a list.
318 Parameters:
319 rowno: the row number to retrieve the mask from
320 """
321 return self._getmask(rowno)
322
323 def set_spectrum(self, spec, rowno):
324 """Return the spectrum for the current row in the scantable as a list.
325 Parameters:
326 spec: the spectrum
327 rowno: the row number to set the spectrum for
328 """
329 assert(len(spec) == self.nchan())
330 return self._setspectrum(spec, rowno)
331
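# Editorial usage sketch: read a spectrum, modify it, and write it back to the
# same row (row 0 is arbitrary):
#   spec = scan.get_spectrum(0)
#   spec = [2.0*v for v in spec]       # e.g. scale by 2; length must equal nchan()
#   scan.set_spectrum(spec, 0)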
[946]332 def get_selection(self):
333 """
[1005]334 Get the selection object currently set on this scantable.
335 Parameters:
336 none
337 Example:
338 sel = scan.get_selection()
339 sel.set_ifs(0) # select IF 0
340 scan.set_selection(sel) # apply modified selection
[946]341 """
342 return selector(self._getselection())
343
[1005]344 def set_selection(self, selection=selector()):
[946]345 """
[1005]346 Select a subset of the data. All following operations on this scantable
347 are only applied to this selection.
348 Parameters:
349 selection: a selector object (default unset the selection)
350 Examples:
351 sel = selector() # create a selection object
[1118]352 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]353 scan.set_selection(sel) # set the selection
354 scan.summary() # will only print summary of scanno 0 and 3
355 scan.set_selection() # unset the selection
[946]356 """
357 self._setselection(selection)
358
[1446]359 def get_row(self, row=0, insitu=None):
360 """
361 Select a row in the scantable.
362 Return a scantable with single row.
363 Parameters:
364 row: row no of integration, default is 0.
365 insitu: if False a new scantable is returned.
366 Otherwise, the scaling is done in-situ
367 The default is taken from .asaprc (False)
368 """
369 if insitu is None: insitu = rcParams['insitu']
370 if not insitu:
371 workscan = self.copy()
372 else:
373 workscan = self
374 # Select a row
375 sel=selector()
376 sel.set_scans([workscan.getscan(row)])
377 sel.set_cycles([workscan.getcycle(row)])
378 sel.set_beams([workscan.getbeam(row)])
379 sel.set_ifs([workscan.getif(row)])
380 sel.set_polarisations([workscan.getpol(row)])
381 sel.set_name(workscan._getsourcename(row))
382 workscan.set_selection(sel)
383 if not workscan.nrow() == 1:
384 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
385 raise RuntimeError(msg)
386 del sel
387 if insitu:
388 self._assign(workscan)
389 else:
390 return workscan
391
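# Editorial usage sketch: extract integration 5 as its own scantable without
# touching the original (the row number is hypothetical):
#   row5 = scan.get_row(row=5, insitu=False)
#   print row5.nrow()                  # -> 1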
[1629]392 #def stats(self, stat='stddev', mask=None):
393 def stats(self, stat='stddev', mask=None, form='3.3f'):
[102]394 """
[135]395 Determine the specified statistic of the current beam/if/pol
[102]396 Takes a 'mask' as an optional parameter to specify which
397 channels should be excluded.
398 Parameters:
[1517]399 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]400 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]401 mask: an optional mask specifying where the statistic
[102]402 should be determined.
[1629]403 form: format string to print statistic values
[102]404 Example:
[113]405 scan.set_unit('channel')
[1118]406 msk = scan.create_mask([100, 200], [500, 600])
[135]407 scan.stats(stat='mean', mask=m)
[102]408 """
409 if mask == None:
[876]410 mask = []
[1118]411 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]412 if not self._check_ifs():
[1118]413 raise ValueError("Cannot apply mask as the IFs have different "
414 "number of channels. Please use setselection() "
415 "to select individual IFs")
[1530]416 rtnabc = False
417 if stat.lower().endswith('_abc'): rtnabc = True
418 getchan = False
419 if stat.lower().startswith('min') or stat.lower().startswith('max'):
[1517]420 chan = self._math._minmaxchan(self, mask, stat)
421 getchan = True
[1515]422 statvals = []
[1530]423 if not rtnabc: statvals = self._math._stats(self, mask, stat)
424
[876]425 out = ''
426 axes = []
427 for i in range(self.nrow()):
428 axis = []
429 axis.append(self.getscan(i))
430 axis.append(self.getbeam(i))
431 axis.append(self.getif(i))
432 axis.append(self.getpol(i))
433 axis.append(self.getcycle(i))
434 axes.append(axis)
435 tm = self._gettime(i)
436 src = self._getsourcename(i)
[1530]437 refstr = ''
438 statunit= ''
[1517]439 if getchan:
440 qx, qy = self.chan2data(rowno=i, chan=chan[i])
[1530]441 if rtnabc:
442 statvals.append(qx['value'])
[1629]443 #refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
444 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
[1530]445 statunit= '['+qx['unit']+']'
446 else:
[1629]447 #refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
448 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
[1530]449 #statunit= ' ['+qy['unit']+']'
[876]450 out += 'Scan[%d] (%s) ' % (axis[0], src)
451 out += 'Time[%s]:\n' % (tm)
452 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
453 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
454 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
[1629]455 #out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
456 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
[876]457 out += "--------------------------------------------------\n"
[256]458
[876]459 if rcParams['verbose']:
[1615]460 import os
[1625]461 if os.environ.has_key( 'USER' ):
462 usr=os.environ['USER']
463 else:
464 import commands
465 usr=commands.getoutput( 'whoami' )
[1613]466 tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
[1612]467 f=open(tmpfile,'w')
468 print >> f, "--------------------------------------------------"
469 print >> f, " ", stat, statunit
470 print >> f, "--------------------------------------------------"
471 print >> f, out
472 f.close()
473 f=open(tmpfile,'r')
474 x=f.readlines()
475 f.close()
476 for xx in x:
[1614]477 asaplog.push( xx )
478 print_log()
[1295]479 #else:
480 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
481 # 'axes' : axes,
482 # 'data': statvals}
483 return statvals
[102]484
[1517]485 def chan2data(self, rowno=0, chan=0):
[1515]486 """
[1517]487 Returns channel/frequency/velocity and spectral value
[1515]488 at an arbitrary row and channel in the scantable.
489 Parameters:
490 rowno: a row number in the scantable. Default is the
491 first row, i.e. rowno=0
[1517]492 chan: a channel in the scantable. Default is the first
[1515]493 channel, i.e. chan=0
494 """
[1517]495 if isinstance(rowno, int) and isinstance(chan, int):
[1530]496 qx = {'unit': self.get_unit(),
497 'value': self._getabcissa(rowno)[chan]}
[1517]498 qy = {'unit': self.get_fluxunit(),
499 'value': self._getspectrum(rowno)[chan]}
500 return qx, qy
[1515]501
[1118]502 def stddev(self, mask=None):
[135]503 """
504 Determine the standard deviation of the current beam/if/pol
505 Takes a 'mask' as an optional parameter to specify which
506 channels should be excluded.
507 Parameters:
508 mask: an optional mask specifying where the standard
509 deviation should be determined.
510
511 Example:
512 scan.set_unit('channel')
[1118]513 msk = scan.create_mask([100, 200], [500, 600])
[135]514 scan.stddev(mask=msk)
515 """
[1118]516 return self.stats(stat='stddev', mask=mask);
[135]517
[1003]518
[1259]519 def get_column_names(self):
[1003]520 """
521 Return a list of column names, which can be used for selection.
522 """
[1259]523 return list(Scantable.get_column_names(self))
[1003]524
[876]525 def get_tsys(self):
[113]526 """
527 Return the System temperatures.
528 Returns:
[876]529 a list of Tsys values for the current selection
[113]530 """
[256]531
[876]532 return self._row_callback(self._gettsys, "Tsys")
[256]533
[876]534 def _row_callback(self, callback, label):
535 axes = []
[1118]536 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]537 out = ""
[1118]538 outvec = []
[876]539 for i in range(self.nrow()):
540 axis = []
541 axis.append(self.getscan(i))
542 axis.append(self.getbeam(i))
543 axis.append(self.getif(i))
544 axis.append(self.getpol(i))
545 axis.append(self.getcycle(i))
546 axes.append(axis)
547 tm = self._gettime(i)
548 src = self._getsourcename(i)
549 out += 'Scan[%d] (%s) ' % (axis[0], src)
550 out += 'Time[%s]:\n' % (tm)
551 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
552 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
553 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
554 outvec.append(callback(i))
555 out += '= %3.3f\n' % (outvec[i])
556 out += "--------------------------------------------------\n"
557 if rcParams['verbose']:
[1615]558 asaplog.push("--------------------------------------------------")
559 asaplog.push(" %s" % (label))
560 asaplog.push("--------------------------------------------------")
561 asaplog.push(out)
[1614]562 print_log()
[1175]563 # disabled because the vector seems more useful
564 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
565 return outvec
[256]566
[1070]567 def _get_column(self, callback, row=-1):
568 """
569 """
570 if row == -1:
571 return [callback(i) for i in range(self.nrow())]
572 else:
573 if 0 <= row < self.nrow():
574 return callback(row)
[256]575
[1070]576
[1348]577 def get_time(self, row=-1, asdatetime=False):
[113]578 """
579 Get a list of time stamps for the observations.
[1348]580 Return a datetime object for each integration time stamp in the scantable.
[113]581 Parameters:
[1348]582 row: row no of integration. Default -1 return all rows
583 asdatetime: return values as datetime objects rather than strings
[113]584 Example:
585 none
586 """
[1175]587 from time import strptime
588 from datetime import datetime
[1457]589 times = self._get_column(self._gettime, row)
[1348]590 if not asdatetime:
[1603]591 return times
[1175]592 format = "%Y/%m/%d/%H:%M:%S"
593 if isinstance(times, list):
594 return [datetime(*strptime(i, format)[:6]) for i in times]
595 else:
596 return datetime(*strptime(times, format)[:6])
[102]597
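# Editorial usage sketch: time stamps as strings or as datetime objects:
#   tstrings = scan.get_time()                  # all rows, "YYYY/MM/DD/HH:MM:SS" strings
#   t0 = scan.get_time(row=0, asdatetime=True)  # datetime.datetime for row 0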
[1348]598
599 def get_inttime(self, row=-1):
600 """
601 Get a list of integration times for the observations.
602 Return a time in seconds for each integration in the scantable.
603 Parameters:
604 row: row no of integration. Default -1 return all rows.
605 Example:
606 none
607 """
608 return self._get_column(self._getinttime, row)
609
610
[714]611 def get_sourcename(self, row=-1):
612 """
[794]613 Get a list of source names for the observations.
[714]614 Return a string for each integration in the scantable.
615 Parameters:
[1348]616 row: row no of integration. Default -1 return all rows.
[714]617 Example:
618 none
619 """
[1070]620 return self._get_column(self._getsourcename, row)
[714]621
[794]622 def get_elevation(self, row=-1):
623 """
624 Get a list of elevations for the observations.
625 Return a float for each integration in the scantable.
626 Parameters:
[1348]627 row: row no of integration. Default -1 return all rows.
[794]628 Example:
629 none
630 """
[1070]631 return self._get_column(self._getelevation, row)
[794]632
633 def get_azimuth(self, row=-1):
634 """
635 Get a list of azimuths for the observations.
636 Return a float for each integration in the scantable.
637 Parameters:
[1348]638 row: row no of integration. Default -1 return all rows.
[794]639 Example:
640 none
641 """
[1070]642 return self._get_column(self._getazimuth, row)
[794]643
644 def get_parangle(self, row=-1):
645 """
646 Get a list of parallactic angles for the observations.
647 Return a float for each integration in the scantable.
648 Parameters:
[1348]649 row: row no of integration. Default -1 return all rows.
[794]650 Example:
651 none
652 """
[1070]653 return self._get_column(self._getparangle, row)
[794]654
[1070]655 def get_direction(self, row=-1):
656 """
657 Get a list of Positions on the sky (direction) for the observations.
658 Return the direction for each integration in the scantable.
659 Parameters:
660 row: row no of integration. Default -1 return all rows
661 Example:
662 none
663 """
664 return self._get_column(self._getdirection, row)
665
[1389]666 def get_directionval(self, row=-1):
667 """
668 Get a list of Positions on the sky (direction) for the observations.
669 Return a direction value (as floats) for each integration in the scantable.
670 Parameters:
671 row: row no of integration. Default -1 return all rows
672 Example:
673 none
674 """
675 return self._get_column(self._getdirectionvec, row)
676
[102]677 def set_unit(self, unit='channel'):
678 """
679 Set the unit for all following operations on this scantable
680 Parameters:
681 unit: optional unit, default is 'channel'
[1118]682 one of '*Hz', 'km/s', 'channel', ''
[102]683 """
[484]684 varlist = vars()
[1118]685 if unit in ['', 'pixel', 'channel']:
[113]686 unit = ''
687 inf = list(self._getcoordinfo())
688 inf[0] = unit
689 self._setcoordinfo(inf)
[1118]690 self._add_history("set_unit", varlist)
[113]691
[484]692 def set_instrument(self, instr):
[358]693 """
[1348]694 Set the instrument for subsequent processing.
[358]695 Parameters:
[710]696 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]697 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]698 """
699 self._setInstrument(instr)
[1118]700 self._add_history("set_instrument", vars())
[718]701 print_log()
[358]702
[1190]703 def set_feedtype(self, feedtype):
704 """
705 Overwrite the feed type, which might not be set correctly.
706 Parameters:
707 feedtype: 'linear' or 'circular'
708 """
709 self._setfeedtype(feedtype)
710 self._add_history("set_feedtype", vars())
711 print_log()
712
[276]713 def set_doppler(self, doppler='RADIO'):
714 """
715 Set the doppler for all following operations on this scantable.
716 Parameters:
717 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
718 """
[484]719 varlist = vars()
[276]720 inf = list(self._getcoordinfo())
721 inf[2] = doppler
722 self._setcoordinfo(inf)
[1118]723 self._add_history("set_doppler", vars())
[718]724 print_log()
[710]725
[226]726 def set_freqframe(self, frame=None):
[113]727 """
728 Set the frame type of the Spectral Axis.
729 Parameters:
[591]730 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]731 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
732 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]733 Examples:
734 scan.set_freqframe('BARY')
735 """
[484]736 if frame is None: frame = rcParams['scantable.freqframe']
737 varlist = vars()
[1118]738 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
739 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]740
[989]741 if frame in valid:
[113]742 inf = list(self._getcoordinfo())
743 inf[1] = frame
744 self._setcoordinfo(inf)
[1118]745 self._add_history("set_freqframe", varlist)
[102]746 else:
[1118]747 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]748 if rcParams['verbose']:
[1612]749 #print msg
[1614]750 asaplog.push( msg )
751 print_log( 'ERROR' )
[718]752 else:
753 raise TypeError(msg)
754 print_log()
[710]755
[989]756 def set_dirframe(self, frame=""):
757 """
758 Set the frame type of the Direction on the sky.
759 Parameters:
760 frame: an optional frame type, default ''. Valid frames are:
761 'J2000', 'B1950', 'GALACTIC'
762 Examples:
763 scan.set_dirframe('GALACTIC')
764 """
765 varlist = vars()
766 try:
767 Scantable.set_dirframe(self, frame)
[1118]768 except RuntimeError, msg:
[989]769 if rcParams['verbose']:
[1612]770 #print msg
[1614]771 print_log()
[1627]772 asaplog.push( msg.message )
[1614]773 print_log( 'ERROR' )
[989]774 else:
775 raise
[1118]776 self._add_history("set_dirframe", varlist)
[989]777
[113]778 def get_unit(self):
779 """
780 Get the default unit set in this scantable
781 Returns:
782 A unit string
783 """
784 inf = self._getcoordinfo()
785 unit = inf[0]
786 if unit == '': unit = 'channel'
787 return unit
[102]788
[158]789 def get_abcissa(self, rowno=0):
[102]790 """
[158]791 Get the abcissa in the current coordinate setup for the currently
[113]792 selected Beam/IF/Pol
793 Parameters:
[226]794 rowno: an optional row number in the scantable. Default is the
795 first row, i.e. rowno=0
[113]796 Returns:
[1348]797 The abcissa values and the format string (as a tuple)
[113]798 """
[256]799 abc = self._getabcissa(rowno)
[710]800 lbl = self._getabcissalabel(rowno)
[718]801 print_log()
[158]802 return abc, lbl
[113]803
[1401]804 def flag(self, mask=None, unflag=False):
[1001]805 """
806 Flag the selected data using an optional channel mask.
807 Parameters:
808 mask: an optional channel mask, created with create_mask. Default
809 (no mask) is all channels.
[1401]810 unflag: if True, unflag the data
[1001]811 """
812 varlist = vars()
[1118]813 if mask is None:
814 mask = []
[1001]815 try:
[1401]816 self._flag(mask, unflag)
[1118]817 except RuntimeError, msg:
[1001]818 if rcParams['verbose']:
[1612]819 #print msg
[1614]820 print_log()
[1627]821 asaplog.push( msg.message )
[1614]822 print_log( 'ERROR' )
[1001]823 return
824 else: raise
825 self._add_history("flag", varlist)
826
[1203]827 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]828 """
829 Flag the data in 'lag' space by providing a frequency to remove.
830 Flagged data in the scantable gets set to 0.0 before the fft.
831 No taper is applied.
832 Parameters:
[1348]833 frequency: the frequency (really a period within the bandwidth)
834 to remove
835 width: the width of the frequency to remove, to remove a
[1603]836 range of frequencies around the centre.
[1203]837 unit: the frequency unit (default "GHz")
838 Notes:
[1348]839 It is recommended to flag edges of the band or strong
840 signals beforehand.
[1192]841 """
842 if insitu is None: insitu = rcParams['insitu']
843 self._math._setinsitu(insitu)
844 varlist = vars()
[1370]845 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]846 if not base.has_key(unit):
847 raise ValueError("%s is not a valid unit." % unit)
848 try:
[1200]849 s = scantable(self._math._lag_flag(self, frequency*base[unit],
850 width*base[unit]))
[1192]851 except RuntimeError, msg:
852 if rcParams['verbose']:
[1612]853 #print msg
[1614]854 print_log()
[1627]855 asaplog.push( msg.message )
[1614]856 print_log( 'ERROR' )
[1192]857 return
858 else: raise
859 s._add_history("lag_flag", varlist)
860 print_log()
861 if insitu:
862 self._assign(s)
863 else:
864 return s
[1001]865
[1192]866
[113]867 def create_mask(self, *args, **kwargs):
868 """
[1118]869 Compute and return a mask based on [min, max] windows.
[189]870 The specified windows are to be INCLUDED, when the mask is
[113]871 applied.
[102]872 Parameters:
[1118]873 [min, max], [min2, max2], ...
[1024]874 Pairs of start/end points (inclusive) specifying the regions
[102]875 to be masked
[189]876 invert: optional argument. If specified as True,
877 return an inverted mask, i.e. the regions
878 specified are EXCLUDED
[513]879 row: create the mask using the specified row for
880 unit conversions, default is row=0
881 only necessary if frequency varies over rows.
[102]882 Example:
[113]883 scan.set_unit('channel')
884 a)
[1118]885 msk = scan.create_mask([400, 500], [800, 900])
[189]886 # masks everything outside 400 and 500
[113]887 # and 800 and 900 in the unit 'channel'
888
889 b)
[1118]890 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]891 # masks the regions between 400 and 500
[113]892 # and 800 and 900 in the unit 'channel'
[1024]893 c)
894 mask only channel 400
[1118]895 msk = scan.create_mask([400, 400])
[102]896 """
[513]897 row = 0
898 if kwargs.has_key("row"):
899 row = kwargs.get("row")
900 data = self._getabcissa(row)
[113]901 u = self._getcoordinfo()[0]
[718]902 if rcParams['verbose']:
[113]903 if u == "": u = "channel"
[718]904 msg = "The current mask window unit is %s" % u
[1118]905 i = self._check_ifs()
906 if not i:
[876]907 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]908 asaplog.push(msg)
[102]909 n = self.nchan()
[1295]910 msk = _n_bools(n, False)
[710]911 # test if args is a 'list' or a 'normal *args - UGLY!!!
912
[1118]913 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
914 and args or args[0]
[710]915 for window in ws:
[102]916 if (len(window) != 2 or window[0] > window[1] ):
[1118]917 raise TypeError("A window needs to be defined as [min, max]")
[102]918 for i in range(n):
[1024]919 if data[i] >= window[0] and data[i] <= window[1]:
[1295]920 msk[i] = True
[113]921 if kwargs.has_key('invert'):
922 if kwargs.get('invert'):
[1295]923 msk = mask_not(msk)
[718]924 print_log()
[102]925 return msk
[710]926
[1446]927 def get_masklist(self, mask=None, row=0):
[256]928 """
[1446]929 Compute and return a list of mask windows, [min, max].
930 Parameters:
931 mask: channel mask, created with create_mask.
931 row: calculate the masklist using the specified row
933 for unit conversions, default is row=0
934 only necessary if frequency varies over rows.
935 Returns:
936 [min, max], [min2, max2], ...
937 Pairs of start/end points (inclusive) specifying
938 the masked regions
939 """
940 if not (isinstance(mask,list) or isinstance(mask, tuple)):
941 raise TypeError("The mask should be list or tuple.")
942 if len(mask) < 2:
943 raise TypeError("The mask elements should be > 1")
944 if self.nchan() != len(mask):
945 msg = "Number of channels in scantable != number of mask elements"
946 raise TypeError(msg)
947 data = self._getabcissa(row)
948 u = self._getcoordinfo()[0]
949 if rcParams['verbose']:
950 if u == "": u = "channel"
951 msg = "The current mask window unit is %s" % u
952 i = self._check_ifs()
953 if not i:
954 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
955 asaplog.push(msg)
956 masklist=[]
957 ist, ien = None, None
958 ist, ien=self.get_mask_indices(mask)
959 if ist is not None and ien is not None:
960 for i in xrange(len(ist)):
961 limits = [data[ist[i]], data[ien[i]]]
962 limits.sort()
963 masklist.append([limits[0], limits[1]])
964 return masklist
965
966 def get_mask_indices(self, mask=None):
967 """
968 Compute and Return lists of mask start indices and mask end indices.
969 Parameters:
970 mask: channel mask, created with create_mask.
971 Returns:
972 List of mask start indices and that of mask end indices,
973 i.e., [istart1,istart2,....], [iend1,iend2,....].
974 """
975 if not (isinstance(mask,list) or isinstance(mask, tuple)):
976 raise TypeError("The mask should be list or tuple.")
977 if len(mask) < 2:
978 raise TypeError("The mask elements should be > 1")
979 istart=[]
980 iend=[]
981 if mask[0]: istart.append(0)
982 for i in range(len(mask)-1):
983 if not mask[i] and mask[i+1]:
984 istart.append(i+1)
985 elif mask[i] and not mask[i+1]:
986 iend.append(i)
987 if mask[len(mask)-1]: iend.append(len(mask)-1)
988 if len(istart) != len(iend):
989 raise RuntimeError("Numbers of mask start != mask end.")
990 for i in range(len(istart)):
991 if istart[i] > iend[i]:
992 raise RuntimeError("Mask start index > mask end index")
993 break
994 return istart,iend
995
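# Editorial usage sketch: convert a channel mask back into windows and index
# ranges (channel numbers are hypothetical):
#   msk = scan.create_mask([100, 200], [500, 600])
#   print scan.get_masklist(msk)        # -> [[min1, max1], [min2, max2]] in the current unit
#   print scan.get_mask_indices(msk)    # -> ([100, 500], [200, 600])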
996# def get_restfreqs(self):
997# """
998# Get the restfrequency(s) stored in this scantable.
999# The return value(s) are always of unit 'Hz'
1000# Parameters:
1001# none
1002# Returns:
1003# a list of doubles
1004# """
1005# return list(self._getrestfreqs())
1006
1007 def get_restfreqs(self, ids=None):
1008 """
[256]1009 Get the restfrequency(s) stored in this scantable.
1010 The return value(s) are always of unit 'Hz'
1011 Parameters:
[1446]1012 ids: (optional) a list of MOLECULE_IDs for which the
1013 restfrequency(s) are to be retrieved
[256]1014 Returns:
[1446]1015 dictionary containing ids and a list of doubles for each id
[256]1016 """
[1446]1017 if ids is None:
1018 rfreqs={}
1019 idlist = self.getmolnos()
1020 for i in idlist:
1021 rfreqs[i]=list(self._getrestfreqs(i))
1022 return rfreqs
1023 else:
1024 if type(ids)==list or type(ids)==tuple:
1025 rfreqs={}
1026 for i in ids:
1027 rfreqs[i]=list(self._getrestfreqs(i))
1028 return rfreqs
1029 else:
1030 return list(self._getrestfreqs(ids))
1031 #return list(self._getrestfreqs(ids))
[102]1032
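# Editorial usage sketch: rest frequencies per MOLECULE_ID (the IDs present
# depend on the data):
#   allfreqs = scan.get_restfreqs()     # dict {molecule_id: [freq_in_Hz, ...]}
#   one = scan.get_restfreqs(0)         # list of doubles for MOLECULE_ID 0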
[931]1033 def set_restfreqs(self, freqs=None, unit='Hz'):
1034 """
[1446]1035 ********NEED TO BE UPDATED begin************
[931]1036 Set or replace the restfrequency specified and
1037 If the 'freqs' argument holds a scalar,
1038 then that rest frequency will be applied to all the selected
1039 data. If the 'freqs' argument holds
1040 a vector, then it MUST be of equal or smaller length than
1041 the number of IFs (and the available restfrequencies will be
1042 replaced by this vector). In this case, *all* data have
1043 the restfrequency set per IF according
1044 to the corresponding value you give in the 'freqs' vector.
[1118]1045 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1046 IF 1 gets restfreq 2e9.
[1446]1047 ********NEED TO BE UPDATED end************
[1603]1048 You can also specify the frequencies via a linecatalog.
[1153]1049
[931]1050 Parameters:
1051 freqs: list of rest frequency values or string identifiers
1052 unit: unit for rest frequency (default 'Hz')
[402]1053
[931]1054 Example:
[1446]1055 # set the given restfrequency for the all currently selected IFs
[931]1056 scan.set_restfreqs(freqs=1.4e9)
[1446]1057 # set multiple restfrequencies to all the selected data
1058 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1059 # If the number of IFs in the data is >= 2 the IF0 gets the first
1060 # value IF1 the second... NOTE that freqs needs to be
1061 # specified in list of list (e.g. [[],[],...] ).
1062 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]1063 #set the given restfrequency for the whole table (by name)
1064 scan.set_restfreqs(freqs="OH1667")
[391]1065
[931]1066 Note:
1067 To do more sophisticate Restfrequency setting, e.g. on a
1068 source and IF basis, use scantable.set_selection() before using
1069 this function.
1070 # provided your scantable is called scan
1071 selection = selector()
1072 selection.set_name("ORION*")
1073 selection.set_ifs([1])
1074 scan.set_selection(selection)
1075 scan.set_restfreqs(freqs=86.6e9)
1076
1077 """
1078 varlist = vars()
[1157]1079 from asap import linecatalog
1080 # simple value
[1118]1081 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]1082 # TT mod
1083 #self._setrestfreqs(freqs, "",unit)
1084 self._setrestfreqs([freqs], [""],unit)
[1157]1085 # list of values
[1118]1086 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1087 # list values are scalars
[1118]1088 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]1089 self._setrestfreqs(freqs, [""],unit)
[1157]1090 # list values are tuples, (value, name)
1091 elif isinstance(freqs[-1], dict):
[1446]1092 #sel = selector()
1093 #savesel = self._getselection()
1094 #iflist = self.getifnos()
1095 #for i in xrange(len(freqs)):
1096 # sel.set_ifs(iflist[i])
1097 # self._setselection(sel)
1098 # self._setrestfreqs(freqs[i], "",unit)
1099 #self._setselection(savesel)
1100 self._setrestfreqs(freqs["value"],
1101 freqs["name"], "MHz")
1102 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1103 sel = selector()
1104 savesel = self._getselection()
[1322]1105 iflist = self.getifnos()
[1446]1106 if len(freqs)>len(iflist):
1107 raise ValueError("number of elements in list of list exceeds the current IF selections")
[1157]1108 for i in xrange(len(freqs)):
[1322]1109 sel.set_ifs(iflist[i])
[1259]1110 self._setselection(sel)
[1157]1111 self._setrestfreqs(freqs[i]["value"],
1112 freqs[i]["name"], "MHz")
1113 self._setselection(savesel)
1114 # freqs are to be taken from a linecatalog
[1153]1115 elif isinstance(freqs, linecatalog):
1116 sel = selector()
1117 savesel = self._getselection()
 iflist = self.getifnos()
1118 for i in xrange(freqs.nrow()):
[1322]1119 sel.set_ifs(iflist[i])
[1153]1120 self._setselection(sel)
1121 self._setrestfreqs(freqs.get_frequency(i),
1122 freqs.get_name(i), "MHz")
1123 # ensure that we are not iterating past nIF
1124 if i == self.nif()-1: break
1125 self._setselection(savesel)
[931]1126 else:
1127 return
1128 self._add_history("set_restfreqs", varlist)
1129
[1360]1130 def shift_refpix(self, delta):
1131 """
1132 Shift the reference pixel of the Spectra Coordinate by an
1133 integer amount.
1134 Parameters:
1135 delta: the amount to shift by
1136 Note:
1137 Be careful using this with broadband data.
1138 """
1139 Scantable.shift(self, delta)
[931]1140
[1259]1141 def history(self, filename=None):
1142 """
1143 Print the history. Optionally to a file.
[1348]1144 Parameters:
1145 filename: The name of the file to save the history to.
[1259]1146 """
[484]1147 hist = list(self._gethistory())
[794]1148 out = "-"*80
[484]1149 for h in hist:
[489]1150 if h.startswith("---"):
[794]1151 out += "\n"+h
[489]1152 else:
1153 items = h.split("##")
1154 date = items[0]
1155 func = items[1]
1156 items = items[2:]
[794]1157 out += "\n"+date+"\n"
1158 out += "Function: %s\n Parameters:" % (func)
[489]1159 for i in items:
1160 s = i.split("=")
[1118]1161 out += "\n %s = %s" % (s[0], s[1])
[794]1162 out += "\n"+"-"*80
[1259]1163 if filename is not None:
1164 if filename == "":
1165 filename = 'scantable_history.txt'
1166 import os
1167 filename = os.path.expandvars(os.path.expanduser(filename))
1168 if not os.path.isdir(filename):
1169 data = open(filename, 'w')
1170 data.write(out)
1171 data.close()
1172 else:
1173 msg = "Illegal file name '%s'." % (filename)
1174 if rcParams['verbose']:
[1612]1175 #print msg
[1614]1176 asaplog.push( msg )
1177 print_log( 'ERROR' )
[1259]1178 else:
1179 raise IOError(msg)
1180 if rcParams['verbose']:
1181 try:
1182 from IPython.genutils import page as pager
1183 except ImportError:
1184 from pydoc import pager
1185 pager(out)
1186 else:
1187 return out
[484]1188 return
[513]1189 #
1190 # Maths business
1191 #
1192
[931]1193 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1194 """
[1070]1195 Return the (time) weighted average of a scan.
[513]1196 Note:
[1070]1197 in channels only - align if necessary
[513]1198 Parameters:
1199 mask: an optional mask (only used for 'var' and 'tsys'
1200 weighting)
[558]1201 scanav: True averages each scan separately
1202 False (default) averages all scans together,
[1099]1203 weight: Weighting scheme.
1204 'none' (mean no weight)
1205 'var' (1/var(spec) weighted)
1206 'tsys' (1/Tsys**2 weighted)
1207 'tint' (integration time weighted)
1208 'tintsys' (Tint/Tsys**2)
1209 'median' ( median averaging)
[535]1210 The default is 'tint'
[931]1211 align: align the spectra in velocity before averaging. It takes
1212 the time of the first spectrum as reference time.
[513]1213 Example:
1214 # time average the scantable without using a mask
[710]1215 newscan = scan.average_time()
[513]1216 """
1217 varlist = vars()
[976]1218 if weight is None: weight = 'TINT'
[513]1219 if mask is None: mask = ()
[1099]1220 if scanav: scanav = "SCAN"
1221 else: scanav = "NONE"
[1118]1222 scan = (self, )
[989]1223 try:
[1118]1224 if align:
1225 scan = (self.freq_align(insitu=False), )
1226 s = None
1227 if weight.upper() == 'MEDIAN':
1228 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1229 scanav))
1230 else:
1231 s = scantable(self._math._average(scan, mask, weight.upper(),
1232 scanav))
1233 except RuntimeError, msg:
[989]1234 if rcParams['verbose']:
[1612]1235 #print msg
[1614]1236 print_log()
[1627]1237 asaplog.push( msg.message )
[1614]1238 print_log( 'ERROR' )
[989]1239 return
1240 else: raise
[1099]1241 s._add_history("average_time", varlist)
[718]1242 print_log()
[513]1243 return s
[710]1244
[876]1245 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1246 """
1247 Return a scan where all spectra are converted to either
1248 Jansky or Kelvin depending upon the flux units of the scan table.
1249 By default the function tries to look the values up internally.
1250 If it can't find them (or if you want to over-ride), you must
1251 specify EITHER jyperk OR eta (and D which it will try to look up
1252 also if you don't set it). jyperk takes precedence if you set both.
1253 Parameters:
1254 jyperk: the Jy / K conversion factor
1255 eta: the aperture efficiency
1256 d: the geometric diameter (metres)
1257 insitu: if False a new scantable is returned.
1258 Otherwise, the scaling is done in-situ
1259 The default is taken from .asaprc (False)
1260 """
1261 if insitu is None: insitu = rcParams['insitu']
[876]1262 self._math._setinsitu(insitu)
[513]1263 varlist = vars()
1264 if jyperk is None: jyperk = -1.0
1265 if d is None: d = -1.0
1266 if eta is None: eta = -1.0
[876]1267 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1268 s._add_history("convert_flux", varlist)
1269 print_log()
1270 if insitu: self._assign(s)
1271 else: return s
[513]1272
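# Editorial usage sketch: convert K <-> Jy either from internally known values
# or with an explicit Jy/K factor (the value 12.7 is hypothetical):
#   jyscan = scan.convert_flux(insitu=False)               # use values known for the instrument
#   jyscan = scan.convert_flux(jyperk=12.7, insitu=False)  # explicit conversion factor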
[876]1273 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1274 """
1275 Return a scan after applying a gain-elevation correction.
1276 The correction can be made via either a polynomial or a
1277 table-based interpolation (and extrapolation if necessary).
1278 You specify polynomial coefficients, an ascii table or neither.
1279 If you specify neither, then a polynomial correction will be made
1280 with built in coefficients known for certain telescopes (an error
1281 will occur if the instrument is not known).
1282 The data and Tsys are *divided* by the scaling factors.
1283 Parameters:
1284 poly: Polynomial coefficients (default None) to compute a
1285 gain-elevation correction as a function of
1286 elevation (in degrees).
1287 filename: The name of an ascii file holding correction factors.
1288 The first row of the ascii file must give the column
1289 names and these MUST include columns
1290 "ELEVATION" (degrees) and "FACTOR" (multiply data
1291 by this) somewhere.
1292 The second row must give the data type of the
1293 column. Use 'R' for Real and 'I' for Integer.
1294 An example file would be
1295 (actual factors are arbitrary) :
1296
1297 TIME ELEVATION FACTOR
1298 R R R
1299 0.1 0 0.8
1300 0.2 20 0.85
1301 0.3 40 0.9
1302 0.4 60 0.85
1303 0.5 80 0.8
1304 0.6 90 0.75
1305 method: Interpolation method when correcting from a table.
1306 Values are "nearest", "linear" (default), "cubic"
1307 and "spline"
1308 insitu: if False a new scantable is returned.
1309 Otherwise, the scaling is done in-situ
1310 The default is taken from .asaprc (False)
1311 """
1312
1313 if insitu is None: insitu = rcParams['insitu']
[876]1314 self._math._setinsitu(insitu)
[513]1315 varlist = vars()
1316 if poly is None:
[1118]1317 poly = ()
[513]1318 from os.path import expandvars
1319 filename = expandvars(filename)
[876]1320 s = scantable(self._math._gainel(self, poly, filename, method))
1321 s._add_history("gain_el", varlist)
1322 print_log()
1323 if insitu: self._assign(s)
1324 else: return s
[710]1325
[931]1326 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1327 """
1328 Return a scan where all rows have been aligned in frequency/velocity.
1329 The alignment frequency frame (e.g. LSRK) is that set by function
1330 set_freqframe.
1331 Parameters:
1332 reftime: reference time to align at. By default, the time of
1333 the first row of data is used.
1334 method: Interpolation method for regridding the spectra.
1335 Choose from "nearest", "linear", "cubic" (default)
1336 and "spline"
1337 insitu: if False a new scantable is returned.
1338 Otherwise, the scaling is done in-situ
1339 The default is taken from .asaprc (False)
1340 """
[931]1341 if insitu is None: insitu = rcParams["insitu"]
[876]1342 self._math._setinsitu(insitu)
[513]1343 varlist = vars()
[931]1344 if reftime is None: reftime = ""
1345 s = scantable(self._math._freq_align(self, reftime, method))
[876]1346 s._add_history("freq_align", varlist)
1347 print_log()
1348 if insitu: self._assign(s)
1349 else: return s
[513]1350
[876]1351 def opacity(self, tau, insitu=None):
[513]1352 """
1353 Apply an opacity correction. The data
1354 and Tsys are multiplied by the correction factor.
1355 Parameters:
1356 tau: Opacity from which the correction factor is
1357 exp(tau*ZD)
1358 where ZD is the zenith-distance
1359 insitu: if False a new scantable is returned.
1360 Otherwise, the scaling is done in-situ
1361 The default is taken from .asaprc (False)
1362 """
1363 if insitu is None: insitu = rcParams['insitu']
[876]1364 self._math._setinsitu(insitu)
[513]1365 varlist = vars()
[876]1366 s = scantable(self._math._opacity(self, tau))
1367 s._add_history("opacity", varlist)
1368 print_log()
1369 if insitu: self._assign(s)
1370 else: return s
[513]1371
1372 def bin(self, width=5, insitu=None):
1373 """
1374 Return a scan where all spectra have been binned up.
[1348]1375 Parameters:
[513]1376 width: The bin width (default=5) in pixels
1377 insitu: if False a new scantable is returned.
1378 Otherwise, the scaling is done in-situ
1379 The default is taken from .asaprc (False)
1380 """
1381 if insitu is None: insitu = rcParams['insitu']
[876]1382 self._math._setinsitu(insitu)
[513]1383 varlist = vars()
[876]1384 s = scantable(self._math._bin(self, width))
[1118]1385 s._add_history("bin", varlist)
[876]1386 print_log()
1387 if insitu: self._assign(s)
1388 else: return s
[513]1389
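# Editorial usage sketch: bin all spectra by 10 channels into a new scantable:
#   binned = scan.bin(width=10, insitu=False)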
[710]1390
[513]1391 def resample(self, width=5, method='cubic', insitu=None):
1392 """
[1348]1393 Return a scan where all spectra have been resampled.
1394
1395 Parameters:
[513]1396 width: The bin width (default=5) in pixels
1397 method: Interpolation method when correcting from a table.
1398 Values are "nearest", "linear", "cubic" (default)
1399 and "spline"
1400 insitu: if False a new scantable is returned.
1401 Otherwise, the scaling is done in-situ
1402 The default is taken from .asaprc (False)
1403 """
1404 if insitu is None: insitu = rcParams['insitu']
[876]1405 self._math._setinsitu(insitu)
[513]1406 varlist = vars()
[876]1407 s = scantable(self._math._resample(self, method, width))
[1118]1408 s._add_history("resample", varlist)
[876]1409 print_log()
1410 if insitu: self._assign(s)
1411 else: return s
[513]1412
1413
[946]1414 def average_pol(self, mask=None, weight='none'):
1415 """
1416 Average the Polarisations together.
1417 Parameters:
1418 mask: An optional mask defining the region, where the
1419 averaging will be applied. The output will have all
1420 specified points masked.
1421 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1422 weighted), or 'tsys' (1/Tsys**2 weighted)
1423 """
1424 varlist = vars()
1425 if mask is None:
1426 mask = ()
[1010]1427 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1428 s._add_history("average_pol", varlist)
[946]1429 print_log()
[992]1430 return s
[513]1431
[1145]1432 def average_beam(self, mask=None, weight='none'):
1433 """
1434 Average the Beams together.
1435 Parameters:
1436 mask: An optional mask defining the region, where the
1437 averaging will be applied. The output will have all
1438 specified points masked.
1439 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1440 weighted), or 'tsys' (1/Tsys**2 weighted)
1441 """
1442 varlist = vars()
1443 if mask is None:
1444 mask = ()
1445 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1446 s._add_history("average_beam", varlist)
1447 print_log()
1448 return s
1449
[992]1450 def convert_pol(self, poltype=None):
1451 """
1452 Convert the data to a different polarisation type.
1453 Parameters:
1454 poltype: The new polarisation type. Valid types are:
1455 "linear", "stokes" and "circular"
1456 """
1457 varlist = vars()
1458 try:
1459 s = scantable(self._math._convertpol(self, poltype))
[1118]1460 except RuntimeError, msg:
[992]1461 if rcParams['verbose']:
[1612]1462 #print msg
[1614]1463 print_log()
[1627]1464 asaplog.push( msg.message )
[1614]1465 print_log( 'ERROR' )
[1118]1466 return
[992]1467 else:
1468 raise
[1118]1469 s._add_history("convert_pol", varlist)
[992]1470 print_log()
1471 return s
1472
[876]1473 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1474 """
1475 Smooth the spectrum by the specified kernel (conserving flux).
1476 Parameters:
1477 kernel: The type of smoothing kernel. Select from
[1373]1478 'hanning' (default), 'gaussian', 'boxcar' and
1479 'rmedian'
[513]1480 width: The width of the kernel in pixels. For hanning this is
1481 ignored otherwise it defauls to 5 pixels.
1482 For 'gaussian' it is the Full Width Half
1483 Maximum. For 'boxcar' it is the full width.
[1373]1484 For 'rmedian' it is the half width.
[513]1485 insitu: if False a new scantable is returned.
1486 Otherwise, the scaling is done in-situ
1487 The default is taken from .asaprc (False)
1488 Example:
1489 none
1490 """
1491 if insitu is None: insitu = rcParams['insitu']
[876]1492 self._math._setinsitu(insitu)
[513]1493 varlist = vars()
[1118]1494 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1495 s._add_history("smooth", varlist)
1496 print_log()
1497 if insitu: self._assign(s)
1498 else: return s
[513]1499
[876]1500
[1389]1501 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1502 """
1503 Return a scan which has been baselined (all rows) by a polynomial.
1504 Parameters:
[794]1505 mask: an optional mask
1506 order: the order of the polynomial (default is 0)
[1061]1507 plot: plot the fit and the residual. In this case each
1508 individual fit has to be approved by typing 'y'
1509 or 'n'
[1389]1510 uselin: use linear polynomial fit
[794]1511 insitu: if False a new scantable is returned.
1512 Otherwise, the scaling is done in-situ
1513 The default is taken from .asaprc (False)
[513]1514 Example:
1515 # return a scan baselined by a third order polynomial,
1516 # not using a mask
1517 bscan = scan.poly_baseline(order=3)
[579]1518 """
[513]1519 if insitu is None: insitu = rcParams['insitu']
1520 varlist = vars()
1521 if mask is None:
[1295]1522 mask = [True for i in xrange(self.nchan(-1))]
[1637]1523
[513]1524 from asap.asapfitter import fitter
[1217]1525 try:
1526 f = fitter()
[1389]1527 if uselin:
1528 f.set_function(lpoly=order)
1529 else:
1530 f.set_function(poly=order)
[1637]1531
 workscan = insitu and self or self.copy()
1532 rows = range(workscan.nrow())
1533 if len(rows) > 0:
1534 workscan.blpars = []
1535
1536 for r in rows:
1537 # take into account flagtra info (CAS-1434)
1538 flagtra = workscan._getmask(r)
1539 actualmask = mask[:]
1540 if len(actualmask) == 0:
1541 actualmask = list(flagtra[:])
1542 else:
1543 if len(actualmask) != len(flagtra):
1544 raise RuntimeError, "Mask and flagtra have different length"
1545 else:
1546 for i in range(0, len(actualmask)):
1547 actualmask[i] = actualmask[i] and flagtra[i]
1548 f.set_scan(workscan, actualmask)
1549 f.x = workscan._getabcissa(r)
1550 f.y = workscan._getspectrum(r)
1551 f.data = None
1552 f.fit()
1553 fpar = f.get_parameters()
 if plot:
 f.plot(residual=True)
 x = raw_input("Accept fit ( [y]/n ): ")
 if x.upper() == 'N':
 workscan.blpars.append(None)
 continue
 # replace the spectrum by the fit residual
 workscan._setspectrum(f.fitter.getresidual(), r)
1554 workscan.blpars.append(fpar)
1555
1556 ## Save parameters of baseline fits as a class attribute.
1557 ## NOTICE: It does not reflect changes in scantable!
[1217]1561 workscan._add_history("poly_baseline", varlist)
1562 print_log()
1563 if insitu: self._assign(workscan)
1564 else: return workscan
1565 except RuntimeError:
1566 msg = "The fit failed, possibly because it didn't converge."
1567 if rcParams['verbose']:
[1612]1568 #print msg
[1614]1569 print_log()
[1627]1570 asaplog.push( msg )
[1614]1571 print_log( 'ERROR' )
[1217]1572 return
1573 else:
1574 raise RuntimeError(msg)
[513]1575
[1217]1576
[1118]1577 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1578 threshold=3, chan_avg_limit=1, plot=False,
1579 insitu=None):
[880]1580 """
1581 Return a scan which has been baselined (all rows) by a polynomial.
1582 Spectral lines are detected first using linefinder and masked out
1583 to avoid them affecting the baseline solution.
1584
1585 Parameters:
1586 mask: an optional mask retrieved from scantable
1587 edge: an optional number of channel to drop at
1588 the edge of spectrum. If only one value is
1589 specified, the same number will be dropped from
1590 both sides of the spectrum. Default is to keep
[907]1591 all channels. Nested tuples represent individual
[976]1592 edge selection for different IFs (a number of spectral
1593 channels can be different)
[880]1594 order: the order of the polynomial (default is 0)
1595 threshold: the threshold used by line finder. It is better to
1596 keep it large as only strong lines affect the
1597 baseline solution.
[1280]1598 chan_avg_limit:
1599 a maximum number of consecutive spectral channels to
1600 average during the search of weak and broad lines.
1601 The default is no averaging (and no search for weak
1602 lines). If such lines can affect the fitted baseline
1603 (e.g. a high order polynomial is fitted), increase this
1604 parameter (usually values up to 8 are reasonable). Most
1605 users of this method should find the default value
1606 sufficient.
[1061]1607 plot: plot the fit and the residual. In this case each
1608 individual fit has to be approved by typing 'y'
1609 or 'n'
[880]1610 insitu: if False a new scantable is returned.
1611 Otherwise, the scaling is done in-situ
1612 The default is taken from .asaprc (False)
1613
1614 Example:
1615 scan2=scan.auto_poly_baseline(order=7)
1616 """
1617 if insitu is None: insitu = rcParams['insitu']
1618 varlist = vars()
1619 from asap.asapfitter import fitter
1620 from asap.asaplinefind import linefinder
1621 from asap import _is_sequence_or_number as _is_valid
1622
[976]1623 # check whether edge is set up for each IF individually
[1118]1624 individualedge = False;
1625 if len(edge) > 1:
1626 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1627 individualedge = True;
[907]1628
[1118]1629 if not _is_valid(edge, int) and not individualedge:
[909]1630 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1631 pair of integers specified as a tuple. Nested tuples are allowed \
1632 to make individual selection for different IFs."
[919]1633
[1118]1634 curedge = (0, 0)
1635 if individualedge:
1636 for edgepar in edge:
1637 if not _is_valid(edgepar, int):
1638 raise ValueError, "Each element of the 'edge' tuple has \
1639 to be a pair of integers or an integer."
[907]1640 else:
[1118]1641 curedge = edge;
[880]1642
1643 # setup fitter
1644 f = fitter()
1645 f.set_function(poly=order)
1646
1647 # setup line finder
[1118]1648 fl = linefinder()
[1268]1649 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1650
1651 if not insitu:
[1118]1652 workscan = self.copy()
[880]1653 else:
[1118]1654 workscan = self
[880]1655
[907]1656 fl.set_scan(workscan)
1657
[1118]1658 rows = range(workscan.nrow())
[1446]1659 # Save parameters of baseline fits & masklists as a class attribute.
1660 # NOTICE: It does not reflect changes in scantable!
1661 if len(rows) > 0:
1662 self.blpars=[]
1663 self.masklists=[]
[880]1664 asaplog.push("Processing:")
1665 for r in rows:
[1118]1666 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1667 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1668 workscan.getpol(r), workscan.getcycle(r))
[880]1669 asaplog.push(msg, False)
[907]1670
[976]1671 # figure out edge parameter
[1118]1672 if individualedge:
1673 if len(edge) <= workscan.getif(r):
1674 raise RuntimeError, "Number of edge elements appears to " \
1675 "be less than the number of IFs"
1676 curedge = edge[workscan.getif(r)]
[919]1677
[1637]1678 # take into account flagtra info (CAS-1434)
1679 flagtra = workscan._getmask(r)
1680 actualmask = mask[:]
1681 if len(actualmask) == 0:
1682 actualmask = list(flagtra[:])
1683 else:
1684 if len(actualmask) != len(flagtra):
1685 raise RuntimeError, "Mask and flagtra have different lengths"
1686 else:
1687 for i in range(0, len(actualmask)):
1688 actualmask[i] = actualmask[i] and flagtra[i]
1689
[976]1690 # setup line finder
[1637]1691 fl.find_lines(r, actualmask, curedge)
[1446]1692 outmask=fl.get_mask()
[880]1693 f.set_scan(workscan, fl.get_mask())
1694 f.x = workscan._getabcissa(r)
1695 f.y = workscan._getspectrum(r)
1696 f.data = None
1697 f.fit()
[1446]1698
1699 # Show mask list
1700 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1701 msg = "mask range: "+str(masklist)
1702 asaplog.push(msg, False)
1703
1704 fpar = f.get_parameters()
[1061]1705 if plot:
1706 f.plot(residual=True)
1707 x = raw_input("Accept fit ( [y]/n ): ")
1708 if x.upper() == 'N':
[1446]1709 self.blpars.append(None)
1710 self.masklists.append(None)
[1061]1711 continue
[880]1712 workscan._setspectrum(f.fitter.getresidual(), r)
[1446]1713 self.blpars.append(fpar)
1714 self.masklists.append(masklist)
[1061]1715 if plot:
1716 f._p.unmap()
1717 f._p = None
1718 workscan._add_history("auto_poly_baseline", varlist)
[880]1719 if insitu:
1720 self._assign(workscan)
1721 else:
1722 return workscan
1723
[914]1724 def rotate_linpolphase(self, angle):
1725 """
1726 Rotate the phase of the complex polarization O=Q+iU correlation.
1727 This is always done in situ in the raw data. So if you call this
1728 function more than once then each call rotates the phase further.
1729 Parameters:
1730 angle: The angle (degrees) to rotate (add) by.
1731 Examples:
1732 scan.rotate_linpolphase(2.3)
1733 """
1734 varlist = vars()
[936]1735 self._math._rotate_linpolphase(self, angle)
[914]1736 self._add_history("rotate_linpolphase", varlist)
1737 print_log()
1738 return
[710]1739
[513]1740
[914]1741 def rotate_xyphase(self, angle):
1742 """
1743 Rotate the phase of the XY correlation. This is always done in situ
1744 in the data. So if you call this function more than once
1745 then each call rotates the phase further.
1746 Parameters:
1747 angle: The angle (degrees) to rotate (add) by.
1748 Examples:
1749 scan.rotate_xyphase(2.3)
1750 """
1751 varlist = vars()
[936]1752 self._math._rotate_xyphase(self, angle)
[914]1753 self._add_history("rotate_xyphase", varlist)
1754 print_log()
1755 return
1756
1757 def swap_linears(self):
1758 """
[1348]1759 Swap the linear polarisations XX and YY, or more generally the first two
1760 polarisations, as this also works for circulars.
[914]1761 """
1762 varlist = vars()
[936]1763 self._math._swap_linears(self)
[914]1764 self._add_history("swap_linears", varlist)
1765 print_log()
1766 return
1767
1768 def invert_phase(self):
1769 """
1770 Invert the phase of the complex polarisation
1771 """
1772 varlist = vars()
[936]1773 self._math._invert_phase(self)
[914]1774 self._add_history("invert_phase", varlist)
1775 print_log()
1776 return
1777
[876]1778 def add(self, offset, insitu=None):
[513]1779 """
1780 Return a scan where all spectra have the offset added
1781 Parameters:
1782 offset: the offset
1783 insitu: if False a new scantable is returned.
1784 Otherwise, the scaling is done in-situ
1785 The default is taken from .asaprc (False)
1786 """
1787 if insitu is None: insitu = rcParams['insitu']
[876]1788 self._math._setinsitu(insitu)
[513]1789 varlist = vars()
[876]1790 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1791 s._add_history("add", varlist)
[876]1792 print_log()
1793 if insitu:
1794 self._assign(s)
1795 else:
[513]1796 return s
1797
[1308]1798 def scale(self, factor, tsys=True, insitu=None):
[513]1799 """
1800 Return a scan where all spectra are scaled by the given 'factor'
1801 Parameters:
1802 factor: the scaling factor
1803 insitu: if False a new scantable is returned.
1804 Otherwise, the scaling is done in-situ
1805 The default is taken from .asaprc (False)
1806 tsys: if True (default) then apply the operation to Tsys
1807 as well as the data
1808 """
1809 if insitu is None: insitu = rcParams['insitu']
[876]1810 self._math._setinsitu(insitu)
[513]1811 varlist = vars()
[876]1812 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1813 s._add_history("scale", varlist)
[876]1814 print_log()
1815 if insitu:
1816 self._assign(s)
1817 else:
[513]1818 return s
1819
[1603]1820 def set_sourcetype(self, match, matchtype="pattern",
1821 sourcetype="reference"):
1822 """
1823 Set the type of the source to be a source or reference scan
1824 using the provided pattern.
1825 Parameters:
1826 match: a Unix style pattern, regular expression or selector
1827 matchtype: 'pattern' (default) UNIX style pattern or
1828 'regex' regular expression
1829 sourcetype: the type of the source to use (source/reference)
1830 """
1831 varlist = vars()
1832 basesel = self.get_selection()
1833 stype = -1
1834 if sourcetype.lower().startswith("r"):
1835 stype = 1
1836 elif sourcetype.lower().startswith("s"):
1837 stype = 0
1838 else:
1839 raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1840 if matchtype.lower().startswith("p"):
1841 matchtype = "pattern"
1842 elif matchtype.lower().startswith("r"):
1843 matchtype = "regex"
1844 else:
1845 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1846 sel = selector()
1847 if isinstance(match, selector):
1848 sel = match
1849 else:
1850 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1851 self.set_selection(basesel+sel)
1852 self._setsourcetype(stype)
1853 self.set_selection(basesel)
1854 self._add_history("set_sourcetype", varlist)
1855
[1631]1856 def auto_quotient(self, preserve=True, mode='paired', verify=False):
[670]1857 """
1858 This function allows you to build quotients automatically.
[1631]1859 It assumes the observation has the same number of
[670]1860 "ons" and "offs".
1861 Parameters:
[710]1862 preserve: you can preserve (default) the continuum or
1863 remove it. The equations used are
[670]1864 preserve: Output = Toff * (on/off) - Toff
[1070]1865 remove: Output = Toff * (on/off) - Ton
[1348]1866 mode: the on/off detection mode
1867 'paired' (default)
1868 identifies 'off' scans by the
1869 trailing '_R' (Mopra/Parkes) or
1870 '_e'/'_w' (Tid) and matches
1871 on/off pairs from the observing pattern
[1603]1872 'time'
1873 finds the closest off in time
[1348]1874
[670]1875 """
[1348]1876 modes = ["time", "paired"]
[670]1877 if not mode in modes:
[876]1878 msg = "please provide valid mode. Valid modes are %s" % (modes)
1879 raise ValueError(msg)
1880 varlist = vars()
[1348]1881 s = None
1882 if mode.lower() == "paired":
1883 basesel = self.get_selection()
[1356]1884 sel = selector()+basesel
1885 sel.set_query("SRCTYPE==1")
1886 self.set_selection(sel)
[1348]1887 offs = self.copy()
1888 sel.set_query("SRCTYPE==0")
[1356]1889 self.set_selection(sel)
[1348]1890 ons = self.copy()
1891 s = scantable(self._math._quotient(ons, offs, preserve))
1892 self.set_selection(basesel)
1893 elif mode.lower() == "time":
1894 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1895 s._add_history("auto_quotient", varlist)
[876]1896 print_log()
1897 return s
[710]1898
[1145]1899 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1900 """
[1143]1901 Form a quotient using "off" beams when observing in "MX" mode.
1902 Parameters:
[1145]1903 mask: an optional mask to be used when weight == 'stddev'
[1143]1904 weight: How to average the off beams. Default is 'median'.
[1145]1905 preserve: you can preserve (default) the continuum or
1906 remove it. The equations used are
1907 preserve: Output = Toff * (on/off) - Toff
1908 remove: Output = Toff * (on/off) - Ton
[1217]1909 """
[1143]1910 if mask is None: mask = ()
[1141]1911 varlist = vars()
1912 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1913 preoff = scantable(self._math._mx_extract(self, 'off'))
1914 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1915 from asapmath import quotient
[1145]1916 q = quotient(on, off, preserve)
[1143]1917 q._add_history("mx_quotient", varlist)
[1145]1918 print_log()
[1217]1919 return q
[513]1920
[718]1921 def freq_switch(self, insitu=None):
1922 """
1923 Apply frequency switching to the data.
1924 Parameters:
1925 insitu: if False a new scantable is returned.
1926 Otherwise, the switching is done in-situ
1927 The default is taken from .asaprc (False)
1928 Example:
1929 none
1930 """
1931 if insitu is None: insitu = rcParams['insitu']
[876]1932 self._math._setinsitu(insitu)
[718]1933 varlist = vars()
[876]1934 s = scantable(self._math._freqswitch(self))
[1118]1935 s._add_history("freq_switch", varlist)
[876]1936 print_log()
1937 if insitu: self._assign(s)
1938 else: return s
[718]1939
[780]1940 def recalc_azel(self):
1941 """
1942 Recalculate the azimuth and elevation for each position.
1943 Parameters:
1944 none
1945 Example:
1946 """
1947 varlist = vars()
[876]1948 self._recalcazel()
[780]1949 self._add_history("recalc_azel", varlist)
1950 print_log()
1951 return
1952
[513]1953 def __add__(self, other):
1954 varlist = vars()
1955 s = None
1956 if isinstance(other, scantable):
[1308]1957 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1958 elif isinstance(other, float):
[876]1959 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1960 else:
[718]1961 raise TypeError("Other input is not a scantable or float value")
[513]1962 s._add_history("operator +", varlist)
[718]1963 print_log()
[513]1964 return s
1965
1966 def __sub__(self, other):
1967 """
1968 implicit on all axes and on Tsys
1969 """
1970 varlist = vars()
1971 s = None
1972 if isinstance(other, scantable):
[1308]1973 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1974 elif isinstance(other, float):
[876]1975 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1976 else:
[718]1977 raise TypeError("Other input is not a scantable or float value")
[513]1978 s._add_history("operator -", varlist)
[718]1979 print_log()
[513]1980 return s
[710]1981
[513]1982 def __mul__(self, other):
1983 """
1984 implicit on all axes and on Tsys
1985 """
1986 varlist = vars()
1987 s = None
1988 if isinstance(other, scantable):
[1308]1989 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1990 elif isinstance(other, float):
[876]1991 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1992 else:
[718]1993 raise TypeError("Other input is not a scantable or float value")
[513]1994 s._add_history("operator *", varlist)
[718]1995 print_log()
[513]1996 return s
1997
[710]1998
[513]1999 def __div__(self, other):
2000 """
2001 implicit on all axes and on Tsys
2002 """
2003 varlist = vars()
2004 s = None
2005 if isinstance(other, scantable):
[1308]2006 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2007 elif isinstance(other, float):
2008 if other == 0.0:
[718]2009 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2010 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2011 else:
[718]2012 raise TypeError("Other input is not a scantable or float value")
[513]2013 s._add_history("operator /", varlist)
[718]2014 print_log()
[513]2015 return s
2016
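    # A minimal sketch of the arithmetic operators defined above, assuming
    # 'scan' and 'other' are scantables of matching shape:
    #     summed = scan + other     # scantable + scantable
    #     offset = scan + 1.5       # scantable + float
    #     half   = scan / 2.0       # dividing by the float 0.0 would raise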
[530]2017 def get_fit(self, row=0):
2018 """
2019 Print or return the stored fits for a row in the scantable
2020 Parameters:
2021 row: the row which the fit has been applied to.
2022 """
2023 if row >= self.nrow():
2024 return
[976]2025 from asap.asapfit import asapfit
[530]2026 fit = asapfit(self._getfit(row))
[718]2027 if rcParams['verbose']:
[1612]2028 #print fit
[1614]2029 asaplog.push( '%s' %(fit) )
2030 print_log()
[530]2031 return
2032 else:
2033 return fit.as_dict()
2034
[1603]2035 def flag_nans(self):
2036 """
2037 Utility function to flag NaN values in the scantable.
2038 """
2039 import numpy
2040 basesel = self.get_selection()
2041 for i in range(self.nrow()):
2042 sel = selector()+basesel
2043 sel.set_scans(self.getscan(i))
2044 sel.set_beams(self.getbeam(i))
2045 sel.set_ifs(self.getif(i))
2046 sel.set_polarisations(self.getpol(i))
2047 self.set_selection(sel)
2048 nans = numpy.isnan(self._getspectrum(0))
2049 if numpy.any(nans):
2050 bnans = [ bool(v) for v in nans]
2051 self.flag(bnans)
2052 self.set_selection(basesel)
2053
2054
[484]2055 def _add_history(self, funcname, parameters):
[1603]2056 if not rcParams['scantable.history']:
2057 return
[484]2058 # create date
2059 sep = "##"
2060 from datetime import datetime
2061 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2062 hist = dstr+sep
2063 hist += funcname+sep#cdate+sep
2064 if parameters.has_key('self'): del parameters['self']
[1118]2065 for k, v in parameters.iteritems():
[484]2066 if type(v) is dict:
[1118]2067 for k2, v2 in v.iteritems():
[484]2068 hist += k2
2069 hist += "="
[1118]2070 if isinstance(v2, scantable):
[484]2071 hist += 'scantable'
2072 elif k2 == 'mask':
[1118]2073 if isinstance(v2, list) or isinstance(v2, tuple):
[513]2074 hist += str(self._zip_mask(v2))
2075 else:
2076 hist += str(v2)
[484]2077 else:
[513]2078 hist += str(v2)
[484]2079 else:
2080 hist += k
2081 hist += "="
[1118]2082 if isinstance(v, scantable):
[484]2083 hist += 'scantable'
2084 elif k == 'mask':
[1118]2085 if isinstance(v, list) or isinstance(v, tuple):
[513]2086 hist += str(self._zip_mask(v))
2087 else:
2088 hist += str(v)
[484]2089 else:
2090 hist += str(v)
2091 hist += sep
2092 hist = hist[:-2] # remove trailing '##'
2093 self._addhistory(hist)
2094
[710]2095
[484]2096 def _zip_mask(self, mask):
2097 mask = list(mask)
2098 i = 0
2099 segments = []
2100 while mask[i:].count(1):
2101 i += mask[i:].index(1)
2102 if mask[i:].count(0):
2103 j = i + mask[i:].index(0)
2104 else:
[710]2105 j = len(mask)
[1118]2106 segments.append([i, j])
[710]2107 i = j
[484]2108 return segments
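    # A worked example of the run-length compression above (illustrative
    # only): the channel mask
    #     [0, 1, 1, 0, 0, 1, 1, 1]
    # is recorded in the history as the segments [[1, 3], [5, 8]], i.e.
    # half-open [start, end) ranges of channels where the mask is True.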
[714]2109
[626]2110 def _get_ordinate_label(self):
2111 fu = "("+self.get_fluxunit()+")"
2112 import re
2113 lbl = "Intensity"
[1118]2114 if re.match(".K.", fu):
[626]2115 lbl = "Brightness Temperature "+ fu
[1118]2116 elif re.match(".Jy.", fu):
[626]2117 lbl = "Flux density "+ fu
2118 return lbl
[710]2119
[876]2120 def _check_ifs(self):
2121 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2122 nchans = filter(lambda t: t > 0, nchans)
[876]2123 return (sum(nchans)/len(nchans) == nchans[0])
[976]2124
[1496]2125 def _fill(self, names, unit, average, getpt):
[976]2126 import os
2127 from asap._asap import stfiller
2128 first = True
2129 fullnames = []
2130 for name in names:
2131 name = os.path.expandvars(name)
2132 name = os.path.expanduser(name)
2133 if not os.path.exists(name):
2134 msg = "File '%s' does not exists" % (name)
2135 if rcParams['verbose']:
2136 asaplog.push(msg)
[1612]2137 #print asaplog.pop().strip()
[1614]2138 print_log( 'ERROR' )
[976]2139 return
2140 raise IOError(msg)
2141 fullnames.append(name)
2142 if average:
2143 asaplog.push('Auto averaging integrations')
[1079]2144 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2145 for name in fullnames:
[1073]2146 tbl = Scantable(stype)
2147 r = stfiller(tbl)
[1603]2148 rx = rcParams['scantable.reference']
2149 r._setreferenceexpr(rx)
[976]2150 msg = "Importing %s..." % (name)
[1118]2151 asaplog.push(msg, False)
[976]2152 print_log()
[1496]2153 r._open(name, -1, -1, getpt)
[976]2154 r._read()
2155 if average:
[1118]2156 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2157 if not first:
2158 tbl = self._math._merge([self, tbl])
2159 Scantable.__init__(self, tbl)
2160 r._close()
[1118]2161 del r, tbl
[976]2162 first = False
2163 if unit is not None:
2164 self.set_fluxunit(unit)
[1446]2165 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]2166
[1603]2167 def __getitem__(self, key):
2168 if key < 0:
2169 key += self.nrow()
2170 if key >= self.nrow():
2171 raise IndexError("Row index out of range.")
2172 return self._getspectrum(key)
2173
2174 def __setitem__(self, key, value):
2175 if key < 0:
2176 key += self.nrow()
2177 if key >= self.nrow():
2178 raise IndexError("Row index out of range.")
2179 if not hasattr(value, "__len__") or \
2180 len(value) > self.nchan(self.getif(key)):
2181 raise ValueError("Spectrum length doesn't match.")
2182 return self._setspectrum(value, key)
2183
2184 def __len__(self):
2185 return self.nrow()
2186
2187 def __iter__(self):
2188 for i in range(len(self)):
2189 yield self[i]
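    # A minimal sketch of the sequence protocol implemented above, assuming
    # 'scan' is a scantable:
    #     nrows = len(scan)       # number of rows
    #     spec0 = scan[0]         # spectrum of the first row
    #     for spec in scan:       # iterate over all row spectra
    #         pass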