source: branches/alma/python/scantable.py@ 1576

Last change on this file since 1576 was 1530, checked in by Kana Sugimoto, 16 years ago

New Development: No

JIRA Issue: Yes (CAS-1079)

Ready to Release: Yes

Interface Changes: Yes for scantable.stats()

What Interface Changed:
Return value is now a list of abscissa values
when stat='min_abc' or 'max_abc'.

Test Programs:

Run scantable.stats() with the parameter stat='min_abc' or 'max_abc'.

Put in Release Notes: No

Module(s): sdstat

Description:

scantable.stats() is modified so that the tool returns the abscissa
(chan/freq/velocity) of the minimum or maximum when stat='min_abc' or 'max_abc',
respectively.
Minor changes in IPosition mathutil::minMaxPos() so that it accepts both
min (max) and min_abc (max_abc) as a parameter.
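
A minimal usage sketch (the file name below is illustrative, not part of this change):

    s = scantable('orion_sio.asap')        # hypothetical input scantable
    peak_pos = s.stats(stat='max_abc')     # abscissa (chan/freq/velocity) of each row's maximum
    peak_val = s.stats(stat='max')         # the corresponding maximum values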


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 74.3 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[1496]14 def __init__(self, filename, average=None, unit=None, getpt=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[1522]32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
34 The default is False, which makes loading
35 the MS data faster in some cases.
[710]36 """
[976]37 if average is None:
[710]38 average = rcParams['scantable.autoaverage']
[1496]39 if getpt is None:
40 getpt = False
[1259]41 varlist = vars()
[876]42 from asap._asap import stmath
43 self._math = stmath()
44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
[181]46 else:
[1118]47 if isinstance(filename, str):
[976]48 import os.path
49 filename = os.path.expandvars(filename)
50 filename = os.path.expanduser(filename)
51 if not os.path.exists(filename):
52 s = "File '%s' not found." % (filename)
[718]53 if rcParams['verbose']:
[976]54 asaplog.push(s)
55 print asaplog.pop().strip()
[718]56 return
[976]57 raise IOError(s)
[1115]58 if os.path.isdir(filename) \
[1118]59 and not os.path.exists(filename+'/table.f1'):
[976]60 # crude check if asap table
61 if os.path.exists(filename+'/table.info'):
[1118]62 ondisk = rcParams['scantable.storage'] == 'disk'
63 Scantable.__init__(self, filename, ondisk)
[976]64 if unit is not None:
65 self.set_fluxunit(unit)
[1496]66 # do not reset to the default freqframe
67 #self.set_freqframe(rcParams['scantable.freqframe'])
[718]68 else:
[1118]69 msg = "The given file '%s'is not a valid " \
70 "asap table." % (filename)
[976]71 if rcParams['verbose']:
72 print msg
73 return
74 else:
75 raise IOError(msg)
[226]76 else:
[1496]77 self._fill([filename], unit, average, getpt)
[1118]78 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]79 and isinstance(filename[-1], str):
[1496]80 self._fill(filename, unit, average, getpt)
[1259]81 self._add_history("scantable", varlist)
[714]82 print_log()
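# Usage sketch for the scantable constructor (file names are illustrative only):
#   s = scantable('2008-03-12_M42.rpf', average=True)   # fill from an rpfits file
#   t = scantable('m42.asap', unit='Jy')                 # reopen a saved asap table in Jy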
[102]83
[876]84 def save(self, name=None, format=None, overwrite=False):
[116]85 """
[1280]86 Store the scantable on disk. This can be an asap (aips++) Table,
87 SDFITS or MS2 format.
[116]88 Parameters:
[1093]89 name: the name of the outputfile. For format "ASCII"
90 this is the root file name (data in 'name'.txt
[497]91 and header in 'name'_header.txt)
[116]92 format: an optional file format. Default is ASAP.
[280]93 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]94 'SDFITS' (save as SDFITS file)
[200]95 'ASCII' (saves as ascii text file)
[226]96 'MS2' (saves as an aips++
97 MeasurementSet V2)
[411]98 overwrite: If the file should be overwritten if it exists.
[256]99 The default False is to return with warning
[411]100 without writing the output. USE WITH CARE.
[116]101 Example:
102 scan.save('myscan.asap')
[1118]103 scan.save('myscan.sdfits', 'SDFITS')
[116]104 """
[411]105 from os import path
[226]106 if format is None: format = rcParams['scantable.save']
[256]107 suffix = '.'+format.lower()
[1118]108 if name is None or name == "":
[256]109 name = 'scantable'+suffix
[718]110 msg = "No filename given. Using default name %s..." % name
111 asaplog.push(msg)
[411]112 name = path.expandvars(name)
[256]113 if path.isfile(name) or path.isdir(name):
114 if not overwrite:
[718]115 msg = "File %s exists." % name
116 if rcParams['verbose']:
117 print msg
118 return
119 else:
120 raise IOError(msg)
[451]121 format2 = format.upper()
122 if format2 == 'ASAP':
[116]123 self._save(name)
124 else:
[989]125 from asap._asap import stwriter as stw
[1118]126 writer = stw(format2)
127 writer.write(self, name)
[718]128 print_log()
[116]129 return
130
[102]131 def copy(self):
132 """
133 Return a copy of this scantable.
[1348]134 Note:
135 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]136 Parameters:
[113]137 none
[102]138 Example:
139 copiedscan = scan.copy()
140 """
[876]141 sd = scantable(Scantable._copy(self))
[113]142 return sd
143
[1093]144 def drop_scan(self, scanid=None):
145 """
146 Return a new scantable where the specified scan number(s) has(have)
147 been dropped.
148 Parameters:
149 scanid: a (list of) scan number(s)
150 """
151 from asap import _is_sequence_or_number as _is_valid
152 from asap import _to_list
153 from asap import unique
154 if not _is_valid(scanid):
155 if rcParams['verbose']:
156 print "Please specify a scanno to drop from the scantable"
157 return
158 else:
159 raise RuntimeError("No scan given")
160 try:
161 scanid = _to_list(scanid)
162 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
163 for sid in scanid: allscans.remove(sid)
[1118]164 if len(allscans) == 0:
165 raise ValueError("Can't remove all scans")
[1093]166 except ValueError:
167 if rcParams['verbose']:
168 print "Couldn't find any match."
169 return
170 else: raise
171 try:
172 bsel = self.get_selection()
173 sel = selector()
174 sel.set_scans(allscans)
175 self.set_selection(bsel+sel)
176 scopy = self._copy()
177 self.set_selection(bsel)
178 return scantable(scopy)
179 except RuntimeError:
[1118]180 if rcParams['verbose']:
181 print "Couldn't find any match."
182 else:
183 raise
[1093]184
185
[102]186 def get_scan(self, scanid=None):
187 """
188 Return a specific scan (by scanno) or collection of scans (by
189 source name) in a new scantable.
[1348]190 Note:
191 See scantable.drop_scan() for the inverse operation.
[102]192 Parameters:
[513]193 scanid: a (list of) scanno or a source name, unix-style
194 patterns are accepted for source name matching, e.g.
[513]195 '*_R' gets all 'ref' scans
[102]196 Example:
[513]197 # get all scans containing the source '323p459'
198 newscan = scan.get_scan('323p459')
199 # get all 'off' scans
200 refscans = scan.get_scan('*_R')
201 # get a subset of scans by scanno (as listed in scan.summary())
[1118]202 newscan = scan.get_scan([0, 2, 7, 10])
[102]203 """
204 if scanid is None:
[718]205 if rcParams['verbose']:
[1118]206 print "Please specify a scan no or name to " \
207 "retrieve from the scantable"
[718]208 return
209 else:
210 raise RuntimeError("No scan given")
211
[102]212 try:
[946]213 bsel = self.get_selection()
214 sel = selector()
[102]215 if type(scanid) is str:
[946]216 sel.set_name(scanid)
217 self.set_selection(bsel+sel)
[876]218 scopy = self._copy()
[946]219 self.set_selection(bsel)
[876]220 return scantable(scopy)
[102]221 elif type(scanid) is int:
[946]222 sel.set_scans([scanid])
223 self.set_selection(bsel+sel)
[876]224 scopy = self._copy()
[946]225 self.set_selection(bsel)
[876]226 return scantable(scopy)
[381]227 elif type(scanid) is list:
[946]228 sel.set_scans(scanid)
229 self.set_selection(sel)
[876]230 scopy = self._copy()
[946]231 self.set_selection(bsel)
[876]232 return scantable(scopy)
[381]233 else:
[718]234 msg = "Illegal scanid type, use 'int' or 'list' if ints."
235 if rcParams['verbose']:
236 print msg
237 else:
238 raise TypeError(msg)
[102]239 except RuntimeError:
[718]240 if rcParams['verbose']: print "Couldn't find any match."
241 else: raise
[102]242
243 def __str__(self):
[1118]244 return Scantable._summary(self, True)
[102]245
[976]246 def summary(self, filename=None):
[102]247 """
248 Print a summary of the contents of this scantable.
249 Parameters:
250 filename: the name of a file to write the output to
251 Default - no file output
[381]252 verbose: print extra info such as the frequency table
253 The default (False) is taken from .asaprc
[102]254 """
[976]255 info = Scantable._summary(self, True)
256 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]257 if filename is not None:
[256]258 if filename == "":
259 filename = 'scantable_summary.txt'
[415]260 from os.path import expandvars, isdir
[411]261 filename = expandvars(filename)
[415]262 if not isdir(filename):
[413]263 data = open(filename, 'w')
264 data.write(info)
265 data.close()
266 else:
[718]267 msg = "Illegal file name '%s'." % (filename)
268 if rcParams['verbose']:
269 print msg
270 else:
271 raise IOError(msg)
272 if rcParams['verbose']:
[794]273 try:
274 from IPython.genutils import page as pager
275 except ImportError:
276 from pydoc import pager
277 pager(info)
[718]278 else:
279 return info
[710]280
[946]281
282 def get_selection(self):
283 """
[1005]284 Get the selection object currently set on this scantable.
285 Parameters:
286 none
287 Example:
288 sel = scan.get_selection()
289 sel.set_ifs(0) # select IF 0
290 scan.set_selection(sel) # apply modified selection
[946]291 """
292 return selector(self._getselection())
293
[1005]294 def set_selection(self, selection=selector()):
[946]295 """
[1005]296 Select a subset of the data. All following operations on this scantable
297 are only applied to this selection.
298 Parameters:
299 selection: a selector object (default unset the selection)
300 Examples:
301 sel = selector() # create a selection object
[1118]302 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]303 scan.set_selection(sel) # set the selection
304 scan.summary() # will only print summary of scanno 0 and 3
305 scan.set_selection() # unset the selection
[946]306 """
307 self._setselection(selection)
308
[1446]309 def get_row(self, row=0, insitu=None):
310 """
311 Select a row in the scantable.
312 Return a scantable with single row.
313 Parameters:
314 row: row no of integration, default is 0.
315 insitu: if False a new scantable is returned.
316 Otherwise, the scaling is done in-situ
317 The default is taken from .asaprc (False)
318 """
319 if insitu is None: insitu = rcParams['insitu']
320 if not insitu:
321 workscan = self.copy()
322 else:
323 workscan = self
324 # Select a row
325 sel=selector()
326 sel.set_scans([workscan.getscan(row)])
327 sel.set_cycles([workscan.getcycle(row)])
328 sel.set_beams([workscan.getbeam(row)])
329 sel.set_ifs([workscan.getif(row)])
330 sel.set_polarisations([workscan.getpol(row)])
331 sel.set_name(workscan._getsourcename(row))
332 workscan.set_selection(sel)
333 if not workscan.nrow() == 1:
334 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
335 raise RuntimeError(msg)
336 del sel
337 if insitu:
338 self._assign(workscan)
339 else:
340 return workscan
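# Usage sketch for get_row (the row number is illustrative):
#   single = scan.get_row(row=3, insitu=False)   # a new scantable holding only row 3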
341
[876]342 def stats(self, stat='stddev', mask=None):
[102]343 """
[135]344 Determine the specified statistic of the current beam/if/pol
[102]345 Takes a 'mask' as an optional parameter to specify which
346 channels should be excluded.
347 Parameters:
[1517]348 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]349 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]350 mask: an optional mask specifying where the statistic
[102]351 should be determined.
352 Example:
[113]353 scan.set_unit('channel')
[1118]354 msk = scan.create_mask([100, 200], [500, 600])
[135]355 scan.stats(stat='mean', mask=msk)
[102]356 """
357 if mask is None:
[876]358 mask = []
[1118]359 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]360 if not self._check_ifs():
[1118]361 raise ValueError("Cannot apply mask as the IFs have different "
362 "number of channels. Please use setselection() "
363 "to select individual IFs")
[1530]364 rtnabc = False
365 if stat.lower().endswith('_abc'): rtnabc = True
366 getchan = False
367 if stat.lower().startswith('min') or stat.lower().startswith('max'):
[1517]368 chan = self._math._minmaxchan(self, mask, stat)
369 getchan = True
[1515]370 statvals = []
[1530]371 if not rtnabc: statvals = self._math._stats(self, mask, stat)
372
[876]373 out = ''
374 axes = []
375 for i in range(self.nrow()):
376 axis = []
377 axis.append(self.getscan(i))
378 axis.append(self.getbeam(i))
379 axis.append(self.getif(i))
380 axis.append(self.getpol(i))
381 axis.append(self.getcycle(i))
382 axes.append(axis)
383 tm = self._gettime(i)
384 src = self._getsourcename(i)
[1530]385 refstr = ''
386 statunit= ''
[1517]387 if getchan:
388 qx, qy = self.chan2data(rowno=i, chan=chan[i])
[1530]389 if rtnabc:
390 statvals.append(qx['value'])
391 refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
392 statunit= '['+qx['unit']+']'
393 else:
394 refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
395 #statunit= ' ['+qy['unit']+']'
[876]396 out += 'Scan[%d] (%s) ' % (axis[0], src)
397 out += 'Time[%s]:\n' % (tm)
398 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
399 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
400 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
[1530]401 out += '= %3.3f ' % (statvals[i]) +refstr+'\n'
[876]402 out += "--------------------------------------------------\n"
[256]403
[876]404 if rcParams['verbose']:
405 print "--------------------------------------------------"
[1530]406 print " ", stat, statunit
[876]407 print "--------------------------------------------------"
408 print out
[1295]409 #else:
410 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
411 # 'axes' : axes,
412 # 'data': statvals}
413 return statvals
[102]414
[1517]415 def chan2data(self, rowno=0, chan=0):
[1515]416 """
[1517]417 Returns channel/frequency/velocity and spectral value
[1515]418 at an arbitrary row and channel in the scantable.
419 Parameters:
420 rowno: a row number in the scantable. Default is the
421 first row, i.e. rowno=0
[1517]422 chan: a channel in the scantable. Default is the first
[1515]423 channel, i.e. chan=0
424 """
[1517]425 if isinstance(rowno, int) and isinstance(chan, int):
[1530]426 qx = {'unit': self.get_unit(),
427 'value': self._getabcissa(rowno)[chan]}
[1517]428 qy = {'unit': self.get_fluxunit(),
429 'value': self._getspectrum(rowno)[chan]}
430 return qx, qy
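# Usage sketch for chan2data (row/channel values are illustrative):
#   qx, qy = scan.chan2data(rowno=0, chan=510)
#   # qx == {'unit': scan.get_unit(),     'value': <abscissa at channel 510>}
#   # qy == {'unit': scan.get_fluxunit(), 'value': <spectrum value at channel 510>}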
[1515]431
[1118]432 def stddev(self, mask=None):
[135]433 """
434 Determine the standard deviation of the current beam/if/pol
435 Takes a 'mask' as an optional parameter to specify which
436 channels should be excluded.
437 Parameters:
438 mask: an optional mask specifying where the standard
439 deviation should be determined.
440
441 Example:
442 scan.set_unit('channel')
[1118]443 msk = scan.create_mask([100, 200], [500, 600])
[135]444 scan.stddev(mask=msk)
445 """
[1118]446 return self.stats(stat='stddev', mask=mask)
[135]447
[1003]448
[1259]449 def get_column_names(self):
[1003]450 """
451 Return a list of column names, which can be used for selection.
452 """
[1259]453 return list(Scantable.get_column_names(self))
[1003]454
[876]455 def get_tsys(self):
[113]456 """
457 Return the System temperatures.
458 Returns:
[876]459 a list of Tsys values for the current selection
[113]460 """
[256]461
[876]462 return self._row_callback(self._gettsys, "Tsys")
[256]463
[876]464 def _row_callback(self, callback, label):
465 axes = []
[1118]466 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]467 out = ""
[1118]468 outvec = []
[876]469 for i in range(self.nrow()):
470 axis = []
471 axis.append(self.getscan(i))
472 axis.append(self.getbeam(i))
473 axis.append(self.getif(i))
474 axis.append(self.getpol(i))
475 axis.append(self.getcycle(i))
476 axes.append(axis)
477 tm = self._gettime(i)
478 src = self._getsourcename(i)
479 out += 'Scan[%d] (%s) ' % (axis[0], src)
480 out += 'Time[%s]:\n' % (tm)
481 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
482 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
483 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
484 outvec.append(callback(i))
485 out += '= %3.3f\n' % (outvec[i])
486 out += "--------------------------------------------------\n"
487 if rcParams['verbose']:
488 print "--------------------------------------------------"
489 print " %s" % (label)
490 print "--------------------------------------------------"
491 print out
[1175]492 # disabled because the vector seems more useful
493 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
494 return outvec
[256]495
[1070]496 def _get_column(self, callback, row=-1):
497 """
498 """
499 if row == -1:
500 return [callback(i) for i in range(self.nrow())]
501 else:
502 if 0 <= row < self.nrow():
503 return callback(row)
[256]504
[1070]505
[1348]506 def get_time(self, row=-1, asdatetime=False):
[113]507 """
508 Get a list of time stamps for the observations.
[1348]509 Return a datetime object for each integration time stamp in the scantable.
[113]510 Parameters:
[1348]511 row: row no of integration. Default -1 return all rows
512 asdatetime: return values as datetime objects rather than strings
[113]513 Example:
514 none
515 """
[1175]516 from time import strptime
517 from datetime import datetime
[1457]518 times = self._get_column(self._gettime, row)
[1348]519 if not asdatetime:
[1457]520 return times
[1175]521 format = "%Y/%m/%d/%H:%M:%S"
522 if isinstance(times, list):
523 return [datetime(*strptime(i, format)[:6]) for i in times]
524 else:
525 return datetime(*strptime(times, format)[:6])
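# Usage sketch for get_time:
#   stamps = scan.get_time()                     # list of 'YYYY/MM/DD/HH:MM:SS' strings
#   t0 = scan.get_time(row=0, asdatetime=True)   # a single datetime.datetime object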
[102]526
[1348]527
528 def get_inttime(self, row=-1):
529 """
530 Get a list of integration times for the observations.
531 Return a time in seconds for each integration in the scantable.
532 Parameters:
533 row: row no of integration. Default -1 return all rows.
534 Example:
535 none
536 """
537 return self._get_column(self._getinttime, row)
538
539
[714]540 def get_sourcename(self, row=-1):
541 """
[794]542 Get a list of source names for the observations.
[714]543 Return a string for each integration in the scantable.
544 Parameters:
[1348]545 row: row no of integration. Default -1 return all rows.
[714]546 Example:
547 none
548 """
[1070]549 return self._get_column(self._getsourcename, row)
[714]550
[794]551 def get_elevation(self, row=-1):
552 """
553 Get a list of elevations for the observations.
554 Return a float for each integration in the scantable.
555 Parameters:
[1348]556 row: row no of integration. Default -1 return all rows.
[794]557 Example:
558 none
559 """
[1070]560 return self._get_column(self._getelevation, row)
[794]561
562 def get_azimuth(self, row=-1):
563 """
564 Get a list of azimuths for the observations.
565 Return a float for each integration in the scantable.
566 Parameters:
[1348]567 row: row no of integration. Default -1 return all rows.
[794]568 Example:
569 none
570 """
[1070]571 return self._get_column(self._getazimuth, row)
[794]572
573 def get_parangle(self, row=-1):
574 """
575 Get a list of parallactic angles for the observations.
576 Return a float for each integration in the scantable.
577 Parameters:
[1348]578 row: row no of integration. Default -1 return all rows.
[794]579 Example:
580 none
581 """
[1070]582 return self._get_column(self._getparangle, row)
[794]583
[1070]584 def get_direction(self, row=-1):
585 """
586 Get a list of Positions on the sky (direction) for the observations.
587 Return a float for each integration in the scantable.
588 Parameters:
589 row: row no of integration. Default -1 return all rows
590 Example:
591 none
592 """
593 return self._get_column(self._getdirection, row)
594
[1389]595 def get_directionval(self, row=-1):
596 """
597 Get a list of Positions on the sky (direction) for the observations.
598 Return a float for each integration in the scantable.
599 Parameters:
600 row: row no of integration. Default -1 return all rows
601 Example:
602 none
603 """
604 return self._get_column(self._getdirectionvec, row)
605
[102]606 def set_unit(self, unit='channel'):
607 """
608 Set the unit for all following operations on this scantable
609 Parameters:
610 unit: optional unit, default is 'channel'
[1118]611 one of '*Hz', 'km/s', 'channel', ''
[102]612 """
[484]613 varlist = vars()
[1118]614 if unit in ['', 'pixel', 'channel']:
[113]615 unit = ''
616 inf = list(self._getcoordinfo())
617 inf[0] = unit
618 self._setcoordinfo(inf)
[1118]619 self._add_history("set_unit", varlist)
[113]620
[484]621 def set_instrument(self, instr):
[358]622 """
[1348]623 Set the instrument for subsequent processing.
[358]624 Parameters:
[710]625 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]626 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]627 """
628 self._setInstrument(instr)
[1118]629 self._add_history("set_instrument", vars())
[718]630 print_log()
[358]631
[1190]632 def set_feedtype(self, feedtype):
633 """
634 Overwrite the feed type, which might not be set correctly.
635 Parameters:
636 feedtype: 'linear' or 'circular'
637 """
638 self._setfeedtype(feedtype)
639 self._add_history("set_feedtype", vars())
640 print_log()
641
[276]642 def set_doppler(self, doppler='RADIO'):
643 """
644 Set the doppler for all following operations on this scantable.
645 Parameters:
646 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
647 """
[484]648 varlist = vars()
[276]649 inf = list(self._getcoordinfo())
650 inf[2] = doppler
651 self._setcoordinfo(inf)
[1118]652 self._add_history("set_doppler", vars())
[718]653 print_log()
[710]654
[226]655 def set_freqframe(self, frame=None):
[113]656 """
657 Set the frame type of the Spectral Axis.
658 Parameters:
[591]659 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]660 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
661 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]662 Examples:
663 scan.set_freqframe('BARY')
664 """
[484]665 if frame is None: frame = rcParams['scantable.freqframe']
666 varlist = vars()
[1118]667 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
668 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]669
[989]670 if frame in valid:
[113]671 inf = list(self._getcoordinfo())
672 inf[1] = frame
673 self._setcoordinfo(inf)
[1118]674 self._add_history("set_freqframe", varlist)
[102]675 else:
[1118]676 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]677 if rcParams['verbose']:
678 print msg
679 else:
680 raise TypeError(msg)
681 print_log()
[710]682
[989]683 def set_dirframe(self, frame=""):
684 """
685 Set the frame type of the Direction on the sky.
686 Parameters:
687 frame: an optional frame type, default ''. Valid frames are:
688 'J2000', 'B1950', 'GALACTIC'
689 Examples:
690 scan.set_dirframe('GALACTIC')
691 """
692 varlist = vars()
693 try:
694 Scantable.set_dirframe(self, frame)
[1118]695 except RuntimeError, msg:
[989]696 if rcParams['verbose']:
697 print msg
698 else:
699 raise
[1118]700 self._add_history("set_dirframe", varlist)
[989]701
[113]702 def get_unit(self):
703 """
704 Get the default unit set in this scantable
705 Returns:
706 A unit string
707 """
708 inf = self._getcoordinfo()
709 unit = inf[0]
710 if unit == '': unit = 'channel'
711 return unit
[102]712
[158]713 def get_abcissa(self, rowno=0):
[102]714 """
[158]715 Get the abcissa in the current coordinate setup for the currently
[113]716 selected Beam/IF/Pol
717 Parameters:
[226]718 rowno: an optional row number in the scantable. Default is the
719 first row, i.e. rowno=0
[113]720 Returns:
[1348]721 The abcissa values and the format string (as a tuple)
[113]722 """
[256]723 abc = self._getabcissa(rowno)
[710]724 lbl = self._getabcissalabel(rowno)
[718]725 print_log()
[158]726 return abc, lbl
[113]727
[1401]728 def flag(self, mask=None, unflag=False):
[1001]729 """
730 Flag the selected data using an optional channel mask.
731 Parameters:
732 mask: an optional channel mask, created with create_mask. Default
733 (no mask) is all channels.
[1401]734 unflag: if True, unflag the data
[1001]735 """
736 varlist = vars()
[1118]737 if mask is None:
738 mask = []
[1001]739 try:
[1401]740 self._flag(mask, unflag)
[1118]741 except RuntimeError, msg:
[1001]742 if rcParams['verbose']:
743 print msg
744 return
745 else: raise
746 self._add_history("flag", varlist)
747
[1203]748 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]749 """
750 Flag the data in 'lag' space by providing a frequency to remove.
751 Flagged data in the scantable gets set to 0.0 before the fft.
752 No taper is applied.
753 Parameters:
[1348]754 frequency: the frequency (really a period within the bandwidth)
755 to remove
756 width: the width of the frequency to remove, to remove a
757 range of frequencies around the centre.
[1203]758 unit: the frequency unit (default "GHz")
759 Notes:
[1348]760 It is recommended to flag edges of the band or strong
761 signals beforehand.
[1192]762 """
763 if insitu is None: insitu = rcParams['insitu']
764 self._math._setinsitu(insitu)
765 varlist = vars()
[1370]766 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]767 if not base.has_key(unit):
768 raise ValueError("%s is not a valid unit." % unit)
769 try:
[1200]770 s = scantable(self._math._lag_flag(self, frequency*base[unit],
771 width*base[unit]))
[1192]772 except RuntimeError, msg:
773 if rcParams['verbose']:
774 print msg
775 return
776 else: raise
777 s._add_history("lag_flag", varlist)
778 print_log()
779 if insitu:
780 self._assign(s)
781 else:
782 return s
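# Usage sketch for lag_flag (frequency/width values are illustrative):
#   cleaned = scan.lag_flag(frequency=6.0, width=0.1, unit="MHz", insitu=False)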
[1001]783
[1192]784
[113]785 def create_mask(self, *args, **kwargs):
786 """
[1118]787 Compute and return a mask based on [min, max] windows.
[189]788 The specified windows are to be INCLUDED, when the mask is
[113]789 applied.
[102]790 Parameters:
[1118]791 [min, max], [min2, max2], ...
[1024]792 Pairs of start/end points (inclusive) specifying the regions
[102]793 to be masked
[189]794 invert: optional argument. If specified as True,
795 return an inverted mask, i.e. the regions
796 specified are EXCLUDED
[513]797 row: create the mask using the specified row for
798 unit conversions, default is row=0
799 only necessary if frequency varies over rows.
[102]800 Example:
[113]801 scan.set_unit('channel')
802 a)
[1118]803 msk = scan.create_mask([400, 500], [800, 900])
[189]804 # masks everything outside 400 and 500
[113]805 # and 800 and 900 in the unit 'channel'
806
807 b)
[1118]808 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]809 # masks the regions between 400 and 500
[113]810 # and 800 and 900 in the unit 'channel'
[1024]811 c)
812 mask only channel 400
[1118]813 msk = scan.create_mask([400, 400])
[102]814 """
[513]815 row = 0
816 if kwargs.has_key("row"):
817 row = kwargs.get("row")
818 data = self._getabcissa(row)
[113]819 u = self._getcoordinfo()[0]
[718]820 if rcParams['verbose']:
[113]821 if u == "": u = "channel"
[718]822 msg = "The current mask window unit is %s" % u
[1118]823 i = self._check_ifs()
824 if not i:
[876]825 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]826 asaplog.push(msg)
[102]827 n = self.nchan()
[1295]828 msk = _n_bools(n, False)
[710]829 # test if args is a 'list' or a 'normal *args - UGLY!!!
830
[1118]831 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
832 and args or args[0]
[710]833 for window in ws:
[102]834 if (len(window) != 2 or window[0] > window[1] ):
[1118]835 raise TypeError("A window needs to be defined as [min, max]")
[102]836 for i in range(n):
[1024]837 if data[i] >= window[0] and data[i] <= window[1]:
[1295]838 msk[i] = True
[113]839 if kwargs.has_key('invert'):
840 if kwargs.get('invert'):
[1295]841 msk = mask_not(msk)
[718]842 print_log()
[102]843 return msk
[710]844
[1446]845 def get_masklist(self, mask=None, row=0):
[256]846 """
[1446]847 Compute and return a list of mask windows, [min, max].
848 Parameters:
849 mask: channel mask, created with create_mask.
850 row: calculate the masklist using the specified row
851 for unit conversions, default is row=0
852 only necessary if frequency varies over rows.
853 Returns:
854 [min, max], [min2, max2], ...
855 Pairs of start/end points (inclusive) specifying
856 the masked regions
857 """
858 if not (isinstance(mask,list) or isinstance(mask, tuple)):
859 raise TypeError("The mask should be list or tuple.")
860 if len(mask) < 2:
861 raise TypeError("The mask should contain at least 2 elements")
862 if self.nchan() != len(mask):
863 msg = "Number of channels in scantable != number of mask elements"
864 raise TypeError(msg)
865 data = self._getabcissa(row)
866 u = self._getcoordinfo()[0]
867 if rcParams['verbose']:
868 if u == "": u = "channel"
869 msg = "The current mask window unit is %s" % u
870 i = self._check_ifs()
871 if not i:
872 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
873 asaplog.push(msg)
874 masklist=[]
875 ist, ien = None, None
876 ist, ien=self.get_mask_indices(mask)
877 if ist is not None and ien is not None:
878 for i in xrange(len(ist)):
879 range=[data[ist[i]],data[ien[i]]]
880 range.sort()
881 masklist.append([range[0],range[1]])
882 return masklist
883
884 def get_mask_indices(self, mask=None):
885 """
886 Compute and Return lists of mask start indices and mask end indices.
887 Parameters:
888 mask: channel mask, created with create_mask.
889 Returns:
890 List of mask start indices and that of mask end indices,
891 i.e., [istart1,istart2,....], [iend1,iend2,....].
892 """
893 if not (isinstance(mask,list) or isinstance(mask, tuple)):
894 raise TypeError("The mask should be list or tuple.")
895 if len(mask) < 2:
896 raise TypeError("The mask should contain at least 2 elements")
897 istart=[]
898 iend=[]
899 if mask[0]: istart.append(0)
900 for i in range(len(mask)-1):
901 if not mask[i] and mask[i+1]:
902 istart.append(i+1)
903 elif mask[i] and not mask[i+1]:
904 iend.append(i)
905 if mask[len(mask)-1]: iend.append(len(mask)-1)
906 if len(istart) != len(iend):
907 raise RuntimeError("Numbers of mask start != mask end.")
908 for i in range(len(istart)):
909 if istart[i] > iend[i]:
910 raise RuntimeError("Mask start index > mask end index")
911 break
912 return istart,iend
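# Worked sketch for get_mask_indices on a small hand-made mask:
#   mask = [False, True, True, False, True]
#   scan.get_mask_indices(mask)   # -> ([1, 4], [2, 4])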
913
914# def get_restfreqs(self):
915# """
916# Get the restfrequency(s) stored in this scantable.
917# The return value(s) are always of unit 'Hz'
918# Parameters:
919# none
920# Returns:
921# a list of doubles
922# """
923# return list(self._getrestfreqs())
924
925 def get_restfreqs(self, ids=None):
926 """
[256]927 Get the restfrequency(s) stored in this scantable.
928 The return value(s) are always of unit 'Hz'
929 Parameters:
[1446]930 ids: (optional) a list of MOLECULE_ID for which the restfrequency(s) are to
931 be retrieved
[256]932 Returns:
[1446]933 dictionary containing ids and a list of doubles for each id
[256]934 """
[1446]935 if ids is None:
936 rfreqs={}
937 idlist = self.getmolnos()
938 for i in idlist:
939 rfreqs[i]=list(self._getrestfreqs(i))
940 return rfreqs
941 else:
942 if type(ids)==list or type(ids)==tuple:
943 rfreqs={}
944 for i in ids:
945 rfreqs[i]=list(self._getrestfreqs(i))
946 return rfreqs
947 else:
948 return list(self._getrestfreqs(ids))
949 #return list(self._getrestfreqs(ids))
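# Usage sketch for get_restfreqs (frequency values are illustrative, unit is Hz):
#   scan.get_restfreqs()        # -> {0: [1.4204058e9], 1: [...], ...} keyed by MOLECULE_ID
#   scan.get_restfreqs(ids=0)   # -> [1.4204058e9], a plain list for a single id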
[102]950
[931]951 def set_restfreqs(self, freqs=None, unit='Hz'):
952 """
[1446]953 ********NEED TO BE UPDATED begin************
[931]954 Set or replace the restfrequency specified and
955 If the 'freqs' argument holds a scalar,
956 then that rest frequency will be applied to all the selected
957 data. If the 'freqs' argument holds
958 a vector, then it MUST be of equal or smaller length than
959 the number of IFs (and the available restfrequencies will be
960 replaced by this vector). In this case, *all* data have
961 the restfrequency set per IF according
962 to the corresponding value you give in the 'freqs' vector.
[1118]963 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]964 IF 1 gets restfreq 2e9.
[1446]965 ********NEED TO BE UPDATED end************
[1153]966 You can also specify the frequencies via a linecatalog/
967
[931]968 Parameters:
969 freqs: list of rest frequency values or string identifiers
970 unit: unit for rest frequency (default 'Hz')
[402]971
[931]972 Example:
[1446]973 # set the given restfrequency for the all currently selected IFs
[931]974 scan.set_restfreqs(freqs=1.4e9)
[1446]975 # set multiple restfrequencies to all the selected data
976 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
977 # If the number of IFs in the data is >= 2 the IF0 gets the first
978 # value IF1 the second... NOTE that freqs needs to be
979 # specified in list of list (e.g. [[],[],...] ).
980 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]981 #set the given restfrequency for the whole table (by name)
982 scan.set_restfreqs(freqs="OH1667")
[391]983
[931]984 Note:
985 To do more sophisticated rest-frequency setting, e.g. on a
986 source and IF basis, use scantable.set_selection() before using
987 this function.
988 # provided your scantable is called scan
989 selection = selector()
990 selection.set_name("ORION*")
991 selection.set_ifs([1])
992 scan.set_selection(selection)
993 scan.set_restfreqs(freqs=86.6e9)
994
995 """
996 varlist = vars()
[1157]997 from asap import linecatalog
998 # simple value
[1118]999 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]1000 # TT mod
1001 #self._setrestfreqs(freqs, "",unit)
1002 self._setrestfreqs([freqs], [""],unit)
[1157]1003 # list of values
[1118]1004 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1005 # list values are scalars
[1118]1006 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]1007 self._setrestfreqs(freqs, [""],unit)
[1157]1008 # list values are tuples, (value, name)
1009 elif isinstance(freqs[-1], dict):
[1446]1010 #sel = selector()
1011 #savesel = self._getselection()
1012 #iflist = self.getifnos()
1013 #for i in xrange(len(freqs)):
1014 # sel.set_ifs(iflist[i])
1015 # self._setselection(sel)
1016 # self._setrestfreqs(freqs[i], "",unit)
1017 #self._setselection(savesel)
1018 self._setrestfreqs([d["value"] for d in freqs],
1019 [d["name"] for d in freqs], "MHz")
1020 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1021 sel = selector()
1022 savesel = self._getselection()
[1322]1023 iflist = self.getifnos()
[1446]1024 if len(freqs)>len(iflist):
1025 raise ValueError("number of elements in list of lists exceeds the current IF selections")
[1157]1026 for i in xrange(len(freqs)):
[1322]1027 sel.set_ifs(iflist[i])
[1259]1028 self._setselection(sel)
[1157]1029 self._setrestfreqs(freqs[i]["value"],
1030 freqs[i]["name"], "MHz")
1031 self._setselection(savesel)
1032 # freqs are to be taken from a linecatalog
[1153]1033 elif isinstance(freqs, linecatalog):
1034 sel = selector()
1035 savesel = self._getselection()
iflist = self.getifnos()   # IFs to assign the catalogue frequencies to
1036 for i in xrange(freqs.nrow()):
[1322]1037 sel.set_ifs(iflist[i])
[1153]1038 self._setselection(sel)
1039 self._setrestfreqs(freqs.get_frequency(i),
1040 freqs.get_name(i), "MHz")
1041 # ensure that we are not iterating past nIF
1042 if i == self.nif()-1: break
1043 self._setselection(savesel)
[931]1044 else:
1045 return
1046 self._add_history("set_restfreqs", varlist)
1047
[1360]1048 def shift_refpix(self, delta):
1049 """
1050 Shift the reference pixel of the Spectral Coordinate by an
1051 integer amount.
1052 Parameters:
1053 delta: the amount to shift by
1054 Note:
1055 Be careful using this with broadband data.
1056 """
1057 Scantable.shift(self, delta)
[931]1058
[1259]1059 def history(self, filename=None):
1060 """
1061 Print the history. Optionally to a file.
[1348]1062 Parameters:
1063 filename: The name of the file to save the history to.
[1259]1064 """
[484]1065 hist = list(self._gethistory())
[794]1066 out = "-"*80
[484]1067 for h in hist:
[489]1068 if h.startswith("---"):
[794]1069 out += "\n"+h
[489]1070 else:
1071 items = h.split("##")
1072 date = items[0]
1073 func = items[1]
1074 items = items[2:]
[794]1075 out += "\n"+date+"\n"
1076 out += "Function: %s\n Parameters:" % (func)
[489]1077 for i in items:
1078 s = i.split("=")
[1118]1079 out += "\n %s = %s" % (s[0], s[1])
[794]1080 out += "\n"+"-"*80
[1259]1081 if filename is not None:
1082 if filename == "":
1083 filename = 'scantable_history.txt'
1084 import os
1085 filename = os.path.expandvars(os.path.expanduser(filename))
1086 if not os.path.isdir(filename):
1087 data = open(filename, 'w')
1088 data.write(out)
1089 data.close()
1090 else:
1091 msg = "Illegal file name '%s'." % (filename)
1092 if rcParams['verbose']:
1093 print msg
1094 else:
1095 raise IOError(msg)
1096 if rcParams['verbose']:
1097 try:
1098 from IPython.genutils import page as pager
1099 except ImportError:
1100 from pydoc import pager
1101 pager(out)
1102 else:
1103 return out
[484]1104 return
[513]1105 #
1106 # Maths business
1107 #
1108
[931]1109 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1110 """
[1070]1111 Return the (time) weighted average of a scan.
[513]1112 Note:
[1070]1113 in channels only - align if necessary
[513]1114 Parameters:
1115 mask: an optional mask (only used for 'var' and 'tsys'
1116 weighting)
[558]1117 scanav: True averages each scan separately
1118 False (default) averages all scans together,
[1099]1119 weight: Weighting scheme.
1120 'none' (mean no weight)
1121 'var' (1/var(spec) weighted)
1122 'tsys' (1/Tsys**2 weighted)
1123 'tint' (integration time weighted)
1124 'tintsys' (Tint/Tsys**2)
1125 'median' ( median averaging)
[535]1126 The default is 'tint'
[931]1127 align: align the spectra in velocity before averaging. It takes
1128 the time of the first spectrum as reference time.
[513]1129 Example:
1130 # time average the scantable without using a mask
[710]1131 newscan = scan.average_time()
[513]1132 """
1133 varlist = vars()
[976]1134 if weight is None: weight = 'TINT'
[513]1135 if mask is None: mask = ()
[1099]1136 if scanav: scanav = "SCAN"
1137 else: scanav = "NONE"
[1118]1138 scan = (self, )
[989]1139 try:
[1118]1140 if align:
1141 scan = (self.freq_align(insitu=False), )
1142 s = None
1143 if weight.upper() == 'MEDIAN':
1144 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1145 scanav))
1146 else:
1147 s = scantable(self._math._average(scan, mask, weight.upper(),
1148 scanav))
1149 except RuntimeError, msg:
[989]1150 if rcParams['verbose']:
1151 print msg
1152 return
1153 else: raise
[1099]1154 s._add_history("average_time", varlist)
[718]1155 print_log()
[513]1156 return s
[710]1157
[876]1158 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1159 """
1160 Return a scan where all spectra are converted to either
1161 Jansky or Kelvin depending upon the flux units of the scan table.
1162 By default the function tries to look the values up internally.
1163 If it can't find them (or if you want to over-ride), you must
1164 specify EITHER jyperk OR eta (and D which it will try to look up
1165 also if you don't set it). jyperk takes precedence if you set both.
1166 Parameters:
1167 jyperk: the Jy / K conversion factor
1168 eta: the aperture efficiency
1169 d: the geometric diameter (metres)
1170 insitu: if False a new scantable is returned.
1171 Otherwise, the scaling is done in-situ
1172 The default is taken from .asaprc (False)
1173 """
1174 if insitu is None: insitu = rcParams['insitu']
[876]1175 self._math._setinsitu(insitu)
[513]1176 varlist = vars()
1177 if jyperk is None: jyperk = -1.0
1178 if d is None: d = -1.0
1179 if eta is None: eta = -1.0
[876]1180 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1181 s._add_history("convert_flux", varlist)
1182 print_log()
1183 if insitu: self._assign(s)
1184 else: return s
[513]1185
[876]1186 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1187 """
1188 Return a scan after applying a gain-elevation correction.
1189 The correction can be made via either a polynomial or a
1190 table-based interpolation (and extrapolation if necessary).
1191 You specify polynomial coefficients, an ascii table or neither.
1192 If you specify neither, then a polynomial correction will be made
1193 with built in coefficients known for certain telescopes (an error
1194 will occur if the instrument is not known).
1195 The data and Tsys are *divided* by the scaling factors.
1196 Parameters:
1197 poly: Polynomial coefficients (default None) to compute a
1198 gain-elevation correction as a function of
1199 elevation (in degrees).
1200 filename: The name of an ascii file holding correction factors.
1201 The first row of the ascii file must give the column
1202 names and these MUST include columns
1203 "ELEVATION" (degrees) and "FACTOR" (multiply data
1204 by this) somewhere.
1205 The second row must give the data type of the
1206 column. Use 'R' for Real and 'I' for Integer.
1207 An example file would be
1208 (actual factors are arbitrary) :
1209
1210 TIME ELEVATION FACTOR
1211 R R R
1212 0.1 0 0.8
1213 0.2 20 0.85
1214 0.3 40 0.9
1215 0.4 60 0.85
1216 0.5 80 0.8
1217 0.6 90 0.75
1218 method: Interpolation method when correcting from a table.
1219 Values are "nearest", "linear" (default), "cubic"
1220 and "spline"
1221 insitu: if False a new scantable is returned.
1222 Otherwise, the scaling is done in-situ
1223 The default is taken from .asaprc (False)
1224 """
1225
1226 if insitu is None: insitu = rcParams['insitu']
[876]1227 self._math._setinsitu(insitu)
[513]1228 varlist = vars()
1229 if poly is None:
[1118]1230 poly = ()
[513]1231 from os.path import expandvars
1232 filename = expandvars(filename)
[876]1233 s = scantable(self._math._gainel(self, poly, filename, method))
1234 s._add_history("gain_el", varlist)
1235 print_log()
1236 if insitu: self._assign(s)
1237 else: return s
[710]1238
[931]1239 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1240 """
1241 Return a scan where all rows have been aligned in frequency/velocity.
1242 The alignment frequency frame (e.g. LSRK) is that set by function
1243 set_freqframe.
1244 Parameters:
1245 reftime: reference time to align at. By default, the time of
1246 the first row of data is used.
1247 method: Interpolation method for regridding the spectra.
1248 Choose from "nearest", "linear", "cubic" (default)
1249 and "spline"
1250 insitu: if False a new scantable is returned.
1251 Otherwise, the scaling is done in-situ
1252 The default is taken from .asaprc (False)
1253 """
[931]1254 if insitu is None: insitu = rcParams["insitu"]
[876]1255 self._math._setinsitu(insitu)
[513]1256 varlist = vars()
[931]1257 if reftime is None: reftime = ""
1258 s = scantable(self._math._freq_align(self, reftime, method))
[876]1259 s._add_history("freq_align", varlist)
1260 print_log()
1261 if insitu: self._assign(s)
1262 else: return s
[513]1263
[876]1264 def opacity(self, tau, insitu=None):
[513]1265 """
1266 Apply an opacity correction. The data
1267 and Tsys are multiplied by the correction factor.
1268 Parameters:
1269 tau: Opacity from which the correction factor is
1270 exp(tau*ZD)
1271 where ZD is the zenith-distance
1272 insitu: if False a new scantable is returned.
1273 Otherwise, the scaling is done in-situ
1274 The default is taken from .asaprc (False)
1275 """
1276 if insitu is None: insitu = rcParams['insitu']
[876]1277 self._math._setinsitu(insitu)
[513]1278 varlist = vars()
[876]1279 s = scantable(self._math._opacity(self, tau))
1280 s._add_history("opacity", varlist)
1281 print_log()
1282 if insitu: self._assign(s)
1283 else: return s
[513]1284
1285 def bin(self, width=5, insitu=None):
1286 """
1287 Return a scan where all spectra have been binned up.
[1348]1288 Parameters:
[513]1289 width: The bin width (default=5) in pixels
1290 insitu: if False a new scantable is returned.
1291 Otherwise, the scaling is done in-situ
1292 The default is taken from .asaprc (False)
1293 """
1294 if insitu is None: insitu = rcParams['insitu']
[876]1295 self._math._setinsitu(insitu)
[513]1296 varlist = vars()
[876]1297 s = scantable(self._math._bin(self, width))
[1118]1298 s._add_history("bin", varlist)
[876]1299 print_log()
1300 if insitu: self._assign(s)
1301 else: return s
[513]1302
[710]1303
[513]1304 def resample(self, width=5, method='cubic', insitu=None):
1305 """
[1348]1306 Return a scan where all spectra have been resampled onto a new grid.
1307
1308 Parameters:
[513]1309 width: The bin width (default=5) in pixels
1310 method: Interpolation method used for the resampling.
1311 Values are "nearest", "linear", "cubic" (default)
1312 and "spline"
1313 insitu: if False a new scantable is returned.
1314 Otherwise, the scaling is done in-situ
1315 The default is taken from .asaprc (False)
1316 """
1317 if insitu is None: insitu = rcParams['insitu']
[876]1318 self._math._setinsitu(insitu)
[513]1319 varlist = vars()
[876]1320 s = scantable(self._math._resample(self, method, width))
[1118]1321 s._add_history("resample", varlist)
[876]1322 print_log()
1323 if insitu: self._assign(s)
1324 else: return s
[513]1325
1326
[946]1327 def average_pol(self, mask=None, weight='none'):
1328 """
1329 Average the Polarisations together.
1330 Parameters:
1331 mask: An optional mask defining the region, where the
1332 averaging will be applied. The output will have all
1333 specified points masked.
1334 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1335 weighted), or 'tsys' (1/Tsys**2 weighted)
1336 """
1337 varlist = vars()
1338 if mask is None:
1339 mask = ()
[1010]1340 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1341 s._add_history("average_pol", varlist)
[946]1342 print_log()
[992]1343 return s
[513]1344
[1145]1345 def average_beam(self, mask=None, weight='none'):
1346 """
1347 Average the Beams together.
1348 Parameters:
1349 mask: An optional mask defining the region, where the
1350 averaging will be applied. The output will have all
1351 specified points masked.
1352 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1353 weighted), or 'tsys' (1/Tsys**2 weighted)
1354 """
1355 varlist = vars()
1356 if mask is None:
1357 mask = ()
1358 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1359 s._add_history("average_beam", varlist)
1360 print_log()
1361 return s
1362
[992]1363 def convert_pol(self, poltype=None):
1364 """
1365 Convert the data to a different polarisation type.
1366 Parameters:
1367 poltype: The new polarisation type. Valid types are:
1368 "linear", "stokes" and "circular"
1369 """
1370 varlist = vars()
1371 try:
1372 s = scantable(self._math._convertpol(self, poltype))
[1118]1373 except RuntimeError, msg:
[992]1374 if rcParams['verbose']:
[1118]1375 print msg
1376 return
[992]1377 else:
1378 raise
[1118]1379 s._add_history("convert_pol", varlist)
[992]1380 print_log()
1381 return s
1382
[876]1383 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1384 """
1385 Smooth the spectrum by the specified kernel (conserving flux).
1386 Parameters:
1387 kernel: The type of smoothing kernel. Select from
[1373]1388 'hanning' (default), 'gaussian', 'boxcar' and
1389 'rmedian'
[513]1390 width: The width of the kernel in pixels. For hanning this is
1391 ignored, otherwise it defaults to 5 pixels.
1392 For 'gaussian' it is the Full Width Half
1393 Maximum. For 'boxcar' it is the full width.
[1373]1394 For 'rmedian' it is the half width.
[513]1395 insitu: if False a new scantable is returned.
1396 Otherwise, the scaling is done in-situ
1397 The default is taken from .asaprc (False)
1398 Example:
1399 none
1400 """
1401 if insitu is None: insitu = rcParams['insitu']
[876]1402 self._math._setinsitu(insitu)
[513]1403 varlist = vars()
[1118]1404 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1405 s._add_history("smooth", varlist)
1406 print_log()
1407 if insitu: self._assign(s)
1408 else: return s
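# Usage sketch for smooth (kernel/width values are illustrative):
#   sm = scan.smooth(kernel='boxcar', width=8, insitu=False)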
[513]1409
[876]1410
[1389]1411 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1412 """
1413 Return a scan which has been baselined (all rows) by a polynomial.
1414 Parameters:
[794]1415 mask: an optional mask
1416 order: the order of the polynomial (default is 0)
[1061]1417 plot: plot the fit and the residual. In this mode each
1418 individual fit has to be approved, by typing 'y'
1419 or 'n'
[1389]1420 uselin: use linear polynomial fit
[794]1421 insitu: if False a new scantable is returned.
1422 Otherwise, the scaling is done in-situ
1423 The default is taken from .asaprc (False)
[513]1424 Example:
1425 # return a scan baselined by a third order polynomial,
1426 # not using a mask
1427 bscan = scan.poly_baseline(order=3)
[579]1428 """
[513]1429 if insitu is None: insitu = rcParams['insitu']
1430 varlist = vars()
1431 if mask is None:
[1295]1432 mask = [True for i in xrange(self.nchan(-1))]
[513]1433 from asap.asapfitter import fitter
[1217]1434 try:
1435 f = fitter()
1436 f.set_scan(self, mask)
[1389]1437 if uselin:
1438 f.set_function(lpoly=order)
1439 else:
1440 f.set_function(poly=order)
[1217]1441 s = f.auto_fit(insitu, plot=plot)
[1446]1442 # Save parameters of baseline fits as a class attribute.
1443 # NOTICE: It does not reflect changes in scantable!
1444 self.blpars = f.blpars
[1217]1445 s._add_history("poly_baseline", varlist)
1446 print_log()
1447 if insitu: self._assign(s)
1448 else: return s
1449 except RuntimeError:
1450 msg = "The fit failed, possibly because it didn't converge."
1451 if rcParams['verbose']:
1452 print msg
1453 return
1454 else:
1455 raise RuntimeError(msg)
[513]1456
[1217]1457
[1118]1458 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1459 threshold=3, chan_avg_limit=1, plot=False,
1460 insitu=None):
[880]1461 """
1462 Return a scan which has been baselined (all rows) by a polynomial.
1463 Spectral lines are detected first using linefinder and masked out
1464 to avoid them affecting the baseline solution.
1465
1466 Parameters:
1467 mask: an optional mask retrieved from scantable
1468 edge: an optional number of channel to drop at
1469 the edge of spectrum. If only one value is
1470 specified, the same number will be dropped from
1471 both sides of the spectrum. Default is to keep
[907]1472 all channels. Nested tuples represent individual
[976]1473 edge selection for different IFs (the number of spectral
1474 channels can be different)
[880]1475 order: the order of the polynomial (default is 0)
1476 threshold: the threshold used by line finder. It is better to
1477 keep it large as only strong lines affect the
1478 baseline solution.
[1280]1479 chan_avg_limit:
1480 a maximum number of consecutive spectral channels to
1481 average during the search of weak and broad lines.
1482 The default is no averaging (and no search for weak
1483 lines). If such lines can affect the fitted baseline
1484 (e.g. a high order polynomial is fitted), increase this
1485 parameter (usually values up to 8 are reasonable). Most
1486 users of this method should find the default value
1487 sufficient.
[1061]1488 plot: plot the fit and the residual. In this mode each
1489 individual fit has to be approved, by typing 'y'
1490 or 'n'
[880]1491 insitu: if False a new scantable is returned.
1492 Otherwise, the scaling is done in-situ
1493 The default is taken from .asaprc (False)
1494
1495 Example:
1496 scan2=scan.auto_poly_baseline(order=7)
1497 """
1498 if insitu is None: insitu = rcParams['insitu']
1499 varlist = vars()
1500 from asap.asapfitter import fitter
1501 from asap.asaplinefind import linefinder
1502 from asap import _is_sequence_or_number as _is_valid
1503
[976]1504 # check whether edge is set up for each IF individually
[1118]1505 individualedge = False;
1506 if len(edge) > 1:
1507 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1508 individualedge = True;
[907]1509
[1118]1510 if not _is_valid(edge, int) and not individualedge:
[909]1511 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1512 pair of integers specified as a tuple. Nested tuples are allowed \
1513 to make individual selection for different IFs."
[919]1514
[1118]1515 curedge = (0, 0)
1516 if individualedge:
1517 for edgepar in edge:
1518 if not _is_valid(edgepar, int):
1519 raise ValueError, "Each element of the 'edge' tuple has \
1520 to be a pair of integers or an integer."
[907]1521 else:
[1118]1522 curedge = edge;
[880]1523
1524 # setup fitter
1525 f = fitter()
1526 f.set_function(poly=order)
1527
1528 # setup line finder
[1118]1529 fl = linefinder()
[1268]1530 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1531
1532 if not insitu:
[1118]1533 workscan = self.copy()
[880]1534 else:
[1118]1535 workscan = self
[880]1536
[907]1537 fl.set_scan(workscan)
1538
[1118]1539 rows = range(workscan.nrow())
[1446]1540 # Save parameters of baseline fits & masklists as a class attribute.
1541 # NOTICE: It does not reflect changes in scantable!
1542 if len(rows) > 0:
1543 self.blpars=[]
1544 self.masklists=[]
[880]1545 asaplog.push("Processing:")
1546 for r in rows:
[1118]1547 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1548 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1549 workscan.getpol(r), workscan.getcycle(r))
[880]1550 asaplog.push(msg, False)
[907]1551
[976]1552 # figure out edge parameter
[1118]1553 if individualedge:
[1118]1554 if len(edge) <= workscan.getif(r):
1555 raise RuntimeError, "Number of edge elements appears to " \
1556 "be less than the number of IFs"
1557 curedge = edge[workscan.getif(r)]
[919]1558
[976]1559 # setup line finder
[1118]1560 fl.find_lines(r, mask, curedge)
[1446]1561 outmask=fl.get_mask()
[880]1562 f.set_scan(workscan, fl.get_mask())
1563 f.x = workscan._getabcissa(r)
1564 f.y = workscan._getspectrum(r)
1565 f.data = None
1566 f.fit()
[1446]1567
1568 # Show mask list
1569 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1570 msg = "mask range: "+str(masklist)
1571 asaplog.push(msg, False)
1572
1573 fpar = f.get_parameters()
[1061]1574 if plot:
1575 f.plot(residual=True)
1576 x = raw_input("Accept fit ( [y]/n ): ")
1577 if x.upper() == 'N':
[1446]1578 self.blpars.append(None)
1579 self.masklists.append(None)
[1061]1580 continue
[880]1581 workscan._setspectrum(f.fitter.getresidual(), r)
[1446]1582 self.blpars.append(fpar)
1583 self.masklists.append(masklist)
[1061]1584 if plot:
1585 f._p.unmap()
1586 f._p = None
1587 workscan._add_history("auto_poly_baseline", varlist)
[880]1588 if insitu:
1589 self._assign(workscan)
1590 else:
1591 return workscan
1592
[914]1593 def rotate_linpolphase(self, angle):
1594 """
1595 Rotate the phase of the complex polarization O=Q+iU correlation.
1596 This is always done in situ in the raw data. So if you call this
1597 function more than once then each call rotates the phase further.
1598 Parameters:
1599 angle: The angle (degrees) to rotate (add) by.
1600 Examples:
1601 scan.rotate_linpolphase(2.3)
1602 """
1603 varlist = vars()
[936]1604 self._math._rotate_linpolphase(self, angle)
[914]1605 self._add_history("rotate_linpolphase", varlist)
1606 print_log()
1607 return
[710]1608
[513]1609
[914]1610 def rotate_xyphase(self, angle):
1611 """
1612 Rotate the phase of the XY correlation. This is always done in situ
1613 in the data. So if you call this function more than once
1614 then each call rotates the phase further.
1615 Parameters:
1616 angle: The angle (degrees) to rotate (add) by.
1617 Examples:
1618 scan.rotate_xyphase(2.3)
1619 """
1620 varlist = vars()
[936]1621 self._math._rotate_xyphase(self, angle)
[914]1622 self._add_history("rotate_xyphase", varlist)
1623 print_log()
1624 return
1625
1626 def swap_linears(self):
1627 """
[1348]1628 Swap the linear polarisations XX and YY, or better the first two
1629 polarisations as this also works for circulars.
[914]1630 """
1631 varlist = vars()
[936]1632 self._math._swap_linears(self)
[914]1633 self._add_history("swap_linears", varlist)
1634 print_log()
1635 return
1636
1637 def invert_phase(self):
1638 """
1639 Invert the phase of the complex polarisation
1640 """
1641 varlist = vars()
[936]1642 self._math._invert_phase(self)
[914]1643 self._add_history("invert_phase", varlist)
1644 print_log()
1645 return
1646
[876]1647 def add(self, offset, insitu=None):
[513]1648 """
1649 Return a scan where all spectra have the offset added
1650 Parameters:
1651 offset: the offset
1652 insitu: if False a new scantable is returned.
1653 Otherwise, the addition is done in-situ
1654 The default is taken from .asaprc (False)
1655 """
1656 if insitu is None: insitu = rcParams['insitu']
[876]1657 self._math._setinsitu(insitu)
[513]1658 varlist = vars()
[876]1659 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1660 s._add_history("add", varlist)
[876]1661 print_log()
1662 if insitu:
1663 self._assign(s)
1664 else:
[513]1665 return s
1666
[1308]1667 def scale(self, factor, tsys=True, insitu=None):
[513]1668 """
1669 Return a scan where all spectra are scaled by the given 'factor'
1670 Parameters:
1671 factor: the scaling factor
1672 insitu: if False a new scantable is returned.
1673 Otherwise, the scaling is done in-situ
1674 The default is taken from .asaprc (False)
1675 tsys: if True (default) then apply the operation to Tsys
1676 as well as the data
1677 """
1678 if insitu is None: insitu = rcParams['insitu']
[876]1679 self._math._setinsitu(insitu)
[513]1680 varlist = vars()
[876]1681 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1682 s._add_history("scale", varlist)
[876]1683 print_log()
1684 if insitu:
1685 self._assign(s)
1686 else:
[513]1687 return s
1688
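    # A minimal sketch of the unary arithmetic helpers above ('scan' is an
    # assumed, already loaded scantable; parameter names are as documented):
    #
    #     shifted = scan.add(1.5, insitu=False)                   # offset every channel
    #     calibrated = scan.scale(10.0, tsys=True, insitu=False)  # scale data and Tsys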
[1348]1689 def auto_quotient(self, preserve=True, mode='paired'):
[670]1690 """
1691 This function builds quotients automatically.
1692 It assumes the observation has the same number of
1693 "ons" and "offs".
1694 Parameters:
[710]1695 preserve: you can preserve (default) the continuum or
1696 remove it. The equations used are
[670]1697 preserve: Output = Toff * (on/off) - Toff
[1070]1698 remove: Output = Toff * (on/off) - Ton
[1348]1699 mode: the on/off detection mode
1700 'paired' (default)
1701 identifies 'off' scans by the
1702 trailing '_R' (Mopra/Parkes) or
1703 '_e'/'_w' (Tid) and matches
1704 on/off pairs from the observing pattern
1705 'time'
1706 finds the closest off in time
1707
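        Example:
            (a minimal sketch; 'scan' is assumed to contain matching
             on/off integrations)
            q = scan.auto_quotient()
            q2 = scan.auto_quotient(preserve=False, mode='time')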
[670]1708 """
[1348]1709 modes = ["time", "paired"]
[670]1710 if mode not in modes:
[876]1711 msg = "please provide a valid mode. Valid modes are %s" % (modes)
1712 raise ValueError(msg)
1713 varlist = vars()
[1348]1714 s = None
1715 if mode.lower() == "paired":
1716 basesel = self.get_selection()
[1356]1717 sel = selector()+basesel
1718 sel.set_query("SRCTYPE==1")
1719 self.set_selection(sel)
[1348]1720 offs = self.copy()
1721 sel.set_query("SRCTYPE==0")
[1356]1722 self.set_selection(sel)
[1348]1723 ons = self.copy()
1724 s = scantable(self._math._quotient(ons, offs, preserve))
1725 self.set_selection(basesel)
1726 elif mode.lower() == "time":
1727 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1728 s._add_history("auto_quotient", varlist)
[876]1729 print_log()
1730 return s
[710]1731
[1145]1732 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1733 """
[1143]1734 Form a quotient using "off" beams when observing in "MX" mode.
1735 Parameters:
[1145]1736 mask: an optional mask to be used when weight == 'stddev'
[1143]1737 weight: How to average the off beams. Default is 'median'.
[1145]1738 preserve: you can preserve (default) the continuum or
1739 remove it. The equations used are
1740 preserve: Output = Toff * (on/off) - Toff
1741 remove: Output = Toff * (on/off) - Ton
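        Example:
            (a minimal sketch; 'scan' is assumed to hold MX-mode data)
            q = scan.mx_quotient(weight='median')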
[1217]1742 """
[1143]1743 if mask is None: mask = ()
[1141]1744 varlist = vars()
1745 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1746 preoff = scantable(self._math._mx_extract(self, 'off'))
1747 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1748 from asapmath import quotient
[1145]1749 q = quotient(on, off, preserve)
[1143]1750 q._add_history("mx_quotient", varlist)
[1145]1751 print_log()
[1217]1752 return q
[513]1753
[718]1754 def freq_switch(self, insitu=None):
1755 """
1756 Apply frequency switching to the data.
1757 Parameters:
1758 insitu: if False a new scantable is returned.
1759 Otherwise, the switching is done in-situ
1760 The default is taken from .asaprc (False)
1761 Example:
1762 none
1763 """
1764 if insitu is None: insitu = rcParams['insitu']
[876]1765 self._math._setinsitu(insitu)
[718]1766 varlist = vars()
[876]1767 s = scantable(self._math._freqswitch(self))
[1118]1768 s._add_history("freq_switch", varlist)
[876]1769 print_log()
1770 if insitu: self._assign(s)
1771 else: return s
[718]1772
[780]1773 def recalc_azel(self):
1774 """
1775 Recalculate the azimuth and elevation for each position.
1776 Parameters:
1777 none
1778 Example:
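            (a minimal sketch; 'scan' is an existing scantable)
            scan.recalc_azel()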
1779 """
1780 varlist = vars()
[876]1781 self._recalcazel()
[780]1782 self._add_history("recalc_azel", varlist)
1783 print_log()
1784 return
1785
[513]1786 def __add__(self, other):
1787 varlist = vars()
1788 s = None
1789 if isinstance(other, scantable):
[1308]1790 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1791 elif isinstance(other, float):
[876]1792 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1793 else:
[718]1794 raise TypeError("Other input is not a scantable or float value")
[513]1795 s._add_history("operator +", varlist)
[718]1796 print_log()
[513]1797 return s
1798
1799 def __sub__(self, other):
1800 """
1801 implicit on all axes and on Tsys
1802 """
1803 varlist = vars()
1804 s = None
1805 if isinstance(other, scantable):
[1308]1806 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1807 elif isinstance(other, float):
[876]1808 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1809 else:
[718]1810 raise TypeError("Other input is not a scantable or float value")
[513]1811 s._add_history("operator -", varlist)
[718]1812 print_log()
[513]1813 return s
[710]1814
[513]1815 def __mul__(self, other):
1816 """
1817 implicit on all axes and on Tsys
1818 """
1819 varlist = vars()
1820 s = None
1821 if isinstance(other, scantable):
[1308]1822 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1823 elif isinstance(other, float):
[876]1824 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1825 else:
[718]1826 raise TypeError("Other input is not a scantable or float value")
[513]1827 s._add_history("operator *", varlist)
[718]1828 print_log()
[513]1829 return s
1830
[710]1831
[513]1832 def __div__(self, other):
1833 """
1834 implicit on all axes and on Tsys
1835 """
1836 varlist = vars()
1837 s = None
1838 if isinstance(other, scantable):
[1308]1839 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1840 elif isinstance(other, float):
1841 if other == 0.0:
[718]1842 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1843 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1844 else:
[718]1845 raise TypeError("Other input is not a scantable or float value")
[513]1846 s._add_history("operator /", varlist)
[718]1847 print_log()
[513]1848 return s
1849
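    # A minimal sketch of the operator overloads above ('scan' and 'off' are
    # assumed, compatible scantables; floats are applied channel-wise):
    #
    #     diff = scan - off        # scantable - scantable
    #     scaled = scan * 2.0      # scantable * float
    #     ratio = scan / off       # scantable / scantable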
[530]1850 def get_fit(self, row=0):
1851 """
1852 Print or return the stored fits for a row in the scantable
1853 Parameters:
1854 row: the row which the fit has been applied to.
1855 """
1856 if row >= self.nrow():
1857 return
[976]1858 from asap.asapfit import asapfit
[530]1859 fit = asapfit(self._getfit(row))
[718]1860 if rcParams['verbose']:
[530]1861 print fit
1862 return
1863 else:
1864 return fit.as_dict()
1865
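    # A minimal sketch for get_fit ('scan' is an assumed scantable that has a
    # stored fit for row 0). With rcParams['verbose'] set the fit is printed;
    # otherwise a dictionary describing the fit is returned:
    #
    #     fitdict = scan.get_fit(0)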
[484]1866 def _add_history(self, funcname, parameters):
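        # Append a '##'-separated record of the calling function and its
        # arguments to the table history; 'self' is dropped, scantable
        # arguments are abbreviated and masks are stored as index segments.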
1867 # create date
1868 sep = "##"
1869 from datetime import datetime
1870 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1871 hist = dstr+sep
1872 hist += funcname+sep#cdate+sep
1873 if parameters.has_key('self'): del parameters['self']
[1118]1874 for k, v in parameters.iteritems():
[484]1875 if type(v) is dict:
[1118]1876 for k2, v2 in v.iteritems():
[484]1877 hist += k2
1878 hist += "="
[1118]1879 if isinstance(v2, scantable):
[484]1880 hist += 'scantable'
1881 elif k2 == 'mask':
[1118]1882 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1883 hist += str(self._zip_mask(v2))
1884 else:
1885 hist += str(v2)
[484]1886 else:
[513]1887 hist += str(v2)
[484]1888 else:
1889 hist += k
1890 hist += "="
[1118]1891 if isinstance(v, scantable):
[484]1892 hist += 'scantable'
1893 elif k == 'mask':
[1118]1894 if isinstance(v, list) or isinstance(v, tuple):
[513]1895 hist += str(self._zip_mask(v))
1896 else:
1897 hist += str(v)
[484]1898 else:
1899 hist += str(v)
1900 hist += sep
1901 hist = hist[:-2] # remove trailing '##'
1902 self._addhistory(hist)
1903
[710]1904
[484]1905 def _zip_mask(self, mask):
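        # Compress a channel mask (list of 0/1 flags) into a list of
        # [start, end) index segments covering the selected channels,
        # e.g. [0, 1, 1, 0, 1] -> [[1, 3], [4, 5]].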
1906 mask = list(mask)
1907 i = 0
1908 segments = []
1909 while mask[i:].count(1):
1910 i += mask[i:].index(1)
1911 if mask[i:].count(0):
1912 j = i + mask[i:].index(0)
1913 else:
[710]1914 j = len(mask)
[1118]1915 segments.append([i, j])
[710]1916 i = j
[484]1917 return segments
[714]1918
[626]1919 def _get_ordinate_label(self):
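        # Compose the y-axis label from the current flux unit,
        # e.g. "Brightness Temperature (K)" or "Flux density (Jy)".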
1920 fu = "("+self.get_fluxunit()+")"
1921 import re
1922 lbl = "Intensity"
[1118]1923 if re.match(".K.", fu):
[626]1924 lbl = "Brightness Temperature "+ fu
[1118]1925 elif re.match(".Jy.", fu):
[626]1926 lbl = "Flux density "+ fu
1927 return lbl
[710]1928
[876]1929 def _check_ifs(self):
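        # Return True if all IFs that contain data have the same number
        # of channels.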
1930 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1931 nchans = filter(lambda t: t > 0, nchans)
[876]1932 return (nchans.count(nchans[0]) == len(nchans))
[976]1933
[1496]1934 def _fill(self, names, unit, average, getpt):
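        # Import one or more input files via stfiller into this scantable,
        # optionally averaging integrations within scans and merging
        # successive inputs into a single table.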
[976]1935 import os
1936 from asap._asap import stfiller
1937 first = True
1938 fullnames = []
1939 for name in names:
1940 name = os.path.expandvars(name)
1941 name = os.path.expanduser(name)
1942 if not os.path.exists(name):
1943 msg = "File '%s' does not exist" % (name)
1944 if rcParams['verbose']:
1945 asaplog.push(msg)
1946 print asaplog.pop().strip()
1947 return
1948 raise IOError(msg)
1949 fullnames.append(name)
1950 if average:
1951 asaplog.push('Auto averaging integrations')
[1079]1952 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1953 for name in fullnames:
[1073]1954 tbl = Scantable(stype)
1955 r = stfiller(tbl)
[976]1956 msg = "Importing %s..." % (name)
[1118]1957 asaplog.push(msg, False)
[976]1958 print_log()
[1496]1959 r._open(name, -1, -1, getpt)
[976]1960 r._read()
[1073]1961 #tbl = r._getdata()
[976]1962 if average:
[1118]1963 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1964 #tbl = tbl2
1965 if not first:
1966 tbl = self._math._merge([self, tbl])
1967 #tbl = tbl2
1968 Scantable.__init__(self, tbl)
1969 r._close()
[1118]1970 del r, tbl
[976]1971 first = False
1972 if unit is not None:
1973 self.set_fluxunit(unit)
[1446]1974 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]1975