source: trunk/python/scantable.py@ 1585

Last change on this file since 1585 was 1584, checked in by Malte Marquarding, 15 years ago

Change default to MHz, doc update

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.9 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1435]41 if isinstance(filename, str):# or \
42# (isinstance(filename, list) or isinstance(filename, tuple)) \
43# and isinstance(filename[-1], str):
[976]44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
[718]49 if rcParams['verbose']:
[976]50 asaplog.push(s)
51 print asaplog.pop().strip()
[718]52 return
[976]53 raise IOError(s)
[1115]54 if os.path.isdir(filename) \
[1118]55 and not os.path.exists(filename+'/table.f1'):
[976]56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
[1118]58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
[976]60 if unit is not None:
61 self.set_fluxunit(unit)
62 self.set_freqframe(rcParams['scantable.freqframe'])
[718]63 else:
[1118]64 msg = "The given file '%s'is not a valid " \
65 "asap table." % (filename)
[976]66 if rcParams['verbose']:
67 print msg
68 return
69 else:
70 raise IOError(msg)
[226]71 else:
[1118]72 self._fill([filename], unit, average)
73 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]74 and isinstance(filename[-1], str):
75 self._fill(filename, unit, average)
[1259]76 self._add_history("scantable", varlist)
[714]77 print_log()
[102]78
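A minimal usage sketch for the constructor, assuming a working asap installation; the file name 'mydata.rpf' is hypothetical:

    from asap import scantable
    scan = scantable('mydata.rpf', average=True, unit='K')  # read and time-average integrations
    print scan                                              # same summary as scan.summary()
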
[876]79 def save(self, name=None, format=None, overwrite=False):
[116]80 """
[1280]81 Store the scantable on disk. This can be an asap (aips++) Table,
82 SDFITS or MS2 format.
[116]83 Parameters:
[1093]84 name: the name of the output file. For format "ASCII"
85 this is the root file name (data in 'name'.txt
[497]86 and header in 'name'_header.txt)
[116]87 format: an optional file format. Default is ASAP.
[280]88 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]89 'SDFITS' (save as SDFITS file)
[200]90 'ASCII' (saves as ascii text file)
[226]91 'MS2' (saves as an aips++
92 MeasurementSet V2)
[1573]93 'FITS' (save as image FITS - not
[1443]94 readable by CLASS)
95 'CLASS' (save as FITS readable by CLASS)
[411]96 overwrite: whether to overwrite the file if it exists.
[256]97 The default (False) is to return with a warning
[411]98 without writing the output. USE WITH CARE.
[116]99 Example:
100 scan.save('myscan.asap')
[1118]101 scan.save('myscan.sdfits', 'SDFITS')
[116]102 """
[411]103 from os import path
[226]104 if format is None: format = rcParams['scantable.save']
[256]105 suffix = '.'+format.lower()
[1118]106 if name is None or name == "":
[256]107 name = 'scantable'+suffix
[718]108 msg = "No filename given. Using default name %s..." % name
109 asaplog.push(msg)
[411]110 name = path.expandvars(name)
[256]111 if path.isfile(name) or path.isdir(name):
112 if not overwrite:
[718]113 msg = "File %s exists." % name
114 if rcParams['verbose']:
115 print msg
116 return
117 else:
118 raise IOError(msg)
[451]119 format2 = format.upper()
120 if format2 == 'ASAP':
[116]121 self._save(name)
122 else:
[989]123 from asap._asap import stwriter as stw
[1118]124 writer = stw(format2)
125 writer.write(self, name)
[718]126 print_log()
[116]127 return
128
[102]129 def copy(self):
130 """
131 Return a copy of this scantable.
[1348]132 Note:
133 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]134 Parameters:
[113]135 none
[102]136 Example:
137 copiedscan = scan.copy()
138 """
[876]139 sd = scantable(Scantable._copy(self))
[113]140 return sd
141
[1093]142 def drop_scan(self, scanid=None):
143 """
144 Return a new scantable where the specified scan number(s) have
145 been dropped.
146 Parameters:
147 scanid: a (list of) scan number(s)
148 """
149 from asap import _is_sequence_or_number as _is_valid
150 from asap import _to_list
151 from asap import unique
152 if not _is_valid(scanid):
153 if rcParams['verbose']:
154 print "Please specify a scanno to drop from the scantable"
155 return
156 else:
157 raise RuntimeError("No scan given")
158 try:
159 scanid = _to_list(scanid)
160 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
161 for sid in scanid: allscans.remove(sid)
[1118]162 if len(allscans) == 0:
163 raise ValueError("Can't remove all scans")
[1093]164 except ValueError:
165 if rcParams['verbose']:
166 print "Couldn't find any match."
167 return
168 else: raise
169 try:
170 bsel = self.get_selection()
171 sel = selector()
172 sel.set_scans(allscans)
173 self.set_selection(bsel+sel)
174 scopy = self._copy()
175 self.set_selection(bsel)
176 return scantable(scopy)
177 except RuntimeError:
[1118]178 if rcParams['verbose']:
179 print "Couldn't find any match."
180 else:
181 raise
[1093]182
183
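A short sketch of drop_scan; the scan numbers are arbitrary illustrations:

    from asap import scantable
    scan = scantable('mydata.rpf')       # hypothetical file
    trimmed = scan.drop_scan([1, 4])     # new scantable without scans 1 and 4
    trimmed = scan.drop_scan(3)          # a single scan number also works
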
[102]184 def get_scan(self, scanid=None):
185 """
186 Return a specific scan (by scanno) or collection of scans (by
187 source name) in a new scantable.
[1348]188 Note:
189 See scantable.drop_scan() for the inverse operation.
[102]190 Parameters:
[513]191 scanid: a (list of) scanno or a source name, unix-style
192 patterns are accepted for source name matching, e.g.
193 '*_R' gets all 'ref' scans
[102]194 Example:
[513]195 # get all scans containing the source '323p459'
196 newscan = scan.get_scan('323p459')
197 # get all 'off' scans
198 refscans = scan.get_scan('*_R')
199 # get a subset of scans by scanno (as listed in scan.summary())
[1118]200 newscan = scan.get_scan([0, 2, 7, 10])
[102]201 """
202 if scanid is None:
[718]203 if rcParams['verbose']:
[1118]204 print "Please specify a scan no or name to " \
205 "retrieve from the scantable"
[718]206 return
207 else:
208 raise RuntimeError("No scan given")
209
[102]210 try:
[946]211 bsel = self.get_selection()
212 sel = selector()
[102]213 if type(scanid) is str:
[946]214 sel.set_name(scanid)
215 self.set_selection(bsel+sel)
[876]216 scopy = self._copy()
[946]217 self.set_selection(bsel)
[876]218 return scantable(scopy)
[102]219 elif type(scanid) is int:
[946]220 sel.set_scans([scanid])
221 self.set_selection(bsel+sel)
[876]222 scopy = self._copy()
[946]223 self.set_selection(bsel)
[876]224 return scantable(scopy)
[381]225 elif type(scanid) is list:
[946]226 sel.set_scans(scanid)
227 self.set_selection(bsel+sel)
[876]228 scopy = self._copy()
[946]229 self.set_selection(bsel)
[876]230 return scantable(scopy)
[381]231 else:
[718]232 msg = "Illegal scanid type, use 'int' or 'list' if ints."
233 if rcParams['verbose']:
234 print msg
235 else:
236 raise TypeError(msg)
[102]237 except RuntimeError:
[718]238 if rcParams['verbose']: print "Couldn't find any match."
239 else: raise
[102]240
241 def __str__(self):
[1118]242 return Scantable._summary(self, True)
[102]243
[976]244 def summary(self, filename=None):
[102]245 """
246 Print a summary of the contents of this scantable.
247 Parameters:
[102]248 filename: the name of a file to write the output to
249 Default - no file output
[102]252 """
[976]253 info = Scantable._summary(self, True)
254 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]255 if filename is not None:
[256]256 if filename is "":
257 filename = 'scantable_summary.txt'
[415]258 from os.path import expandvars, isdir
[411]259 filename = expandvars(filename)
[415]260 if not isdir(filename):
[413]261 data = open(filename, 'w')
262 data.write(info)
263 data.close()
264 else:
[718]265 msg = "Illegal file name '%s'." % (filename)
266 if rcParams['verbose']:
267 print msg
268 else:
269 raise IOError(msg)
270 if rcParams['verbose']:
[794]271 try:
272 from IPython.genutils import page as pager
273 except ImportError:
274 from pydoc import pager
275 pager(info)
[718]276 else:
277 return info
[710]278
[1512]279 def get_spectrum(self, rowno):
[1471]280 """Return the spectrum for the current row in the scantable as a list.
281 Parameters:
[1573]282 rowno: the row number to retrieve the spectrum from
[1471]283 """
284 return self._getspectrum(rowno)
[946]285
[1471]286 def get_mask(self, rowno):
287 """Return the mask for the current row in the scantable as a list.
288 Parameters:
[1573]289 rowno: the row number to retrieve the mask from
[1471]290 """
291 return self._getmask(rowno)
292
293 def set_spectrum(self, spec, rowno):
294 """Return the spectrum for the current row in the scantable as a list.
295 Parameters:
296 spec: the spectrum
[1573]297 rowno: the row number to set the spectrum for
[1471]298 """
299 assert(len(spec) == self.nchan())
300 return self._setspectrum(spec, rowno)
301
[946]302 def get_selection(self):
303 """
[1005]304 Get the selection object currently set on this scantable.
305 Parameters:
306 none
307 Example:
308 sel = scan.get_selection()
309 sel.set_ifs(0) # select IF 0
310 scan.set_selection(sel) # apply modified selection
[946]311 """
312 return selector(self._getselection())
313
[1576]314 def set_selection(self, selection=None, **kw):
[946]315 """
[1005]316 Select a subset of the data. All following operations on this scantable
317 are only applied to this selection.
318 Parameters:
319 selection: a selector object (default unset the selection)
320 Examples:
321 sel = selector() # create a selection object
[1118]322 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]323 scan.set_selection(sel) # set the selection
324 scan.summary() # will only print summary of scanno 0 and 3
325 scan.set_selection() # unset the selection
[946]326 """
[1576]327 if selection is None:
328 # reset
329 if len(kw) == 0:
330 selection = selector()
331 else:
332 # try keywords
333 for k in kw:
334 if k not in selector.fields:
335 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
336 selection = selector(**kw)
[946]337 self._setselection(selection)
338
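A sketch of working with selections via a selector object; the file name and the selected scan/IF numbers are hypothetical:

    from asap import scantable, selector
    scan = scantable('mydata.rpf')
    sel = selector()
    sel.set_ifs([0])             # restrict to IF 0
    sel.set_scans([0, 3])        # and to scans 0 and 3
    scan.set_selection(sel)
    print scan.summary()         # summary of the selected subset only
    scan.set_selection()         # unset the selection again
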
[876]339 def stats(self, stat='stddev', mask=None):
[102]340 """
[135]341 Determine the specified statistic of the current beam/if/pol
[102]342 Takes a 'mask' as an optional parameter to specify which
343 channels should be excluded.
344 Parameters:
[135]345 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
346 'var', 'stddev', 'avdev', 'rms', 'median'
347 mask: an optional mask specifying where the statistic
[102]348 should be determined.
349 Example:
[113]350 scan.set_unit('channel')
[1118]351 msk = scan.create_mask([100, 200], [500, 600])
[135]352 scan.stats(stat='mean', mask=m)
[102]353 """
354 if mask == None:
[876]355 mask = []
[1118]356 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]357 if not self._check_ifs():
[1118]358 raise ValueError("Cannot apply mask as the IFs have different "
359 "number of channels. Please use setselection() "
360 "to select individual IFs")
[256]361
[876]362 statvals = self._math._stats(self, mask, stat)
363 out = ''
364 axes = []
365 for i in range(self.nrow()):
366 axis = []
367 axis.append(self.getscan(i))
368 axis.append(self.getbeam(i))
369 axis.append(self.getif(i))
370 axis.append(self.getpol(i))
371 axis.append(self.getcycle(i))
372 axes.append(axis)
373 tm = self._gettime(i)
374 src = self._getsourcename(i)
375 out += 'Scan[%d] (%s) ' % (axis[0], src)
376 out += 'Time[%s]:\n' % (tm)
377 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
378 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
379 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
380 out += '= %3.3f\n' % (statvals[i])
381 out += "--------------------------------------------------\n"
[256]382
[876]383 if rcParams['verbose']:
384 print "--------------------------------------------------"
[1118]385 print " ", stat
[876]386 print "--------------------------------------------------"
387 print out
[1295]388 #else:
389 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
390 # 'axes' : axes,
391 # 'data': statvals}
392 return statvals
[102]393
[1118]394 def stddev(self, mask=None):
[135]395 """
396 Determine the standard deviation of the current beam/if/pol
397 Takes a 'mask' as an optional parameter to specify which
398 channels should be excluded.
399 Parameters:
400 mask: an optional mask specifying where the standard
401 deviation should be determined.
402
403 Example:
404 scan.set_unit('channel')
[1118]405 msk = scan.create_mask([100, 200], [500, 600])
[135]406 scan.stddev(mask=msk)
407 """
[1118]408 return self.stats(stat='stddev', mask=mask)
[135]409
[1003]410
[1259]411 def get_column_names(self):
[1003]412 """
413 Return a list of column names, which can be used for selection.
414 """
[1259]415 return list(Scantable.get_column_names(self))
[1003]416
[876]417 def get_tsys(self):
[113]418 """
419 Return the System temperatures.
420 Returns:
[876]421 a list of Tsys values for the current selection
[113]422 """
[256]423
[876]424 return self._row_callback(self._gettsys, "Tsys")
[256]425
[876]426 def _row_callback(self, callback, label):
427 axes = []
[1118]428 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]429 out = ""
[1118]430 outvec = []
[876]431 for i in range(self.nrow()):
432 axis = []
433 axis.append(self.getscan(i))
434 axis.append(self.getbeam(i))
435 axis.append(self.getif(i))
436 axis.append(self.getpol(i))
437 axis.append(self.getcycle(i))
438 axes.append(axis)
439 tm = self._gettime(i)
440 src = self._getsourcename(i)
441 out += 'Scan[%d] (%s) ' % (axis[0], src)
442 out += 'Time[%s]:\n' % (tm)
443 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
444 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
445 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
446 outvec.append(callback(i))
447 out += '= %3.3f\n' % (outvec[i])
448 out += "--------------------------------------------------\n"
449 if rcParams['verbose']:
450 print "--------------------------------------------------"
451 print " %s" % (label)
452 print "--------------------------------------------------"
453 print out
[1175]454 # disabled because the vector seems more useful
455 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
456 return outvec
[256]457
[1070]458 def _get_column(self, callback, row=-1):
459 """
460 """
461 if row == -1:
462 return [callback(i) for i in range(self.nrow())]
463 else:
464 if 0 <= row < self.nrow():
465 return callback(row)
[256]466
[1070]467
[1348]468 def get_time(self, row=-1, asdatetime=False):
[113]469 """
470 Get a list of time stamps for the observations.
[1348]471 Return a datetime object for each integration time stamp in the scantable.
[113]472 Parameters:
[1348]473 row: row no of integration. Default -1 return all rows
474 asdatetime: return values as datetime objects rather than strings
[113]475 Example:
476 none
477 """
[1175]478 from time import strptime
479 from datetime import datetime
[1392]480 times = self._get_column(self._gettime, row)
[1348]481 if not asdatetime:
[1392]482 return times
[1175]483 format = "%Y/%m/%d/%H:%M:%S"
484 if isinstance(times, list):
485 return [datetime(*strptime(i, format)[:6]) for i in times]
486 else:
487 return datetime(*strptime(times, format)[:6])
[102]488
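A sketch of retrieving time stamps; the file name is hypothetical:

    from asap import scantable
    scan = scantable('mydata.rpf')
    stamps = scan.get_time(asdatetime=True)   # list of datetime objects, one per row
    first = scan.get_time(row=0)              # a single row comes back as a string
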
[1348]489
490 def get_inttime(self, row=-1):
491 """
492 Get a list of integration times for the observations.
493 Return a time in seconds for each integration in the scantable.
494 Parameters:
495 row: row no of integration. Default -1 return all rows.
496 Example:
497 none
498 """
[1573]499 return self._get_column(self._getinttime, row)
[1348]500
[1573]501
[714]502 def get_sourcename(self, row=-1):
503 """
[794]504 Get a list of source names for the observations.
[714]505 Return a string for each integration in the scantable.
506 Parameters:
[1348]507 row: row no of integration. Default -1 return all rows.
[714]508 Example:
509 none
510 """
[1070]511 return self._get_column(self._getsourcename, row)
[714]512
[794]513 def get_elevation(self, row=-1):
514 """
515 Get a list of elevations for the observations.
516 Return a float for each integration in the scantable.
517 Parameters:
[1348]518 row: row no of integration. Default -1 return all rows.
[794]519 Example:
520 none
521 """
[1070]522 return self._get_column(self._getelevation, row)
[794]523
524 def get_azimuth(self, row=-1):
525 """
526 Get a list of azimuths for the observations.
527 Return a float for each integration in the scantable.
528 Parameters:
[1348]529 row: row no of integration. Default -1 return all rows.
[794]530 Example:
531 none
532 """
[1070]533 return self._get_column(self._getazimuth, row)
[794]534
535 def get_parangle(self, row=-1):
536 """
537 Get a list of parallactic angles for the observations.
538 Return a float for each integration in the scantable.
539 Parameters:
[1348]540 row: row no of integration. Default -1 return all rows.
[794]541 Example:
542 none
543 """
[1070]544 return self._get_column(self._getparangle, row)
[794]545
[1070]546 def get_direction(self, row=-1):
547 """
548 Get a list of Positions on the sky (direction) for the observations.
549 Return a float for each integration in the scantable.
550 Parameters:
551 row: row no of integration. Default -1 return all rows
552 Example:
553 none
554 """
555 return self._get_column(self._getdirection, row)
556
[1391]557 def get_directionval(self, row=-1):
558 """
559 Get a list of Positions on the sky (direction) for the observations.
560 Return a float for each integration in the scantable.
561 Parameters:
562 row: row no of integration. Default -1 return all rows
563 Example:
564 none
565 """
566 return self._get_column(self._getdirectionvec, row)
567
568
[102]569 def set_unit(self, unit='channel'):
570 """
571 Set the unit for all following operations on this scantable
572 Parameters:
573 unit: optional unit, default is 'channel'
[1118]574 one of '*Hz', 'km/s', 'channel', ''
[102]575 """
[484]576 varlist = vars()
[1118]577 if unit in ['', 'pixel', 'channel']:
[113]578 unit = ''
579 inf = list(self._getcoordinfo())
580 inf[0] = unit
581 self._setcoordinfo(inf)
[1118]582 self._add_history("set_unit", varlist)
[113]583
[484]584 def set_instrument(self, instr):
[358]585 """
[1348]586 Set the instrument for subsequent processing.
[358]587 Parameters:
[710]588 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]589 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]590 """
591 self._setInstrument(instr)
[1118]592 self._add_history("set_instument", vars())
[718]593 print_log()
[358]594
[1190]595 def set_feedtype(self, feedtype):
596 """
597 Overwrite the feed type, which might not be set correctly.
598 Parameters:
599 feedtype: 'linear' or 'circular'
600 """
601 self._setfeedtype(feedtype)
602 self._add_history("set_feedtype", vars())
603 print_log()
604
[276]605 def set_doppler(self, doppler='RADIO'):
606 """
607 Set the doppler for all following operations on this scantable.
608 Parameters:
609 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
610 """
[484]611 varlist = vars()
[276]612 inf = list(self._getcoordinfo())
613 inf[2] = doppler
614 self._setcoordinfo(inf)
[1118]615 self._add_history("set_doppler", vars())
[718]616 print_log()
[710]617
[226]618 def set_freqframe(self, frame=None):
[113]619 """
620 Set the frame type of the Spectral Axis.
621 Parameters:
[591]622 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]623 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
624 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]625 Examples:
626 scan.set_freqframe('BARY')
627 """
[484]628 if frame is None: frame = rcParams['scantable.freqframe']
629 varlist = vars()
[1118]630 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
631 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]632
[989]633 if frame in valid:
[113]634 inf = list(self._getcoordinfo())
635 inf[1] = frame
636 self._setcoordinfo(inf)
[1118]637 self._add_history("set_freqframe", varlist)
[102]638 else:
[1118]639 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]640 if rcParams['verbose']:
641 print msg
642 else:
643 raise TypeError(msg)
644 print_log()
[710]645
[989]646 def set_dirframe(self, frame=""):
647 """
648 Set the frame type of the Direction on the sky.
649 Parameters:
650 frame: an optional frame type, default ''. Valid frames are:
651 'J2000', 'B1950', 'GALACTIC'
652 Examples:
653 scan.set_dirframe('GALACTIC')
654 """
655 varlist = vars()
656 try:
657 Scantable.set_dirframe(self, frame)
[1118]658 except RuntimeError, msg:
[989]659 if rcParams['verbose']:
660 print msg
661 else:
662 raise
[1118]663 self._add_history("set_dirframe", varlist)
[989]664
[113]665 def get_unit(self):
666 """
667 Get the default unit set in this scantable
668 Returns:
669 A unit string
670 """
671 inf = self._getcoordinfo()
672 unit = inf[0]
673 if unit == '': unit = 'channel'
674 return unit
[102]675
[158]676 def get_abcissa(self, rowno=0):
[102]677 """
[158]678 Get the abcissa in the current coordinate setup for the currently
[113]679 selected Beam/IF/Pol
680 Parameters:
[226]681 rowno: an optional row number in the scantable. Default is the
682 first row, i.e. rowno=0
[113]683 Returns:
[1348]684 The abcissa values and the format string (as a tuple)
[113]685 """
[256]686 abc = self._getabcissa(rowno)
[710]687 lbl = self._getabcissalabel(rowno)
[718]688 print_log()
[158]689 return abc, lbl
[113]690
[1118]691 def flag(self, mask=None):
[1001]692 """
693 Flag the selected data using an optional channel mask.
694 Parameters:
695 mask: an optional channel mask, created with create_mask. Default
696 (no mask) is all channels.
697 """
698 varlist = vars()
[1118]699 if mask is None:
700 mask = []
[1001]701 try:
702 self._flag(mask)
[1118]703 except RuntimeError, msg:
[1001]704 if rcParams['verbose']:
705 print msg
706 return
707 else: raise
708 self._add_history("flag", varlist)
709
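A sketch of flagging a channel range with a mask from create_mask; the channel numbers are arbitrary:

    from asap import scantable
    scan = scantable('mydata.rpf')            # hypothetical file
    scan.set_unit('channel')
    msk = scan.create_mask([1200, 1400])      # True inside the window
    scan.flag(mask=msk)                       # flag channels 1200-1400
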
[1584]710 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1192]711 """
712 Flag the data in 'lag' space by providing a frequency to remove.
[1584]713 Flagged data in the scantable gets interpolated over the region.
[1192]714 No taper is applied.
715 Parameters:
[1579]716 start: the start frequency (really a period within the
717 bandwidth) or period to remove
718 end: the end frequency or period to remove
[1584]719 unit: the frequency unit (default "MHz") or "" for
[1579]720 explicit lag channels
[1203]721 Notes:
[1579]722 It is recommended to flag edges of the band or strong
[1348]723 signals beforehand.
[1192]724 """
725 if insitu is None: insitu = rcParams['insitu']
726 self._math._setinsitu(insitu)
727 varlist = vars()
[1579]728 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
729 if not (unit == "" or base.has_key(unit)):
[1192]730 raise ValueError("%s is not a valid unit." % unit)
731 try:
[1579]732 if unit == "":
733 s = scantable(self._math._lag_flag(self, start, end, "lags"))
734 else:
735 s = scantable(self._math._lag_flag(self, start*base[unit],
736 end*base[unit], "frequency"))
[1192]737 except RuntimeError, msg:
738 if rcParams['verbose']:
739 print msg
740 return
741 else: raise
742 s._add_history("lag_flag", varlist)
743 print_log()
744 if insitu:
745 self._assign(s)
746 else:
747 return s
[1001]748
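A sketch of lag_flag; the frequency window of the ripple is an arbitrary illustration:

    from asap import scantable
    scan = scantable('mydata.rpf')            # hypothetical file
    cleaned = scan.lag_flag(2.3, 2.7, unit='MHz', insitu=False)   # interpolate over a 2.3-2.7 MHz ripple
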
[1192]749
[113]750 def create_mask(self, *args, **kwargs):
751 """
[1118]752 Compute and return a mask based on [min, max] windows.
[189]753 The specified windows are to be INCLUDED, when the mask is
[113]754 applied.
[102]755 Parameters:
[1118]756 [min, max], [min2, max2], ...
[1024]757 Pairs of start/end points (inclusive) specifying the regions
[102]758 to be masked
[189]759 invert: optional argument. If specified as True,
760 return an inverted mask, i.e. the regions
761 specified are EXCLUDED
[513]762 row: create the mask using the specified row for
763 unit conversions, default is row=0
764 only necessary if frequency varies over rows.
[102]765 Example:
[113]766 scan.set_unit('channel')
767 a)
[1118]768 msk = scan.create_mask([400, 500], [800, 900])
[189]769 # masks everything outside 400 and 500
[113]770 # and 800 and 900 in the unit 'channel'
771
772 b)
[1118]773 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]774 # masks the regions between 400 and 500
[113]775 # and 800 and 900 in the unit 'channel'
[1024]776 c)
777 mask only channel 400
[1554]778 msk = scan.create_mask([400])
[102]779 """
[1554]780 row = kwargs.get("row", 0)
[513]781 data = self._getabcissa(row)
[113]782 u = self._getcoordinfo()[0]
[718]783 if rcParams['verbose']:
[113]784 if u == "": u = "channel"
[718]785 msg = "The current mask window unit is %s" % u
[1118]786 i = self._check_ifs()
787 if not i:
[876]788 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]789 asaplog.push(msg)
[102]790 n = self.nchan()
[1295]791 msk = _n_bools(n, False)
[710]792 # test if args is a 'list' or a 'normal *args - UGLY!!!
793
[1118]794 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
795 and args or args[0]
[710]796 for window in ws:
[1554]797 if len(window) == 1:
798 window = [window[0], window[0]]
799 if len(window) == 0 or len(window) > 2:
800 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]801 if window[0] > window[1]:
802 tmp = window[0]
803 window[0] = window[1]
804 window[1] = tmp
[102]805 for i in range(n):
[1024]806 if data[i] >= window[0] and data[i] <= window[1]:
[1295]807 msk[i] = True
[113]808 if kwargs.has_key('invert'):
809 if kwargs.get('invert'):
[1295]810 msk = mask_not(msk)
[718]811 print_log()
[102]812 return msk
[710]813
[256]814 def get_restfreqs(self):
815 """
816 Get the restfrequency(s) stored in this scantable.
817 The return value(s) are always of unit 'Hz'
818 Parameters:
819 none
820 Returns:
821 a list of doubles
822 """
823 return list(self._getrestfreqs())
[102]824
[402]825
[931]826 def set_restfreqs(self, freqs=None, unit='Hz'):
827 """
828 Set or replace the restfrequency(s) in this scantable.
829 If the 'freqs' argument holds a scalar,
830 then that rest frequency will be applied to all the selected
831 data. If the 'freqs' argument holds
832 a vector, then it MUST be of equal or smaller length than
833 the number of IFs (and the available restfrequencies will be
834 replaced by this vector). In this case, *all* data have
835 the restfrequency set per IF according
836 to the corresponding value you give in the 'freqs' vector.
[1118]837 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]838 IF 1 gets restfreq 2e9.
[1395]839 You can also specify the frequencies via a linecatalog.
[1153]840
[931]841 Parameters:
842 freqs: list of rest frequency values or string identifiers
843 unit: unit for rest frequency (default 'Hz')
[402]844
[931]845 Example:
846 # set the given restfrequency for the whole table
847 scan.set_restfreqs(freqs=1.4e9)
[1348]848 # If the number of IFs in the data is >= 2, IF0 gets the first
[931]849 # value IF1 the second...
[1118]850 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]851 #set the given restfrequency for the whole table (by name)
852 scan.set_restfreqs(freqs="OH1667")
[391]853
[931]854 Note:
855 To do more sophisticated rest frequency setting, e.g. on a
856 source and IF basis, use scantable.set_selection() before using
857 this function.
858 # provided your scantable is called scan
859 selection = selector()
860 selection.set_name("ORION*")
861 selection.set_ifs([1])
862 scan.set_selection(selection)
863 scan.set_restfreqs(freqs=86.6e9)
864
865 """
866 varlist = vars()
[1157]867 from asap import linecatalog
868 # simple value
[1118]869 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]870 self._setrestfreqs(freqs, "",unit)
[1157]871 # list of values
[1118]872 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]873 # list values are scalars
[1118]874 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]875 sel = selector()
876 savesel = self._getselection()
[1322]877 iflist = self.getifnos()
[931]878 for i in xrange(len(freqs)):
[1322]879 sel.set_ifs(iflist[i])
[931]880 self._setselection(sel)
[1153]881 self._setrestfreqs(freqs[i], "",unit)
[931]882 self._setselection(savesel)
[1157]883 # list values are tuples, (value, name)
884 elif isinstance(freqs[-1], dict):
885 sel = selector()
886 savesel = self._getselection()
[1322]887 iflist = self.getifnos()
[1157]888 for i in xrange(len(freqs)):
[1322]889 sel.set_ifs(iflist[i])
[1259]890 self._setselection(sel)
[1157]891 self._setrestfreqs(freqs[i]["value"],
892 freqs[i]["name"], "MHz")
893 self._setselection(savesel)
894 # freqs are to be taken from a linecatalog
[1153]895 elif isinstance(freqs, linecatalog):
896 sel = selector()
897 savesel = self._getselection()
[1322]898 iflist = self.getifnos()
[1153]899 for i in xrange(freqs.nrow()):
[1322]900 sel.set_ifs(iflist[i])
[1153]901 self._setselection(sel)
902 self._setrestfreqs(freqs.get_frequency(i),
903 freqs.get_name(i), "MHz")
904 # ensure that we are not iterating past nIF
905 if i == self.nif()-1: break
906 self._setselection(savesel)
[931]907 else:
908 return
909 self._add_history("set_restfreqs", varlist)
910
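A sketch of the scalar, per-IF and linecatalog forms; 'lc' is assumed to be a linecatalog instance built elsewhere:

    from asap import scantable
    scan = scantable('mydata.rpf')                             # hypothetical file
    scan.set_restfreqs(freqs=1.420405752e9)                    # one value for everything
    scan.set_restfreqs(freqs=[1.420405752e9, 1.6654018e9])     # one value per IF
    # scan.set_restfreqs(freqs=lc)                             # lc: an existing linecatalog
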
[1360]911 def shift_refpix(self, delta):
912 """
[1573]913 Shift the reference pixel of the Spectra Coordinate by an
[1360]914 integer amount.
915 Parameters:
916 delta: the amount to shift by
917 Note:
918 Be careful using this with broadband data.
919 """
920 Scantable.shift(self, delta)
[931]921
[1259]922 def history(self, filename=None):
923 """
924 Print the history. Optionally to a file.
[1348]925 Parameters:
926 filename: The name of the file to save the history to.
[1259]927 """
[484]928 hist = list(self._gethistory())
[794]929 out = "-"*80
[484]930 for h in hist:
[489]931 if h.startswith("---"):
[794]932 out += "\n"+h
[489]933 else:
934 items = h.split("##")
935 date = items[0]
936 func = items[1]
937 items = items[2:]
[794]938 out += "\n"+date+"\n"
939 out += "Function: %s\n Parameters:" % (func)
[489]940 for i in items:
941 s = i.split("=")
[1118]942 out += "\n %s = %s" % (s[0], s[1])
[794]943 out += "\n"+"-"*80
[1259]944 if filename is not None:
945 if filename is "":
946 filename = 'scantable_history.txt'
947 import os
948 filename = os.path.expandvars(os.path.expanduser(filename))
949 if not os.path.isdir(filename):
950 data = open(filename, 'w')
951 data.write(out)
952 data.close()
953 else:
954 msg = "Illegal file name '%s'." % (filename)
955 if rcParams['verbose']:
956 print msg
957 else:
958 raise IOError(msg)
959 if rcParams['verbose']:
960 try:
961 from IPython.genutils import page as pager
962 except ImportError:
963 from pydoc import pager
964 pager(out)
965 else:
966 return out
[484]967 return
[513]968 #
969 # Maths business
970 #
971
[931]972 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]973 """
[1070]974 Return the (time) weighted average of a scan.
[513]975 Note:
[1070]976 in channels only - align if necessary
[513]977 Parameters:
978 mask: an optional mask (only used for 'var' and 'tsys'
979 weighting)
[558]980 scanav: True averages each scan separately
981 False (default) averages all scans together,
[1099]982 weight: Weighting scheme.
983 'none' (mean no weight)
984 'var' (1/var(spec) weighted)
985 'tsys' (1/Tsys**2 weighted)
986 'tint' (integration time weighted)
987 'tintsys' (Tint/Tsys**2)
988 'median' (median averaging)
[535]989 The default is 'tint'
[931]990 align: align the spectra in velocity before averaging. It takes
991 the time of the first spectrum as reference time.
[513]992 Example:
993 # time average the scantable without using a mask
[710]994 newscan = scan.average_time()
[513]995 """
996 varlist = vars()
[976]997 if weight is None: weight = 'TINT'
[513]998 if mask is None: mask = ()
[1099]999 if scanav: scanav = "SCAN"
1000 else: scanav = "NONE"
[1118]1001 scan = (self, )
[989]1002 try:
[1118]1003 if align:
1004 scan = (self.freq_align(insitu=False), )
1005 s = None
1006 if weight.upper() == 'MEDIAN':
1007 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1008 scanav))
1009 else:
1010 s = scantable(self._math._average(scan, mask, weight.upper(),
1011 scanav))
1012 except RuntimeError, msg:
[989]1013 if rcParams['verbose']:
1014 print msg
1015 return
1016 else: raise
[1099]1017 s._add_history("average_time", varlist)
[718]1018 print_log()
[513]1019 return s
[710]1020
[876]1021 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1022 """
1023 Return a scan where all spectra are converted to either
1024 Jansky or Kelvin depending upon the flux units of the scan table.
1025 By default the function tries to look the values up internally.
1026 If it can't find them (or if you want to over-ride), you must
1027 specify EITHER jyperk OR eta (and D which it will try to look up
1028 also if you don't set it). jyperk takes precedence if you set both.
1029 Parameters:
1030 jyperk: the Jy / K conversion factor
1031 eta: the aperture efficiency
1032 d: the geometric diameter (metres)
1033 insitu: if False a new scantable is returned.
1034 Otherwise, the scaling is done in-situ
1035 The default is taken from .asaprc (False)
1036 """
1037 if insitu is None: insitu = rcParams['insitu']
[876]1038 self._math._setinsitu(insitu)
[513]1039 varlist = vars()
1040 if jyperk is None: jyperk = -1.0
1041 if d is None: d = -1.0
1042 if eta is None: eta = -1.0
[876]1043 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1044 s._add_history("convert_flux", varlist)
1045 print_log()
1046 if insitu: self._assign(s)
1047 else: return s
[513]1048
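A sketch of convert_flux; the Jy/K factor shown is arbitrary:

    from asap import scantable
    scan = scantable('mydata.rpf')                          # hypothetical file
    auto = scan.convert_flux(insitu=False)                  # look the factors up internally
    manual = scan.convert_flux(jyperk=12.5, insitu=False)
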
[876]1049 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1050 """
1051 Return a scan after applying a gain-elevation correction.
1052 The correction can be made via either a polynomial or a
1053 table-based interpolation (and extrapolation if necessary).
1054 You specify polynomial coefficients, an ascii table or neither.
1055 If you specify neither, then a polynomial correction will be made
1056 with built in coefficients known for certain telescopes (an error
1057 will occur if the instrument is not known).
1058 The data and Tsys are *divided* by the scaling factors.
1059 Parameters:
1060 poly: Polynomial coefficients (default None) to compute a
1061 gain-elevation correction as a function of
1062 elevation (in degrees).
1063 filename: The name of an ascii file holding correction factors.
1064 The first row of the ascii file must give the column
1065 names and these MUST include columns
1066 "ELEVATION" (degrees) and "FACTOR" (multiply data
1067 by this) somewhere.
1068 The second row must give the data type of the
1069 column. Use 'R' for Real and 'I' for Integer.
1070 An example file would be
1071 (actual factors are arbitrary) :
1072
1073 TIME ELEVATION FACTOR
1074 R R R
1075 0.1 0 0.8
1076 0.2 20 0.85
1077 0.3 40 0.9
1078 0.4 60 0.85
1079 0.5 80 0.8
1080 0.6 90 0.75
1081 method: Interpolation method when correcting from a table.
1082 Values are "nearest", "linear" (default), "cubic"
1083 and "spline"
1084 insitu: if False a new scantable is returned.
1085 Otherwise, the scaling is done in-situ
1086 The default is taken from .asaprc (False)
1087 """
1088
1089 if insitu is None: insitu = rcParams['insitu']
[876]1090 self._math._setinsitu(insitu)
[513]1091 varlist = vars()
1092 if poly is None:
[1118]1093 poly = ()
[513]1094 from os.path import expandvars
1095 filename = expandvars(filename)
[876]1096 s = scantable(self._math._gainel(self, poly, filename, method))
1097 s._add_history("gain_el", varlist)
1098 print_log()
1099 if insitu: self._assign(s)
1100 else: return s
[710]1101
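A sketch of gain_el; the polynomial coefficients and the table file name are placeholders:

    from asap import scantable
    scan = scantable('mydata.rpf')                                    # hypothetical file
    corr = scan.gain_el(poly=[0.58, 0.013, -0.00012], insitu=False)   # polynomial in elevation
    # or interpolate from an ascii table of ELEVATION/FACTOR values:
    # corr = scan.gain_el(filename='gaincurve.txt', method='linear', insitu=False)
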
[931]1102 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1103 """
1104 Return a scan where all rows have been aligned in frequency/velocity.
1105 The alignment frequency frame (e.g. LSRK) is that set by function
1106 set_freqframe.
1107 Parameters:
1108 reftime: reference time to align at. By default, the time of
1109 the first row of data is used.
1110 method: Interpolation method for regridding the spectra.
1111 Choose from "nearest", "linear", "cubic" (default)
1112 and "spline"
1113 insitu: if False a new scantable is returned.
1114 Otherwise, the scaling is done in-situ
1115 The default is taken from .asaprc (False)
1116 """
[931]1117 if insitu is None: insitu = rcParams["insitu"]
[876]1118 self._math._setinsitu(insitu)
[513]1119 varlist = vars()
[931]1120 if reftime is None: reftime = ""
1121 s = scantable(self._math._freq_align(self, reftime, method))
[876]1122 s._add_history("freq_align", varlist)
1123 print_log()
1124 if insitu: self._assign(s)
1125 else: return s
[513]1126
[876]1127 def opacity(self, tau, insitu=None):
[513]1128 """
1129 Apply an opacity correction. The data
1130 and Tsys are multiplied by the correction factor.
1131 Parameters:
1132 tau: Opacity from which the correction factor is
1133 exp(tau*ZD)
1134 where ZD is the zenith-distance
1135 insitu: if False a new scantable is returned.
1136 Otherwise, the scaling is done in-situ
1137 The default is taken from .asaprc (False)
1138 """
1139 if insitu is None: insitu = rcParams['insitu']
[876]1140 self._math._setinsitu(insitu)
[513]1141 varlist = vars()
[876]1142 s = scantable(self._math._opacity(self, tau))
1143 s._add_history("opacity", varlist)
1144 print_log()
1145 if insitu: self._assign(s)
1146 else: return s
[513]1147
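A sketch of an opacity correction; the zenith opacity value is arbitrary:

    from asap import scantable
    scan = scantable('mydata.rpf')               # hypothetical file
    corr = scan.opacity(0.083, insitu=False)     # data and Tsys scaled by exp(tau*ZD)
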
1148 def bin(self, width=5, insitu=None):
1149 """
1150 Return a scan where all spectra have been binned up.
[1348]1151 Parameters:
[513]1152 width: The bin width (default=5) in pixels
1153 insitu: if False a new scantable is returned.
1154 Otherwise, the scaling is done in-situ
1155 The default is taken from .asaprc (False)
1156 """
1157 if insitu is None: insitu = rcParams['insitu']
[876]1158 self._math._setinsitu(insitu)
[513]1159 varlist = vars()
[876]1160 s = scantable(self._math._bin(self, width))
[1118]1161 s._add_history("bin", varlist)
[876]1162 print_log()
1163 if insitu: self._assign(s)
1164 else: return s
[513]1165
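A sketch of channel binning; the width is arbitrary:

    from asap import scantable
    scan = scantable('mydata.rpf')               # hypothetical file
    binned = scan.bin(width=10, insitu=False)    # 10-channel bins
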
[710]1166
[513]1167 def resample(self, width=5, method='cubic', insitu=None):
1168 """
[1348]1169 Return a scan where all spectra have been binned up.
[1573]1170
[1348]1171 Parameters:
[513]1172 width: The bin width (default=5) in pixels
1173 method: Interpolation method when correcting from a table.
1174 Values are "nearest", "linear", "cubic" (default)
1175 and "spline"
1176 insitu: if False a new scantable is returned.
1177 Otherwise, the scaling is done in-situ
1178 The default is taken from .asaprc (False)
1179 """
1180 if insitu is None: insitu = rcParams['insitu']
[876]1181 self._math._setinsitu(insitu)
[513]1182 varlist = vars()
[876]1183 s = scantable(self._math._resample(self, method, width))
[1118]1184 s._add_history("resample", varlist)
[876]1185 print_log()
1186 if insitu: self._assign(s)
1187 else: return s
[513]1188
1189
[946]1190 def average_pol(self, mask=None, weight='none'):
1191 """
1192 Average the Polarisations together.
1193 Parameters:
1194 mask: An optional mask defining the region, where the
1195 averaging will be applied. The output will have all
1196 specified points masked.
1197 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1198 weighted), or 'tsys' (1/Tsys**2 weighted)
1199 """
1200 varlist = vars()
1201 if mask is None:
1202 mask = ()
[1010]1203 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1204 s._add_history("average_pol", varlist)
[946]1205 print_log()
[992]1206 return s
[513]1207
[1145]1208 def average_beam(self, mask=None, weight='none'):
1209 """
1210 Average the Beams together.
1211 Parameters:
1212 mask: An optional mask defining the region, where the
1213 averaging will be applied. The output will have all
1214 specified points masked.
1215 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1216 weighted), or 'tsys' (1/Tsys**2 weighted)
1217 """
1218 varlist = vars()
1219 if mask is None:
1220 mask = ()
1221 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1222 s._add_history("average_beam", varlist)
1223 print_log()
1224 return s
1225
[992]1226 def convert_pol(self, poltype=None):
1227 """
1228 Convert the data to a different polarisation type.
[1565]1229 Note that you will need cross-polarisation terms for most conversions.
[992]1230 Parameters:
1231 poltype: The new polarisation type. Valid types are:
[1565]1232 "linear", "circular", "stokes" and "linpol"
[992]1233 """
1234 varlist = vars()
1235 try:
1236 s = scantable(self._math._convertpol(self, poltype))
[1118]1237 except RuntimeError, msg:
[992]1238 if rcParams['verbose']:
[1118]1239 print msg
1240 return
[992]1241 else:
1242 raise
[1118]1243 s._add_history("convert_pol", varlist)
[992]1244 print_log()
1245 return s
1246
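A sketch of a polarisation conversion; it assumes the cross-polarisation products are present in the data:

    from asap import scantable
    scan = scantable('mydata.rpf')               # hypothetical file
    stokes = scan.convert_pol('stokes')
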
[1574]1247 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
[513]1248 """
1249 Smooth the spectrum by the specified kernel (conserving flux).
1250 Parameters:
1251 kernel: The type of smoothing kernel. Select from
[1574]1252 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1253 or 'poly'
[513]1254 width: The width of the kernel in pixels. For hanning this is
1255 ignored, otherwise it defaults to 5 pixels.
1256 For 'gaussian' it is the Full Width Half
1257 Maximum. For 'boxcar' it is the full width.
[1574]1258 For 'rmedian' and 'poly' it is the half width.
1259 order: Optional parameter for 'poly' kernel (default is 2), to
1260 specify the order of the polynomial. Ignored by all other
1261 kernels.
[513]1262 insitu: if False a new scantable is returned.
1263 Otherwise, the scaling is done in-situ
1264 The default is taken from .asaprc (False)
1265 Example:
1266 none
1267 """
1268 if insitu is None: insitu = rcParams['insitu']
[876]1269 self._math._setinsitu(insitu)
[513]1270 varlist = vars()
[1574]1271 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1272 s._add_history("smooth", varlist)
1273 print_log()
1274 if insitu: self._assign(s)
1275 else: return s
[513]1276
[876]1277
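A sketch of smoothing; the kernel width is arbitrary:

    from asap import scantable
    scan = scantable('mydata.rpf')                                       # hypothetical file
    hanned = scan.smooth(insitu=False)                                   # default hanning kernel
    gsmooth = scan.smooth(kernel='gaussian', width=7.0, insitu=False)    # FWHM of 7 channels
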
[1391]1278 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1279 """
1280 Return a scan which has been baselined (all rows) by a polynomial.
1281 Parameters:
[794]1282 mask: an optional mask
1283 order: the order of the polynomial (default is 0)
[1061]1284 plot: plot the fit and the residual. In this mode each
1285 individual fit has to be approved, by typing 'y'
1286 or 'n'
[1391]1287 uselin: use linear polynomial fit
[794]1288 insitu: if False a new scantable is returned.
1289 Otherwise, the scaling is done in-situ
1290 The default is taken from .asaprc (False)
[513]1291 Example:
1292 # return a scan baselined by a third order polynomial,
1293 # not using a mask
1294 bscan = scan.poly_baseline(order=3)
[579]1295 """
[513]1296 if insitu is None: insitu = rcParams['insitu']
1297 varlist = vars()
1298 if mask is None:
[1295]1299 mask = [True for i in xrange(self.nchan(-1))]
[513]1300 from asap.asapfitter import fitter
[1217]1301 try:
1302 f = fitter()
1303 f.set_scan(self, mask)
[1391]1304 #f.set_function(poly=order)
1305 if uselin:
1306 f.set_function(lpoly=order)
1307 else:
1308 f.set_function(poly=order)
[1217]1309 s = f.auto_fit(insitu, plot=plot)
1310 s._add_history("poly_baseline", varlist)
1311 print_log()
1312 if insitu: self._assign(s)
1313 else: return s
1314 except RuntimeError:
1315 msg = "The fit failed, possibly because it didn't converge."
1316 if rcParams['verbose']:
1317 print msg
1318 return
1319 else:
1320 raise RuntimeError(msg)
[513]1321
[1217]1322
[1118]1323 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1324 threshold=3, chan_avg_limit=1, plot=False,
1325 insitu=None):
[880]1326 """
1327 Return a scan which has been baselined (all rows) by a polynomial.
1328 Spectral lines are detected first using linefinder and masked out
1329 to avoid them affecting the baseline solution.
1330
1331 Parameters:
1332 mask: an optional mask retrieved from scantable
1333 edge: an optional number of channel to drop at
1334 the edge of spectrum. If only one value is
1335 specified, the same number will be dropped from
1336 both sides of the spectrum. Default is to keep
[907]1337 all channels. Nested tuples represent individual
[976]1338 edge selection for different IFs (the number of spectral
1339 channels can differ)
[880]1340 order: the order of the polynomial (default is 0)
1341 threshold: the threshold used by line finder. It is better to
1342 keep it large as only strong lines affect the
1343 baseline solution.
[1280]1344 chan_avg_limit:
1345 a maximum number of consecutive spectral channels to
1346 average during the search of weak and broad lines.
1347 The default is no averaging (and no search for weak
1348 lines). If such lines can affect the fitted baseline
1349 (e.g. a high order polynomial is fitted), increase this
1350 parameter (usually values up to 8 are reasonable). Most
1351 users of this method should find the default value
1352 sufficient.
[1061]1353 plot: plot the fit and the residual. In this mode each
1354 individual fit has to be approved, by typing 'y'
1355 or 'n'
[880]1356 insitu: if False a new scantable is returned.
1357 Otherwise, the scaling is done in-situ
1358 The default is taken from .asaprc (False)
1359
1360 Example:
1361 scan2=scan.auto_poly_baseline(order=7)
1362 """
1363 if insitu is None: insitu = rcParams['insitu']
1364 varlist = vars()
1365 from asap.asapfitter import fitter
1366 from asap.asaplinefind import linefinder
1367 from asap import _is_sequence_or_number as _is_valid
1368
[976]1369 # check whether edge is set up for each IF individually
[1118]1370 individualedge = False;
1371 if len(edge) > 1:
1372 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1373 individualedge = True;
[907]1374
[1118]1375 if not _is_valid(edge, int) and not individualedge:
[909]1376 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1377 pair of integers specified as a tuple. Nested tuples are allowed \
1378 to make individual selection for different IFs."
[919]1379
[1118]1380 curedge = (0, 0)
1381 if individualedge:
1382 for edgepar in edge:
1383 if not _is_valid(edgepar, int):
1384 raise ValueError, "Each element of the 'edge' tuple has \
1385 to be a pair of integers or an integer."
[907]1386 else:
[1118]1387 curedge = edge;
[880]1388
1389 # setup fitter
1390 f = fitter()
1391 f.set_function(poly=order)
1392
1393 # setup line finder
[1118]1394 fl = linefinder()
[1268]1395 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1396
1397 if not insitu:
[1118]1398 workscan = self.copy()
[880]1399 else:
[1118]1400 workscan = self
[880]1401
[907]1402 fl.set_scan(workscan)
1403
[1118]1404 rows = range(workscan.nrow())
[880]1405 asaplog.push("Processing:")
1406 for r in rows:
[1118]1407 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1408 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1409 workscan.getpol(r), workscan.getcycle(r))
[880]1410 asaplog.push(msg, False)
[907]1411
[976]1412 # figure out edge parameter
[1118]1413 if individualedge:
1414 if len(edge) <= workscan.getif(r):
1415 raise RuntimeError, "Number of edge elements appear to " \
1416 "be less than the number of IFs"
1417 curedge = edge[workscan.getif(r)]
[919]1418
[976]1419 # setup line finder
[1118]1420 fl.find_lines(r, mask, curedge)
[1536]1421 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1422 mask_and(workscan._getmask(r), fl.get_mask()))
[880]1423 f.fit()
1424 x = f.get_parameters()
[1061]1425 if plot:
1426 f.plot(residual=True)
1427 x = raw_input("Accept fit ( [y]/n ): ")
1428 if x.upper() == 'N':
1429 continue
[880]1430 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1431 if plot:
1432 f._p.unmap()
1433 f._p = None
1434 workscan._add_history("auto_poly_baseline", varlist)
[880]1435 if insitu:
1436 self._assign(workscan)
1437 else:
1438 return workscan
1439
[914]1440 def rotate_linpolphase(self, angle):
1441 """
1442 Rotate the phase of the complex polarization O=Q+iU correlation.
1443 This is always done in situ in the raw data. So if you call this
1444 function more than once then each call rotates the phase further.
1445 Parameters:
1446 angle: The angle (degrees) to rotate (add) by.
1447 Examples:
1448 scan.rotate_linpolphase(2.3)
1449 """
1450 varlist = vars()
[936]1451 self._math._rotate_linpolphase(self, angle)
[914]1452 self._add_history("rotate_linpolphase", varlist)
1453 print_log()
1454 return
[710]1455
[513]1456
[914]1457 def rotate_xyphase(self, angle):
1458 """
1459 Rotate the phase of the XY correlation. This is always done in situ
1460 in the data. So if you call this function more than once
1461 then each call rotates the phase further.
1462 Parameters:
1463 angle: The angle (degrees) to rotate (add) by.
1464 Examples:
1465 scan.rotate_xyphase(2.3)
1466 """
1467 varlist = vars()
[936]1468 self._math._rotate_xyphase(self, angle)
[914]1469 self._add_history("rotate_xyphase", varlist)
1470 print_log()
1471 return
1472
1473 def swap_linears(self):
1474 """
[1573]1475 Swap the linear polarisations XX and YY, or better the first two
[1348]1476 polarisations as this also works for circulars.
[914]1477 """
1478 varlist = vars()
[936]1479 self._math._swap_linears(self)
[914]1480 self._add_history("swap_linears", varlist)
1481 print_log()
1482 return
1483
1484 def invert_phase(self):
1485 """
1486 Invert the phase of the complex polarisation
1487 """
1488 varlist = vars()
[936]1489 self._math._invert_phase(self)
[914]1490 self._add_history("invert_phase", varlist)
1491 print_log()
1492 return
1493
[876]1494 def add(self, offset, insitu=None):
[513]1495 """
1496 Return a scan where all spectra have the offset added
1497 Parameters:
1498 offset: the offset
1499 insitu: if False a new scantable is returned.
1500 Otherwise, the scaling is done in-situ
1501 The default is taken from .asaprc (False)
1502 """
1503 if insitu is None: insitu = rcParams['insitu']
[876]1504 self._math._setinsitu(insitu)
[513]1505 varlist = vars()
[876]1506 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1507 s._add_history("add", varlist)
[876]1508 print_log()
1509 if insitu:
1510 self._assign(s)
1511 else:
[513]1512 return s
1513
[1308]1514 def scale(self, factor, tsys=True, insitu=None):
[513]1515 """
1516 Return a scan where all spectra are scaled by the given 'factor'
1517 Parameters:
1518 factor: the scaling factor
1519 insitu: if False a new scantable is returned.
1520 Otherwise, the scaling is done in-situ
1521 The default is taken from .asaprc (False)
1522 tsys: if True (default) then apply the operation to Tsys
1523 as well as the data
1524 """
1525 if insitu is None: insitu = rcParams['insitu']
[876]1526 self._math._setinsitu(insitu)
[513]1527 varlist = vars()
[876]1528 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1529 s._add_history("scale", varlist)
[876]1530 print_log()
1531 if insitu:
1532 self._assign(s)
1533 else:
[513]1534 return s
1535
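A sketch of scaling, together with the equivalent operator form; the factor and offset are arbitrary:

    from asap import scantable
    scan = scantable('mydata.rpf')                       # hypothetical file
    scaled = scan.scale(1.3, tsys=True, insitu=False)    # multiply data (and Tsys) by 1.3
    shifted = scan + 0.05                                # the operators accept float values too
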
[1504]1536 def set_sourcetype(self, match, matchtype="pattern",
1537 sourcetype="reference"):
[1502]1538 """
1539 Set the type of the source to be a source or reference scan
1540 using the provided pattern.
1541 Parameters:
[1504]1542 match: a Unix style pattern, regular expression or selector
1543 matchtype: 'pattern' (default) UNIX style pattern or
1544 'regex' regular expression
[1502]1545 sourcetype: the type of the source to use (source/reference)
1546 """
1547 varlist = vars()
1548 basesel = self.get_selection()
1549 stype = -1
1550 if sourcetype.lower().startswith("r"):
1551 stype = 1
1552 elif sourcetype.lower().startswith("s"):
1553 stype = 0
[1504]1554 else:
[1502]1555 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]1556 if matchtype.lower().startswith("p"):
1557 matchtype = "pattern"
1558 elif matchtype.lower().startswith("r"):
1559 matchtype = "regex"
1560 else:
1561 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]1562 sel = selector()
1563 if isinstance(match, selector):
1564 sel = match
1565 else:
[1504]1566 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]1567 self.set_selection(basesel+sel)
1568 self._setsourcetype(stype)
1569 self.set_selection(basesel)
[1573]1570 self._add_history("set_sourcetype", varlist)
[1502]1571
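A sketch of tagging reference scans by name; '*_R' follows the Mopra/Parkes naming convention mentioned in auto_quotient below, while '*_S' is purely hypothetical:

    from asap import scantable
    scan = scantable('mydata.rpf')       # hypothetical file
    scan.set_sourcetype('*_R', matchtype='pattern', sourcetype='reference')
    scan.set_sourcetype('*_S', sourcetype='source')
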
[1348]1572 def auto_quotient(self, preserve=True, mode='paired'):
[670]1573 """
1574 This function allows one to build quotients automatically.
1575 It assumes the observation has the same number of
1576 "ons" and "offs".
1577 Parameters:
[710]1578 preserve: you can preserve (default) the continuum or
1579 remove it. The equations used are
[670]1580 preserve: Output = Toff * (on/off) - Toff
[1070]1581 remove: Output = Toff * (on/off) - Ton
[1573]1582 mode: the on/off detection mode
[1348]1583 'paired' (default)
1584 identifies 'off' scans by the
1585 trailing '_R' (Mopra/Parkes) or
1586 '_e'/'_w' (Tid) and matches
1587 on/off pairs from the observing pattern
[1502]1588 'time'
1589 finds the closest off in time
[1348]1590
[670]1591 """
[1348]1592 modes = ["time", "paired"]
[670]1593 if not mode in modes:
[876]1594 msg = "please provide valid mode. Valid modes are %s" % (modes)
1595 raise ValueError(msg)
1596 varlist = vars()
[1348]1597 s = None
1598 if mode.lower() == "paired":
1599 basesel = self.get_selection()
[1356]1600 sel = selector()+basesel
1601 sel.set_query("SRCTYPE==1")
1602 self.set_selection(sel)
[1348]1603 offs = self.copy()
1604 sel.set_query("SRCTYPE==0")
[1356]1605 self.set_selection(sel)
[1348]1606 ons = self.copy()
1607 s = scantable(self._math._quotient(ons, offs, preserve))
1608 self.set_selection(basesel)
1609 elif mode.lower() == "time":
1610 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1611 s._add_history("auto_quotient", varlist)
[876]1612 print_log()
1613 return s
[710]1614
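A sketch of forming quotients automatically; it assumes the source types (on/off) are already set, e.g. via set_sourcetype above:

    from asap import scantable
    scan = scantable('mydata.rpf')                            # hypothetical file
    q = scan.auto_quotient(preserve=True, mode='paired')      # match on/off pairs via SRCTYPE
    qt = scan.auto_quotient(mode='time')                      # use the closest 'off' in time instead
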
[1145]1615 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1616 """
[1143]1617 Form a quotient using "off" beams when observing in "MX" mode.
1618 Parameters:
[1145]1619 mask: an optional mask to be used when weight == 'stddev'
[1143]1620 weight: How to average the off beams. Default is 'median'.
[1145]1621 preserve: you can preserve (default) the continuum or
1622 remove it. The equations used are
1623 preserve: Output = Toff * (on/off) - Toff
1624 remove: Output = Toff * (on/off) - Ton
[1217]1625 """
[1143]1626 if mask is None: mask = ()
[1141]1627 varlist = vars()
1628 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1629 preoff = scantable(self._math._mx_extract(self, 'off'))
1630 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1631 from asapmath import quotient
[1145]1632 q = quotient(on, off, preserve)
[1143]1633 q._add_history("mx_quotient", varlist)
[1145]1634 print_log()
[1217]1635 return q
[513]1636
[718]1637 def freq_switch(self, insitu=None):
1638 """
1639 Apply frequency switching to the data.
1640 Parameters:
1641 insitu: if False a new scantable is returned.
1642 Otherwise, the switching is done in-situ
1643 The default is taken from .asaprc (False)
1644 Example:
1645 none
1646 """
1647 if insitu is None: insitu = rcParams['insitu']
[876]1648 self._math._setinsitu(insitu)
[718]1649 varlist = vars()
[876]1650 s = scantable(self._math._freqswitch(self))
[1118]1651 s._add_history("freq_switch", varlist)
[876]1652 print_log()
1653 if insitu: self._assign(s)
1654 else: return s
[718]1655
[780]1656 def recalc_azel(self):
1657 """
1658 Recalculate the azimuth and elevation for each position.
1659 Parameters:
1660 none
1661 Example:
1662 """
1663 varlist = vars()
[876]1664 self._recalcazel()
[780]1665 self._add_history("recalc_azel", varlist)
1666 print_log()
1667 return
1668
[513]1669 def __add__(self, other):
1670 varlist = vars()
1671 s = None
1672 if isinstance(other, scantable):
[1573]1673 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1674 elif isinstance(other, float):
[876]1675 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1676 else:
[718]1677 raise TypeError("Other input is not a scantable or float value")
[513]1678 s._add_history("operator +", varlist)
[718]1679 print_log()
[513]1680 return s
1681
1682 def __sub__(self, other):
1683 """
1684 implicit on all axes and on Tsys
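        Example:
            A minimal usage sketch (assuming 'scans' and 'other' are compatible
            scantables); +, -, * and / behave analogously:
                diff = scans - other      # scantable - scantable
                shifted = scans - 10.0    # scantable - float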
1685 """
1686 varlist = vars()
1687 s = None
1688 if isinstance(other, scantable):
[1308]1689 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1690 elif isinstance(other, float):
[876]1691 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1692 else:
[718]1693 raise TypeError("Other input is not a scantable or float value")
[513]1694 s._add_history("operator -", varlist)
[718]1695 print_log()
[513]1696 return s
[710]1697
[513]1698 def __mul__(self, other):
1699 """
1700 implicit on all axes and on Tsys
1701 """
1702 varlist = vars()
1703 s = None
1704 if isinstance(other, scantable):
[1308]1705 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1706 elif isinstance(other, float):
[876]1707 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1708 else:
[718]1709 raise TypeError("Other input is not a scantable or float value")
[513]1710 s._add_history("operator *", varlist)
[718]1711 print_log()
[513]1712 return s
1713
[710]1714
[513]1715 def __div__(self, other):
1716 """
1717 implicit on all axes and on Tsys
1718 """
1719 varlist = vars()
1720 s = None
1721 if isinstance(other, scantable):
[1308]1722 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1723 elif isinstance(other, float):
1724 if other == 0.0:
[718]1725 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1726 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1727 else:
[718]1728 raise TypeError("Other input is not a scantable or float value")
[513]1729 s._add_history("operator /", varlist)
[718]1730 print_log()
[513]1731 return s
1732
[530]1733 def get_fit(self, row=0):
1734 """
1735 Print or return the stored fits for a row in the scantable
1736 Parameters:
1737 row: the row to which the fit has been applied.
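        Example:
            A minimal usage sketch (assuming a fit has already been stored for row 0):
                fitinfo = scans.get_fit(0)   # printed when verbose, otherwise returned as a dict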
1738 """
1739 if row > self.nrow():
1740 return
[976]1741 from asap.asapfit import asapfit
[530]1742 fit = asapfit(self._getfit(row))
[718]1743 if rcParams['verbose']:
[530]1744 print fit
1745 return
1746 else:
1747 return fit.as_dict()
1748
[1483]1749 def flag_nans(self):
1750 """
1751 Utility function to flag NaN values in the scantable.
1752 """
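        # Work row by row: restrict the selection to the current row's scan,
        # beam, IF and polarisation, flag any NaN channels found in that
        # spectrum, then restore the original selection afterwards.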
1753 import numpy
1754 basesel = self.get_selection()
1755 for i in range(self.nrow()):
1756 sel = selector()+basesel
1757 sel.set_scans(self.getscan(i))
1758 sel.set_beams(self.getbeam(i))
1759 sel.set_ifs(self.getif(i))
1760 sel.set_polarisations(self.getpol(i))
1761 self.set_selection(sel)
1762 nans = numpy.isnan(self._getspectrum(0))
1763 if numpy.any(nans):
1764 bnans = [ bool(v) for v in nans]
1765 self.flag(bnans)
1766 self.set_selection(basesel)
1767
[1573]1768
[484]1769 def _add_history(self, funcname, parameters):
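        # Record the call in the table history as a single '##'-separated
        # string: '<date>##<funcname>##<par>=<value>##...'. Scantable values
        # are stored as the literal 'scantable' and masks are compressed
        # into [start, end] segments via _zip_mask.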
[1435]1770 if not rcParams['scantable.history']:
1771 return
[484]1772 # create date
1773 sep = "##"
1774 from datetime import datetime
1775 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1776 hist = dstr+sep
1777 hist += funcname+sep#cdate+sep
1778 if parameters.has_key('self'): del parameters['self']
[1118]1779 for k, v in parameters.iteritems():
[484]1780 if type(v) is dict:
[1118]1781 for k2, v2 in v.iteritems():
[484]1782 hist += k2
1783 hist += "="
[1118]1784 if isinstance(v2, scantable):
[484]1785 hist += 'scantable'
1786 elif k2 == 'mask':
[1118]1787 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1788 hist += str(self._zip_mask(v2))
1789 else:
1790 hist += str(v2)
[484]1791 else:
[513]1792 hist += str(v2)
[484]1793 else:
1794 hist += k
1795 hist += "="
[1118]1796 if isinstance(v, scantable):
[484]1797 hist += 'scantable'
1798 elif k == 'mask':
[1118]1799 if isinstance(v, list) or isinstance(v, tuple):
[513]1800 hist += str(self._zip_mask(v))
1801 else:
1802 hist += str(v)
[484]1803 else:
1804 hist += str(v)
1805 hist += sep
1806 hist = hist[:-2] # remove trailing '##'
1807 self._addhistory(hist)
1808
[710]1809
[484]1810 def _zip_mask(self, mask):
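        # Compress a boolean channel mask into a list of [start, end)
        # segments covering each run of selected channels, e.g.
        # [0, 0, 1, 1, 1, 0, 1] -> [[2, 5], [6, 7]].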
1811 mask = list(mask)
1812 i = 0
1813 segments = []
1814 while mask[i:].count(1):
1815 i += mask[i:].index(1)
1816 if mask[i:].count(0):
1817 j = i + mask[i:].index(0)
1818 else:
[710]1819 j = len(mask)
[1118]1820 segments.append([i, j])
[710]1821 i = j
[484]1822 return segments
[714]1823
[626]1824 def _get_ordinate_label(self):
1825 fu = "("+self.get_fluxunit()+")"
1826 import re
1827 lbl = "Intensity"
[1118]1828 if re.match(".K.", fu):
[626]1829 lbl = "Brightness Temperature "+ fu
[1118]1830 elif re.match(".Jy.", fu):
[626]1831 lbl = "Flux density "+ fu
1832 return lbl
[710]1833
[876]1834 def _check_ifs(self):
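        # Consistency check: True when the mean channel count over all
        # non-empty IFs equals that of the first IF, i.e. all IFs share
        # the same number of channels.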
1835 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1836 nchans = filter(lambda t: t > 0, nchans)
[876]1837 return (sum(nchans)/len(nchans) == nchans[0])
[976]1838
1839 def _fill(self, names, unit, average):
1840 import os
1841 from asap._asap import stfiller
1842 first = True
1843 fullnames = []
1844 for name in names:
1845 name = os.path.expandvars(name)
1846 name = os.path.expanduser(name)
1847 if not os.path.exists(name):
1848 msg = "File '%s' does not exist" % (name)
1849 if rcParams['verbose']:
1850 asaplog.push(msg)
1851 print asaplog.pop().strip()
1852 return
1853 raise IOError(msg)
1854 fullnames.append(name)
1855 if average:
1856 asaplog.push('Auto averaging integrations')
[1079]1857 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1858 for name in fullnames:
[1073]1859 tbl = Scantable(stype)
1860 r = stfiller(tbl)
[1504]1861 rx = rcParams['scantable.reference']
1862 r._setreferenceexpr(rx)
[976]1863 msg = "Importing %s..." % (name)
[1118]1864 asaplog.push(msg, False)
[976]1865 print_log()
[1118]1866 r._open(name, -1, -1)
[976]1867 r._read()
1868 if average:
[1118]1869 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1870 if not first:
1871 tbl = self._math._merge([self, tbl])
1872 Scantable.__init__(self, tbl)
1873 r._close()
[1118]1874 del r, tbl
[976]1875 first = False
1876 if unit is not None:
1877 self.set_fluxunit(unit)
1878 self.set_freqframe(rcParams['scantable.freqframe'])
1879
[1402]1880 def __getitem__(self, key):
1881 if key < 0:
1882 key += self.nrow()
1883 if key >= self.nrow():
1884 raise IndexError("Row index out of range.")
1885 return self._getspectrum(key)
1886
1887 def __setitem__(self, key, value):
1888 if key < 0:
1889 key += self.nrow()
1890 if key >= self.nrow():
1891 raise IndexError("Row index out of range.")
1892 if not hasattr(value, "__len__") or \
1893 len(value) > self.nchan(self.getif(key)):
1894 raise ValueError("Spectrum length doesn't match.")
1895 return self._setspectrum(value, key)
1896
1897 def __len__(self):
1898 return self.nrow()
1899
1900 def __iter__(self):
1901 for i in range(len(self)):
1902 yield self[i]