source: trunk/python/scantable.py@1578

Last change on this file since 1578 was 1576, checked in by Malte Marquarding, 15 years ago

Ticket #169: allow direct settings of selections

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.8 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1435]41 if isinstance(filename, str):# or \
42# (isinstance(filename, list) or isinstance(filename, tuple)) \
43# and isinstance(filename[-1], str):
[976]44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
[718]49 if rcParams['verbose']:
[976]50 asaplog.push(s)
51 print asaplog.pop().strip()
[718]52 return
[976]53 raise IOError(s)
[1115]54 if os.path.isdir(filename) \
[1118]55 and not os.path.exists(filename+'/table.f1'):
[976]56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
[1118]58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
[976]60 if unit is not None:
61 self.set_fluxunit(unit)
62 self.set_freqframe(rcParams['scantable.freqframe'])
[718]63 else:
[1118]64 msg = "The given file '%s' is not a valid " \
65 "asap table." % (filename)
[976]66 if rcParams['verbose']:
67 print msg
68 return
69 else:
70 raise IOError(msg)
[226]71 else:
[1118]72 self._fill([filename], unit, average)
73 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]74 and isinstance(filename[-1], str):
75 self._fill(filename, unit, average)
[1259]76 self._add_history("scantable", varlist)
[714]77 print_log()
[102]78
[876]79 def save(self, name=None, format=None, overwrite=False):
[116]80 """
[1280]81 Store the scantable on disk. This can be an asap (aips++) Table,
82 SDFITS or MS2 format.
[116]83 Parameters:
[1093]84 name: the name of the outputfile. For format "ASCII"
85 this is the root file name (data in 'name'.txt
[497]86 and header in 'name'_header.txt)
[116]87 format: an optional file format. Default is ASAP.
[280]88 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]89 'SDFITS' (save as SDFITS file)
[200]90 'ASCII' (saves as ascii text file)
[226]91 'MS2' (saves as an aips++
92 MeasurementSet V2)
[1573]93 'FITS' (save as image FITS - not
[1443]94 readable by CLASS)
95 'CLASS' (save as FITS readable by CLASS)
[411]96 overwrite: If the file should be overwritten if it exists.
[256]97 The default False is to return with warning
[411]98 without writing the output. USE WITH CARE.
[116]99 Example:
100 scan.save('myscan.asap')
[1118]101 scan.save('myscan.sdfits', 'SDFITS')
[116]102 """
[411]103 from os import path
[226]104 if format is None: format = rcParams['scantable.save']
[256]105 suffix = '.'+format.lower()
[1118]106 if name is None or name == "":
[256]107 name = 'scantable'+suffix
[718]108 msg = "No filename given. Using default name %s..." % name
109 asaplog.push(msg)
[411]110 name = path.expandvars(name)
[256]111 if path.isfile(name) or path.isdir(name):
112 if not overwrite:
[718]113 msg = "File %s exists." % name
114 if rcParams['verbose']:
115 print msg
116 return
117 else:
118 raise IOError(msg)
[451]119 format2 = format.upper()
120 if format2 == 'ASAP':
[116]121 self._save(name)
122 else:
[989]123 from asap._asap import stwriter as stw
[1118]124 writer = stw(format2)
125 writer.write(self, name)
[718]126 print_log()
[116]127 return
128
[102]129 def copy(self):
130 """
131 Return a copy of this scantable.
[1348]132 Note:
133 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]134 Parameters:
[113]135 none
[102]136 Example:
137 copiedscan = scan.copy()
138 """
[876]139 sd = scantable(Scantable._copy(self))
[113]140 return sd
141
[1093]142 def drop_scan(self, scanid=None):
143 """
144 Return a new scantable where the specified scan number(s) has(have)
145 been dropped.
146 Parameters:
147 scanid: a (list of) scan number(s)
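 Example (an illustrative sketch; the scan numbers are placeholders):
     # return a new scantable without scans 2 and 4
     newscan = scan.drop_scan([2, 4])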
148 """
149 from asap import _is_sequence_or_number as _is_valid
150 from asap import _to_list
151 from asap import unique
152 if not _is_valid(scanid):
153 if rcParams['verbose']:
154 print "Please specify a scanno to drop from the scantable"
155 return
156 else:
157 raise RuntimeError("No scan given")
158 try:
159 scanid = _to_list(scanid)
160 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
161 for sid in scanid: allscans.remove(sid)
[1118]162 if len(allscans) == 0:
163 raise ValueError("Can't remove all scans")
[1093]164 except ValueError:
165 if rcParams['verbose']:
166 print "Couldn't find any match."
167 return
168 else: raise
169 try:
170 bsel = self.get_selection()
171 sel = selector()
172 sel.set_scans(allscans)
173 self.set_selection(bsel+sel)
174 scopy = self._copy()
175 self.set_selection(bsel)
176 return scantable(scopy)
177 except RuntimeError:
[1118]178 if rcParams['verbose']:
179 print "Couldn't find any match."
180 else:
181 raise
[1093]182
183
[102]184 def get_scan(self, scanid=None):
185 """
186 Return a specific scan (by scanno) or collection of scans (by
187 source name) in a new scantable.
[1348]188 Note:
189 See scantable.drop_scan() for the inverse operation.
[102]190 Parameters:
[513]191 scanid: a (list of) scanno or a source name, unix-style
192 patterns are accepted for source name matching, e.g.
193 '*_R' gets all 'ref' scans
[102]194 Example:
[513]195 # get all scans containing the source '323p459'
196 newscan = scan.get_scan('323p459')
197 # get all 'off' scans
198 refscans = scan.get_scan('*_R')
199 # get a subset of scans by scanno (as listed in scan.summary())
[1118]200 newscan = scan.get_scan([0, 2, 7, 10])
[102]201 """
202 if scanid is None:
[718]203 if rcParams['verbose']:
[1118]204 print "Please specify a scan no or name to " \
205 "retrieve from the scantable"
[718]206 return
207 else:
208 raise RuntimeError("No scan given")
209
[102]210 try:
[946]211 bsel = self.get_selection()
212 sel = selector()
[102]213 if type(scanid) is str:
[946]214 sel.set_name(scanid)
215 self.set_selection(bsel+sel)
[876]216 scopy = self._copy()
[946]217 self.set_selection(bsel)
[876]218 return scantable(scopy)
[102]219 elif type(scanid) is int:
[946]220 sel.set_scans([scanid])
221 self.set_selection(bsel+sel)
[876]222 scopy = self._copy()
[946]223 self.set_selection(bsel)
[876]224 return scantable(scopy)
[381]225 elif type(scanid) is list:
[946]226 sel.set_scans(scanid)
227 self.set_selection(sel)
[876]228 scopy = self._copy()
[946]229 self.set_selection(bsel)
[876]230 return scantable(scopy)
[381]231 else:
[718]232 msg = "Illegal scanid type, use 'int' or 'list' of ints."
233 if rcParams['verbose']:
234 print msg
235 else:
236 raise TypeError(msg)
[102]237 except RuntimeError:
[718]238 if rcParams['verbose']: print "Couldn't find any match."
239 else: raise
[102]240
241 def __str__(self):
[1118]242 return Scantable._summary(self, True)
[102]243
[976]244 def summary(self, filename=None):
[102]245 """
246 Print a summary of the contents of this scantable.
247 Parameters:
248 filename: the name of a file to write the output to
249 Default - no file output
[381]250 verbose: print extra info such as the frequency table
251 The default (False) is taken from .asaprc
[102]252 """
[976]253 info = Scantable._summary(self, True)
254 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]255 if filename is not None:
[256]256 if filename == "":
257 filename = 'scantable_summary.txt'
[415]258 from os.path import expandvars, isdir
[411]259 filename = expandvars(filename)
[415]260 if not isdir(filename):
[413]261 data = open(filename, 'w')
262 data.write(info)
263 data.close()
264 else:
[718]265 msg = "Illegal file name '%s'." % (filename)
266 if rcParams['verbose']:
267 print msg
268 else:
269 raise IOError(msg)
270 if rcParams['verbose']:
[794]271 try:
272 from IPython.genutils import page as pager
273 except ImportError:
274 from pydoc import pager
275 pager(info)
[718]276 else:
277 return info
[710]278
[1512]279 def get_spectrum(self, rowno):
[1471]280 """Return the spectrum for the current row in the scantable as a list.
281 Parameters:
[1573]282 rowno: the row number to retrieve the spectrum from
[1471]283 """
284 return self._getspectrum(rowno)
[946]285
[1471]286 def get_mask(self, rowno):
287 """Return the mask for the current row in the scantable as a list.
288 Parameters:
[1573]289 rowno: the row number to retrieve the mask from
[1471]290 """
291 return self._getmask(rowno)
292
293 def set_spectrum(self, spec, rowno):
294 """Return the spectrum for the current row in the scantable as a list.
295 Parameters:
296 spec: the spectrum
[1573]297 rowno: the row number to set the spectrum for
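 Example (an illustrative sketch; assumes 'scan' is an existing scantable):
     # double the values of row 0 and write them back
     spec = [2.0*v for v in scan.get_spectrum(0)]
     scan.set_spectrum(spec, 0)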
[1471]298 """
299 assert(len(spec) == self.nchan())
300 return self._setspectrum(spec, rowno)
301
[946]302 def get_selection(self):
303 """
[1005]304 Get the selection object currently set on this scantable.
305 Parameters:
306 none
307 Example:
308 sel = scan.get_selection()
309 sel.set_ifs(0) # select IF 0
310 scan.set_selection(sel) # apply modified selection
[946]311 """
312 return selector(self._getselection())
313
[1576]314 def set_selection(self, selection=None, **kw):
[946]315 """
[1005]316 Select a subset of the data. All following operations on this scantable
317 are only applied to this selection.
318 Parameters:
319 selection: a selector object (default: unset the selection).
    Alternatively, the selection can be given directly as keyword
    arguments matching the valid selector fields.
320 Examples:
321 sel = selector() # create a selection object
[1118]322 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]323 scan.set_selection(sel) # set the selection
324 scan.summary() # will only print summary of scanno 0 and 3
325 scan.set_selection() # unset the selection
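 # alternatively, pass the selection directly as keywords (a sketch,
 # assuming 'scans' is one of the valid selector fields checked below)
 scan.set_selection(scans=[0, 3])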
[946]326 """
[1576]327 if selection is None:
328 # reset
329 if len(kw) == 0:
330 selection = selector()
331 else:
332 # try keywords
333 for k in kw:
334 if k not in selector.fields:
335 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
336 selection = selector(**kw)
[946]337 self._setselection(selection)
338
[876]339 def stats(self, stat='stddev', mask=None):
[102]340 """
[135]341 Determine the specified statistic of the current beam/if/pol
[102]342 Takes a 'mask' as an optional parameter to specify which
343 channels should be excluded.
344 Parameters:
[135]345 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
346 'var', 'stddev', 'avdev', 'rms', 'median'
347 mask: an optional mask specifying where the statistic
[102]348 should be determined.
349 Example:
[113]350 scan.set_unit('channel')
[1118]351 msk = scan.create_mask([100, 200], [500, 600])
[135]352 scan.stats(stat='mean', mask=msk)
[102]353 """
354 if mask == None:
[876]355 mask = []
[1118]356 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]357 if not self._check_ifs():
[1118]358 raise ValueError("Cannot apply mask as the IFs have different "
359 "number of channels. Please use setselection() "
360 "to select individual IFs")
[256]361
[876]362 statvals = self._math._stats(self, mask, stat)
363 out = ''
364 axes = []
365 for i in range(self.nrow()):
366 axis = []
367 axis.append(self.getscan(i))
368 axis.append(self.getbeam(i))
369 axis.append(self.getif(i))
370 axis.append(self.getpol(i))
371 axis.append(self.getcycle(i))
372 axes.append(axis)
373 tm = self._gettime(i)
374 src = self._getsourcename(i)
375 out += 'Scan[%d] (%s) ' % (axis[0], src)
376 out += 'Time[%s]:\n' % (tm)
377 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
378 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
379 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
380 out += '= %3.3f\n' % (statvals[i])
381 out += "--------------------------------------------------\n"
[256]382
[876]383 if rcParams['verbose']:
384 print "--------------------------------------------------"
[1118]385 print " ", stat
[876]386 print "--------------------------------------------------"
387 print out
[1295]388 #else:
389 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
390 # 'axes' : axes,
391 # 'data': statvals}
392 return statvals
[102]393
[1118]394 def stddev(self, mask=None):
[135]395 """
396 Determine the standard deviation of the current beam/if/pol
397 Takes a 'mask' as an optional parameter to specify which
398 channels should be excluded.
399 Parameters:
400 mask: an optional mask specifying where the standard
401 deviation should be determined.
402
403 Example:
404 scan.set_unit('channel')
[1118]405 msk = scan.create_mask([100, 200], [500, 600])
[135]406 scan.stddev(mask=msk)
407 """
[1118]408 return self.stats(stat='stddev', mask=mask)
[135]409
[1003]410
[1259]411 def get_column_names(self):
[1003]412 """
413 Return a list of column names, which can be used for selection.
414 """
[1259]415 return list(Scantable.get_column_names(self))
[1003]416
[876]417 def get_tsys(self):
[113]418 """
419 Return the System temperatures.
420 Returns:
[876]421 a list of Tsys values for the current selection
[113]422 """
[256]423
[876]424 return self._row_callback(self._gettsys, "Tsys")
[256]425
[876]426 def _row_callback(self, callback, label):
427 axes = []
[1118]428 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]429 out = ""
[1118]430 outvec = []
[876]431 for i in range(self.nrow()):
432 axis = []
433 axis.append(self.getscan(i))
434 axis.append(self.getbeam(i))
435 axis.append(self.getif(i))
436 axis.append(self.getpol(i))
437 axis.append(self.getcycle(i))
438 axes.append(axis)
439 tm = self._gettime(i)
440 src = self._getsourcename(i)
441 out += 'Scan[%d] (%s) ' % (axis[0], src)
442 out += 'Time[%s]:\n' % (tm)
443 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
444 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
445 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
446 outvec.append(callback(i))
447 out += '= %3.3f\n' % (outvec[i])
448 out += "--------------------------------------------------\n"
449 if rcParams['verbose']:
450 print "--------------------------------------------------"
451 print " %s" % (label)
452 print "--------------------------------------------------"
453 print out
[1175]454 # disabled because the vector seems more useful
455 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
456 return outvec
[256]457
[1070]458 def _get_column(self, callback, row=-1):
459 """
460 """
461 if row == -1:
462 return [callback(i) for i in range(self.nrow())]
463 else:
464 if 0 <= row < self.nrow():
465 return callback(row)
[256]466
[1070]467
[1348]468 def get_time(self, row=-1, asdatetime=False):
[113]469 """
470 Get a list of time stamps for the observations.
[1348]471 Return a string for each integration time stamp in the scantable, or a datetime object if asdatetime=True.
[113]472 Parameters:
[1348]473 row: row no of integration. Default -1 return all rows
474 asdatetime: return values as datetime objects rather than strings
[113]475 Example:
476 none
477 """
[1175]478 from time import strptime
479 from datetime import datetime
[1392]480 times = self._get_column(self._gettime, row)
[1348]481 if not asdatetime:
[1392]482 return times
[1175]483 format = "%Y/%m/%d/%H:%M:%S"
484 if isinstance(times, list):
485 return [datetime(*strptime(i, format)[:6]) for i in times]
486 else:
487 return datetime(*strptime(times, format)[:6])
[102]488
[1348]489
490 def get_inttime(self, row=-1):
491 """
492 Get a list of integration times for the observations.
493 Return a time in seconds for each integration in the scantable.
494 Parameters:
495 row: row no of integration. Default -1 return all rows.
496 Example:
497 none
498 """
[1573]499 return self._get_column(self._getinttime, row)
[1348]500
[1573]501
[714]502 def get_sourcename(self, row=-1):
503 """
[794]504 Get a list of source names for the observations.
[714]505 Return a string for each integration in the scantable.
506 Parameters:
[1348]507 row: row no of integration. Default -1 return all rows.
[714]508 Example:
509 none
510 """
[1070]511 return self._get_column(self._getsourcename, row)
[714]512
[794]513 def get_elevation(self, row=-1):
514 """
515 Get a list of elevations for the observations.
516 Return a float for each integration in the scantable.
517 Parameters:
[1348]518 row: row no of integration. Default -1 return all rows.
[794]519 Example:
520 none
521 """
[1070]522 return self._get_column(self._getelevation, row)
[794]523
524 def get_azimuth(self, row=-1):
525 """
526 Get a list of azimuths for the observations.
527 Return a float for each integration in the scantable.
528 Parameters:
[1348]529 row: row no of integration. Default -1 return all rows.
[794]530 Example:
531 none
532 """
[1070]533 return self._get_column(self._getazimuth, row)
[794]534
535 def get_parangle(self, row=-1):
536 """
537 Get a list of parallactic angles for the observations.
538 Return a float for each integration in the scantable.
539 Parameters:
[1348]540 row: row no of integration. Default -1 return all rows.
[794]541 Example:
542 none
543 """
[1070]544 return self._get_column(self._getparangle, row)
[794]545
[1070]546 def get_direction(self, row=-1):
547 """
548 Get a list of Positions on the sky (direction) for the observations.
549 Return a formatted direction string for each integration in the scantable.
550 Parameters:
551 row: row no of integration. Default -1 return all rows
552 Example:
553 none
554 """
555 return self._get_column(self._getdirection, row)
556
[1391]557 def get_directionval(self, row=-1):
558 """
559 Get a list of Positions on the sky (direction) for the observations.
560 Return a float for each integration in the scantable.
561 Parameters:
562 row: row no of integration. Default -1 return all rows
563 Example:
564 none
565 """
566 return self._get_column(self._getdirectionvec, row)
567
568
[102]569 def set_unit(self, unit='channel'):
570 """
571 Set the unit for all following operations on this scantable
572 Parameters:
573 unit: optional unit, default is 'channel'
[1118]574 one of '*Hz', 'km/s', 'channel', ''
[102]575 """
[484]576 varlist = vars()
[1118]577 if unit in ['', 'pixel', 'channel']:
[113]578 unit = ''
579 inf = list(self._getcoordinfo())
580 inf[0] = unit
581 self._setcoordinfo(inf)
[1118]582 self._add_history("set_unit", varlist)
[113]583
[484]584 def set_instrument(self, instr):
[358]585 """
[1348]586 Set the instrument for subsequent processing.
[358]587 Parameters:
[710]588 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]589 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]590 """
591 self._setInstrument(instr)
[1118]592 self._add_history("set_instrument", vars())
[718]593 print_log()
[358]594
[1190]595 def set_feedtype(self, feedtype):
596 """
597 Overwrite the feed type, which might not be set correctly.
598 Parameters:
599 feedtype: 'linear' or 'circular'
600 """
601 self._setfeedtype(feedtype)
602 self._add_history("set_feedtype", vars())
603 print_log()
604
[276]605 def set_doppler(self, doppler='RADIO'):
606 """
607 Set the doppler for all following operations on this scantable.
608 Parameters:
609 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
610 """
[484]611 varlist = vars()
[276]612 inf = list(self._getcoordinfo())
613 inf[2] = doppler
614 self._setcoordinfo(inf)
[1118]615 self._add_history("set_doppler", varlist)
[718]616 print_log()
[710]617
[226]618 def set_freqframe(self, frame=None):
[113]619 """
620 Set the frame type of the Spectral Axis.
621 Parameters:
[591]622 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]623 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
624 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]625 Examples:
626 scan.set_freqframe('BARY')
627 """
[484]628 if frame is None: frame = rcParams['scantable.freqframe']
629 varlist = vars()
[1118]630 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
631 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]632
[989]633 if frame in valid:
[113]634 inf = list(self._getcoordinfo())
635 inf[1] = frame
636 self._setcoordinfo(inf)
[1118]637 self._add_history("set_freqframe", varlist)
[102]638 else:
[1118]639 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
[718]640 if rcParams['verbose']:
641 print msg
642 else:
643 raise TypeError(msg)
644 print_log()
[710]645
[989]646 def set_dirframe(self, frame=""):
647 """
648 Set the frame type of the Direction on the sky.
649 Parameters:
650 frame: an optional frame type, default ''. Valid frames are:
651 'J2000', 'B1950', 'GALACTIC'
652 Examples:
653 scan.set_dirframe('GALACTIC')
654 """
655 varlist = vars()
656 try:
657 Scantable.set_dirframe(self, frame)
[1118]658 except RuntimeError, msg:
[989]659 if rcParams['verbose']:
660 print msg
661 else:
662 raise
[1118]663 self._add_history("set_dirframe", varlist)
[989]664
[113]665 def get_unit(self):
666 """
667 Get the default unit set in this scantable
668 Returns:
669 A unit string
670 """
671 inf = self._getcoordinfo()
672 unit = inf[0]
673 if unit == '': unit = 'channel'
674 return unit
[102]675
[158]676 def get_abcissa(self, rowno=0):
[102]677 """
[158]678 Get the abcissa in the current coordinate setup for the currently
[113]679 selected Beam/IF/Pol
680 Parameters:
[226]681 rowno: an optional row number in the scantable. Default is the
682 first row, i.e. rowno=0
[113]683 Returns:
[1348]684 The abcissa values and the format string (as a tuple)
[113]685 """
[256]686 abc = self._getabcissa(rowno)
[710]687 lbl = self._getabcissalabel(rowno)
[718]688 print_log()
[158]689 return abc, lbl
[113]690
[1118]691 def flag(self, mask=None):
[1001]692 """
693 Flag the selected data using an optional channel mask.
694 Parameters:
695 mask: an optional channel mask, created with create_mask. Default
696 (no mask) is all channels.
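 Example (an illustrative sketch; the channel range is a placeholder):
     msk = scan.create_mask([0, 100]) # channel mask built with create_mask
     scan.flag(mask=msk)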
697 """
698 varlist = vars()
[1118]699 if mask is None:
700 mask = []
[1001]701 try:
702 self._flag(mask)
[1118]703 except RuntimeError, msg:
[1001]704 if rcParams['verbose']:
705 print msg
706 return
707 else: raise
708 self._add_history("flag", varlist)
709
[1203]710 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]711 """
712 Flag the data in 'lag' space by providing a frequency to remove.
713 Flagged data in the scantable gets set to 0.0 before the fft.
714 No taper is applied.
715 Parameters:
[1348]716 frequency: the frequency (really a period within the bandwidth)
717 to remove
718 width: the width of the frequency to remove, to remove a
[1471]719 range of frequencies around the centre.
[1203]720 unit: the frequency unit (default "GHz")
721 Notes:
[1348]722 It is recommended to flag edges of the band or strong
723 signals beforehand.
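 Example (an illustrative sketch; the 1.2 MHz ripple period is arbitrary):
     # remove a ~1.2 MHz standing-wave ripple, returning a new scantable
     cleaned = scan.lag_flag(frequency=1.2, width=0.1, unit="MHz",
                             insitu=False)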
[1192]724 """
725 if insitu is None: insitu = rcParams['insitu']
726 self._math._setinsitu(insitu)
727 varlist = vars()
[1370]728 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]729 if not base.has_key(unit):
730 raise ValueError("%s is not a valid unit." % unit)
731 try:
[1200]732 s = scantable(self._math._lag_flag(self, frequency*base[unit],
733 width*base[unit]))
[1192]734 except RuntimeError, msg:
735 if rcParams['verbose']:
736 print msg
737 return
738 else: raise
739 s._add_history("lag_flag", varlist)
740 print_log()
741 if insitu:
742 self._assign(s)
743 else:
744 return s
[1001]745
[1192]746
[113]747 def create_mask(self, *args, **kwargs):
748 """
[1118]749 Compute and return a mask based on [min, max] windows.
[189]750 The specified windows are to be INCLUDED, when the mask is
[113]751 applied.
[102]752 Parameters:
[1118]753 [min, max], [min2, max2], ...
[1024]754 Pairs of start/end points (inclusive) specifying the regions
[102]755 to be masked
[189]756 invert: optional argument. If specified as True,
757 return an inverted mask, i.e. the regions
758 specified are EXCLUDED
[513]759 row: create the mask using the specified row for
760 unit conversions, default is row=0
761 only necessary if frequency varies over rows.
[102]762 Example:
[113]763 scan.set_unit('channel')
764 a)
[1118]765 msk = scan.create_mask([400, 500], [800, 900])
[189]766 # masks everything outside 400 and 500
[113]767 # and 800 and 900 in the unit 'channel'
768
769 b)
[1118]770 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]771 # masks the regions between 400 and 500
[113]772 # and 800 and 900 in the unit 'channel'
[1024]773 c)
774 mask only channel 400
[1554]775 msk = scan.create_mask([400])
[102]776 """
[1554]777 row = kwargs.get("row", 0)
[513]778 data = self._getabcissa(row)
[113]779 u = self._getcoordinfo()[0]
[718]780 if rcParams['verbose']:
[113]781 if u == "": u = "channel"
[718]782 msg = "The current mask window unit is %s" % u
[1118]783 i = self._check_ifs()
784 if not i:
[876]785 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]786 asaplog.push(msg)
[102]787 n = self.nchan()
[1295]788 msk = _n_bools(n, False)
[710]789 # test if args is a 'list' or a 'normal *args - UGLY!!!
790
[1118]791 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
792 and args or args[0]
[710]793 for window in ws:
[1554]794 if len(window) == 1:
795 window = [window[0], window[0]]
796 if len(window) == 0 or len(window) > 2:
797 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]798 if window[0] > window[1]:
799 tmp = window[0]
800 window[0] = window[1]
801 window[1] = tmp
[102]802 for i in range(n):
[1024]803 if data[i] >= window[0] and data[i] <= window[1]:
[1295]804 msk[i] = True
[113]805 if kwargs.has_key('invert'):
806 if kwargs.get('invert'):
[1295]807 msk = mask_not(msk)
[718]808 print_log()
[102]809 return msk
[710]810
[256]811 def get_restfreqs(self):
812 """
813 Get the restfrequency(s) stored in this scantable.
814 The return value(s) are always of unit 'Hz'
815 Parameters:
816 none
817 Returns:
818 a list of doubles
819 """
820 return list(self._getrestfreqs())
[102]821
[402]822
[931]823 def set_restfreqs(self, freqs=None, unit='Hz'):
824 """
825 Set or replace the restfrequency specified.
826 If the 'freqs' argument holds a scalar,
827 then that rest frequency will be applied to all the selected
828 data. If the 'freqs' argument holds
829 a vector, then it MUST be of equal or smaller length than
830 the number of IFs (and the available restfrequencies will be
831 replaced by this vector). In this case, *all* data have
832 the restfrequency set per IF according
833 to the corresponding value you give in the 'freqs' vector.
[1118]834 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]835 IF 1 gets restfreq 2e9.
[1395]836 You can also specify the frequencies via a linecatalog.
[1153]837
[931]838 Parameters:
839 freqs: list of rest frequency values or string identifiers
840 unit: unit for rest frequency (default 'Hz')
[402]841
[931]842 Example:
843 # set the given restfrequency for the whole table
844 scan.set_restfreqs(freqs=1.4e9)
[1348]845 # If the number of IFs in the data is >= 2, IF0 gets the first
[931]846 # value IF1 the second...
[1118]847 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]848 #set the given restfrequency for the whole table (by name)
849 scan.set_restfreqs(freqs="OH1667")
[391]850
[931]851 Note:
852 To do more sophisticated rest frequency setting, e.g. on a
853 source and IF basis, use scantable.set_selection() before using
854 this function.
855 # provided your scantable is called scan
856 selection = selector()
857 selection.set_name("ORION*")
858 selection.set_ifs([1])
859 scan.set_selection(selection)
860 scan.set_restfreqs(freqs=86.6e9)
861
862 """
863 varlist = vars()
[1157]864 from asap import linecatalog
865 # simple value
[1118]866 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]867 self._setrestfreqs(freqs, "",unit)
[1157]868 # list of values
[1118]869 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]870 # list values are scalars
[1118]871 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]872 sel = selector()
873 savesel = self._getselection()
[1322]874 iflist = self.getifnos()
[931]875 for i in xrange(len(freqs)):
[1322]876 sel.set_ifs(iflist[i])
[931]877 self._setselection(sel)
[1153]878 self._setrestfreqs(freqs[i], "",unit)
[931]879 self._setselection(savesel)
[1157]880 # list values are dicts with 'value' and 'name' entries
881 elif isinstance(freqs[-1], dict):
882 sel = selector()
883 savesel = self._getselection()
[1322]884 iflist = self.getifnos()
[1157]885 for i in xrange(len(freqs)):
[1322]886 sel.set_ifs(iflist[i])
[1259]887 self._setselection(sel)
[1157]888 self._setrestfreqs(freqs[i]["value"],
889 freqs[i]["name"], "MHz")
890 self._setselection(savesel)
891 # freqs are to be taken from a linecatalog
[1153]892 elif isinstance(freqs, linecatalog):
893 sel = selector()
894 savesel = self._getselection()
[1322]895 iflist = self.getifnos()
[1153]896 for i in xrange(freqs.nrow()):
[1322]897 sel.set_ifs(iflist[i])
[1153]898 self._setselection(sel)
899 self._setrestfreqs(freqs.get_frequency(i),
900 freqs.get_name(i), "MHz")
901 # ensure that we are not iterating past nIF
902 if i == self.nif()-1: break
903 self._setselection(savesel)
[931]904 else:
905 return
906 self._add_history("set_restfreqs", varlist)
907
[1360]908 def shift_refpix(self, delta):
909 """
[1573]910 Shift the reference pixel of the Spectral Coordinate by an
[1360]911 integer amount.
912 Parameters:
913 delta: the amount to shift by
914 Note:
915 Be careful using this with broadband data.
916 """
917 Scantable.shift(self, delta)
[931]918
[1259]919 def history(self, filename=None):
920 """
921 Print the history. Optionally to a file.
[1348]922 Parameters:
923 filename: The name of the file to save the history to.
[1259]924 """
[484]925 hist = list(self._gethistory())
[794]926 out = "-"*80
[484]927 for h in hist:
[489]928 if h.startswith("---"):
[794]929 out += "\n"+h
[489]930 else:
931 items = h.split("##")
932 date = items[0]
933 func = items[1]
934 items = items[2:]
[794]935 out += "\n"+date+"\n"
936 out += "Function: %s\n Parameters:" % (func)
[489]937 for i in items:
938 s = i.split("=")
[1118]939 out += "\n %s = %s" % (s[0], s[1])
[794]940 out += "\n"+"-"*80
[1259]941 if filename is not None:
942 if filename == "":
943 filename = 'scantable_history.txt'
944 import os
945 filename = os.path.expandvars(os.path.expanduser(filename))
946 if not os.path.isdir(filename):
947 data = open(filename, 'w')
948 data.write(out)
949 data.close()
950 else:
951 msg = "Illegal file name '%s'." % (filename)
952 if rcParams['verbose']:
953 print msg
954 else:
955 raise IOError(msg)
956 if rcParams['verbose']:
957 try:
958 from IPython.genutils import page as pager
959 except ImportError:
960 from pydoc import pager
961 pager(out)
962 else:
963 return out
[484]964 return
[513]965 #
966 # Maths business
967 #
968
[931]969 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]970 """
[1070]971 Return the (time) weighted average of a scan.
[513]972 Note:
[1070]973 in channels only - align if necessary
[513]974 Parameters:
975 mask: an optional mask (only used for 'var' and 'tsys'
976 weighting)
[558]977 scanav: True averages each scan separately
978 False (default) averages all scans together,
[1099]979 weight: Weighting scheme.
980 'none' (mean, no weight)
981 'var' (1/var(spec) weighted)
982 'tsys' (1/Tsys**2 weighted)
983 'tint' (integration time weighted)
984 'tintsys' (Tint/Tsys**2)
985 'median' (median averaging)
[535]986 The default is 'tint'
[931]987 align: align the spectra in velocity before averaging. It takes
988 the time of the first spectrum as reference time.
[513]989 Example:
990 # time average the scantable without using a mask
[710]991 newscan = scan.average_time()
[513]992 """
993 varlist = vars()
[976]994 if weight is None: weight = 'TINT'
[513]995 if mask is None: mask = ()
[1099]996 if scanav: scanav = "SCAN"
997 else: scanav = "NONE"
[1118]998 scan = (self, )
[989]999 try:
[1118]1000 if align:
1001 scan = (self.freq_align(insitu=False), )
1002 s = None
1003 if weight.upper() == 'MEDIAN':
1004 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1005 scanav))
1006 else:
1007 s = scantable(self._math._average(scan, mask, weight.upper(),
1008 scanav))
1009 except RuntimeError, msg:
[989]1010 if rcParams['verbose']:
1011 print msg
1012 return
1013 else: raise
[1099]1014 s._add_history("average_time", varlist)
[718]1015 print_log()
[513]1016 return s
[710]1017
[876]1018 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1019 """
1020 Return a scan where all spectra are converted to either
1021 Jansky or Kelvin depending upon the flux units of the scan table.
1022 By default the function tries to look the values up internally.
1023 If it can't find them (or if you want to over-ride), you must
1024 specify EITHER jyperk OR eta (and D which it will try to look up
1025 also if you don't set it). jyperk takes precedence if you set both.
1026 Parameters:
1027 jyperk: the Jy / K conversion factor
1028 eta: the aperture efficiency
1029 d: the geometric diameter (metres)
1030 insitu: if False a new scantable is returned.
1031 Otherwise, the scaling is done in-situ
1032 The default is taken from .asaprc (False)
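 Example (an illustrative sketch; the Jy/K value is a placeholder):
     # convert using an explicit Jy/K factor instead of the internal lookup
     jyscan = scan.convert_flux(jyperk=12.5, insitu=False)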
1033 """
1034 if insitu is None: insitu = rcParams['insitu']
[876]1035 self._math._setinsitu(insitu)
[513]1036 varlist = vars()
1037 if jyperk is None: jyperk = -1.0
1038 if d is None: d = -1.0
1039 if eta is None: eta = -1.0
[876]1040 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1041 s._add_history("convert_flux", varlist)
1042 print_log()
1043 if insitu: self._assign(s)
1044 else: return s
[513]1045
[876]1046 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1047 """
1048 Return a scan after applying a gain-elevation correction.
1049 The correction can be made via either a polynomial or a
1050 table-based interpolation (and extrapolation if necessary).
1051 You specify polynomial coefficients, an ascii table or neither.
1052 If you specify neither, then a polynomial correction will be made
1053 with built in coefficients known for certain telescopes (an error
1054 will occur if the instrument is not known).
1055 The data and Tsys are *divided* by the scaling factors.
1056 Parameters:
1057 poly: Polynomial coefficients (default None) to compute a
1058 gain-elevation correction as a function of
1059 elevation (in degrees).
1060 filename: The name of an ascii file holding correction factors.
1061 The first row of the ascii file must give the column
1062 names and these MUST include columns
1063 "ELEVATION" (degrees) and "FACTOR" (multiply data
1064 by this) somewhere.
1065 The second row must give the data type of the
1066 column. Use 'R' for Real and 'I' for Integer.
1067 An example file would be
1068 (actual factors are arbitrary) :
1069
1070 TIME ELEVATION FACTOR
1071 R R R
1072 0.1 0 0.8
1073 0.2 20 0.85
1074 0.3 40 0.9
1075 0.4 60 0.85
1076 0.5 80 0.8
1077 0.6 90 0.75
1078 method: Interpolation method when correcting from a table.
1079 Values are "nearest", "linear" (default), "cubic"
1080 and "spline"
1081 insitu: if False a new scantable is returned.
1082 Otherwise, the scaling is done in-situ
1083 The default is taken from .asaprc (False)
1084 """
1085
1086 if insitu is None: insitu = rcParams['insitu']
[876]1087 self._math._setinsitu(insitu)
[513]1088 varlist = vars()
1089 if poly is None:
[1118]1090 poly = ()
[513]1091 from os.path import expandvars
1092 filename = expandvars(filename)
[876]1093 s = scantable(self._math._gainel(self, poly, filename, method))
1094 s._add_history("gain_el", varlist)
1095 print_log()
1096 if insitu: self._assign(s)
1097 else: return s
[710]1098
[931]1099 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1100 """
1101 Return a scan where all rows have been aligned in frequency/velocity.
1102 The alignment frequency frame (e.g. LSRK) is that set by function
1103 set_freqframe.
1104 Parameters:
1105 reftime: reference time to align at. By default, the time of
1106 the first row of data is used.
1107 method: Interpolation method for regridding the spectra.
1108 Choose from "nearest", "linear", "cubic" (default)
1109 and "spline"
1110 insitu: if False a new scantable is returned.
1111 Otherwise, the scaling is done in-situ
1112 The default is taken from .asaprc (False)
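 Example (an illustrative sketch):
     # align all rows at the time of the first integration, using the
     # default cubic interpolation, and return a new scantable
     aligned = scan.freq_align(insitu=False)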
1113 """
[931]1114 if insitu is None: insitu = rcParams["insitu"]
[876]1115 self._math._setinsitu(insitu)
[513]1116 varlist = vars()
[931]1117 if reftime is None: reftime = ""
1118 s = scantable(self._math._freq_align(self, reftime, method))
[876]1119 s._add_history("freq_align", varlist)
1120 print_log()
1121 if insitu: self._assign(s)
1122 else: return s
[513]1123
[876]1124 def opacity(self, tau, insitu=None):
[513]1125 """
1126 Apply an opacity correction. The data
1127 and Tsys are multiplied by the correction factor.
1128 Parameters:
1129 tau: Opacity from which the correction factor is
1130 exp(tau*ZD)
1131 where ZD is the zenith-distance
1132 insitu: if False a new scantable is returned.
1133 Otherwise, the scaling is done in-situ
1134 The default is taken from .asaprc (False)
1135 """
1136 if insitu is None: insitu = rcParams['insitu']
[876]1137 self._math._setinsitu(insitu)
[513]1138 varlist = vars()
[876]1139 s = scantable(self._math._opacity(self, tau))
1140 s._add_history("opacity", varlist)
1141 print_log()
1142 if insitu: self._assign(s)
1143 else: return s
[513]1144
1145 def bin(self, width=5, insitu=None):
1146 """
1147 Return a scan where all spectra have been binned up.
[1348]1148 Parameters:
[513]1149 width: The bin width (default=5) in pixels
1150 insitu: if False a new scantable is returned.
1151 Otherwise, the scaling is done in-situ
1152 The default is taken from .asaprc (False)
1153 """
1154 if insitu is None: insitu = rcParams['insitu']
[876]1155 self._math._setinsitu(insitu)
[513]1156 varlist = vars()
[876]1157 s = scantable(self._math._bin(self, width))
[1118]1158 s._add_history("bin", varlist)
[876]1159 print_log()
1160 if insitu: self._assign(s)
1161 else: return s
[513]1162
[710]1163
[513]1164 def resample(self, width=5, method='cubic', insitu=None):
1165 """
[1348]1166 Return a scan where all spectra have been resampled (regridded) by the given width.
[1573]1167
[1348]1168 Parameters:
[513]1169 width: The bin width (default=5) in pixels
1170 method: Interpolation method when correcting from a table.
1171 Values are "nearest", "linear", "cubic" (default)
1172 and "spline"
1173 insitu: if False a new scantable is returned.
1174 Otherwise, the scaling is done in-situ
1175 The default is taken from .asaprc (False)
1176 """
1177 if insitu is None: insitu = rcParams['insitu']
[876]1178 self._math._setinsitu(insitu)
[513]1179 varlist = vars()
[876]1180 s = scantable(self._math._resample(self, method, width))
[1118]1181 s._add_history("resample", varlist)
[876]1182 print_log()
1183 if insitu: self._assign(s)
1184 else: return s
[513]1185
1186
[946]1187 def average_pol(self, mask=None, weight='none'):
1188 """
1189 Average the Polarisations together.
1190 Parameters:
1191 mask: An optional mask defining the region, where the
1192 averaging will be applied. The output will have all
1193 specified points masked.
1194 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1195 weighted), or 'tsys' (1/Tsys**2 weighted)
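 Example (an illustrative sketch; assumes the data contain more than
 one polarisation):
     polav = scan.average_pol(weight='none')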
1196 """
1197 varlist = vars()
1198 if mask is None:
1199 mask = ()
[1010]1200 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1201 s._add_history("average_pol", varlist)
[946]1202 print_log()
[992]1203 return s
[513]1204
[1145]1205 def average_beam(self, mask=None, weight='none'):
1206 """
1207 Average the Beams together.
1208 Parameters:
1209 mask: An optional mask defining the region, where the
1210 averaging will be applied. The output will have all
1211 specified points masked.
1212 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1213 weighted), or 'tsys' (1/Tsys**2 weighted)
1214 """
1215 varlist = vars()
1216 if mask is None:
1217 mask = ()
1218 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1219 s._add_history("average_beam", varlist)
1220 print_log()
1221 return s
1222
[992]1223 def convert_pol(self, poltype=None):
1224 """
1225 Convert the data to a different polarisation type.
[1565]1226 Note that you will need cross-polarisation terms for most conversions.
[992]1227 Parameters:
1228 poltype: The new polarisation type. Valid types are:
[1565]1229 "linear", "circular", "stokes" and "linpol"
[992]1230 """
1231 varlist = vars()
1232 try:
1233 s = scantable(self._math._convertpol(self, poltype))
[1118]1234 except RuntimeError, msg:
[992]1235 if rcParams['verbose']:
[1118]1236 print msg
1237 return
[992]1238 else:
1239 raise
[1118]1240 s._add_history("convert_pol", varlist)
[992]1241 print_log()
1242 return s
1243
[1574]1244 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
[513]1245 """
1246 Smooth the spectrum by the specified kernel (conserving flux).
1247 Parameters:
1248 kernel: The type of smoothing kernel. Select from
[1574]1249 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1250 or 'poly'
[513]1251 width: The width of the kernel in pixels. For hanning this is
1252 ignored, otherwise it defaults to 5 pixels.
1253 For 'gaussian' it is the Full Width Half
1254 Maximum. For 'boxcar' it is the full width.
[1574]1255 For 'rmedian' and 'poly' it is the half width.
1256 order: Optional parameter for 'poly' kernel (default is 2), to
1257 specify the order of the polynomial. Ignored by all other
1258 kernels.
[513]1259 insitu: if False a new scantable is returned.
1260 Otherwise, the scaling is done in-situ
1261 The default is taken from .asaprc (False)
1262 Example:
1263 none
1264 """
1265 if insitu is None: insitu = rcParams['insitu']
[876]1266 self._math._setinsitu(insitu)
[513]1267 varlist = vars()
[1574]1268 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1269 s._add_history("smooth", varlist)
1270 print_log()
1271 if insitu: self._assign(s)
1272 else: return s
[513]1273
[876]1274
[1391]1275 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1276 """
1277 Return a scan which has been baselined (all rows) by a polynomial.
1278 Parameters:
[794]1279 mask: an optional mask
1280 order: the order of the polynomial (default is 0)
[1061]1281 plot: plot the fit and the residual. In this case each
 1282 individual fit has to be approved by typing 'y'
1283 or 'n'
[1391]1284 uselin: use linear polynomial fit
[794]1285 insitu: if False a new scantable is returned.
1286 Otherwise, the scaling is done in-situ
1287 The default is taken from .asaprc (False)
[513]1288 Example:
1289 # return a scan baselined by a third order polynomial,
1290 # not using a mask
1291 bscan = scan.poly_baseline(order=3)
[579]1292 """
[513]1293 if insitu is None: insitu = rcParams['insitu']
1294 varlist = vars()
1295 if mask is None:
[1295]1296 mask = [True for i in xrange(self.nchan(-1))]
[513]1297 from asap.asapfitter import fitter
[1217]1298 try:
1299 f = fitter()
1300 f.set_scan(self, mask)
[1391]1301 #f.set_function(poly=order)
1302 if uselin:
1303 f.set_function(lpoly=order)
1304 else:
1305 f.set_function(poly=order)
[1217]1306 s = f.auto_fit(insitu, plot=plot)
1307 s._add_history("poly_baseline", varlist)
1308 print_log()
1309 if insitu: self._assign(s)
1310 else: return s
1311 except RuntimeError:
1312 msg = "The fit failed, possibly because it didn't converge."
1313 if rcParams['verbose']:
1314 print msg
1315 return
1316 else:
1317 raise RuntimeError(msg)
[513]1318
[1217]1319
[1118]1320 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1321 threshold=3, chan_avg_limit=1, plot=False,
1322 insitu=None):
[880]1323 """
1324 Return a scan which has been baselined (all rows) by a polynomial.
1325 Spectral lines are detected first using linefinder and masked out
1326 to avoid them affecting the baseline solution.
1327
1328 Parameters:
1329 mask: an optional mask retrieved from scantable
1330 edge: an optional number of channels to drop at
 1331 the edge of the spectrum. If only one value is
1332 specified, the same number will be dropped from
1333 both sides of the spectrum. Default is to keep
[907]1334 all channels. Nested tuples represent individual
[976]1335 edge selection for different IFs (the number of spectral
1336 channels can be different)
[880]1337 order: the order of the polynomial (default is 0)
1338 threshold: the threshold used by line finder. It is better to
1339 keep it large as only strong lines affect the
1340 baseline solution.
[1280]1341 chan_avg_limit:
1342 a maximum number of consecutive spectral channels to
1343 average during the search of weak and broad lines.
1344 The default is no averaging (and no search for weak
1345 lines). If such lines can affect the fitted baseline
1346 (e.g. a high order polynomial is fitted), increase this
1347 parameter (usually values up to 8 are reasonable). Most
1348 users of this method should find the default value
1349 sufficient.
[1061]1350 plot: plot the fit and the residual. In this case each
 1351 individual fit has to be approved by typing 'y'
1352 or 'n'
[880]1353 insitu: if False a new scantable is returned.
1354 Otherwise, the scaling is done in-situ
1355 The default is taken from .asaprc (False)
1356
1357 Example:
1358 scan2=scan.auto_poly_baseline(order=7)
1359 """
1360 if insitu is None: insitu = rcParams['insitu']
1361 varlist = vars()
1362 from asap.asapfitter import fitter
1363 from asap.asaplinefind import linefinder
1364 from asap import _is_sequence_or_number as _is_valid
1365
[976]1366 # check whether edge is set up for each IF individually
[1118]1367 individualedge = False;
1368 if len(edge) > 1:
1369 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1370 individualedge = True;
[907]1371
[1118]1372 if not _is_valid(edge, int) and not individualedge:
[909]1373 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1374 pair of integers specified as a tuple. Nested tuples are allowed \
1375 to make individual selection for different IFs."
[919]1376
[1118]1377 curedge = (0, 0)
1378 if individualedge:
1379 for edgepar in edge:
1380 if not _is_valid(edgepar, int):
1381 raise ValueError, "Each element of the 'edge' tuple has \
1382 to be a pair of integers or an integer."
[907]1383 else:
[1118]1384 curedge = edge;
[880]1385
1386 # setup fitter
1387 f = fitter()
1388 f.set_function(poly=order)
1389
1390 # setup line finder
[1118]1391 fl = linefinder()
[1268]1392 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1393
1394 if not insitu:
[1118]1395 workscan = self.copy()
[880]1396 else:
[1118]1397 workscan = self
[880]1398
[907]1399 fl.set_scan(workscan)
1400
[1118]1401 rows = range(workscan.nrow())
[880]1402 asaplog.push("Processing:")
1403 for r in rows:
[1118]1404 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1405 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1406 workscan.getpol(r), workscan.getcycle(r))
[880]1407 asaplog.push(msg, False)
[907]1408
[976]1409 # figure out edge parameter
[1118]1410 if individualedge:
1411 if len(edge) <= workscan.getif(r):
 1412 raise RuntimeError, "Number of edge elements appears to " \
 1413 "be less than the number of IFs"
1414 curedge = edge[workscan.getif(r)]
[919]1415
[976]1416 # setup line finder
[1118]1417 fl.find_lines(r, mask, curedge)
[1536]1418 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1419 mask_and(workscan._getmask(r), fl.get_mask()))
[880]1420 f.fit()
1421 x = f.get_parameters()
[1061]1422 if plot:
1423 f.plot(residual=True)
1424 x = raw_input("Accept fit ( [y]/n ): ")
1425 if x.upper() == 'N':
1426 continue
[880]1427 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1428 if plot:
1429 f._p.unmap()
1430 f._p = None
1431 workscan._add_history("auto_poly_baseline", varlist)
[880]1432 if insitu:
1433 self._assign(workscan)
1434 else:
1435 return workscan
1436
[914]1437 def rotate_linpolphase(self, angle):
1438 """
1439 Rotate the phase of the complex polarization O=Q+iU correlation.
1440 This is always done in situ in the raw data. So if you call this
1441 function more than once then each call rotates the phase further.
1442 Parameters:
1443 angle: The angle (degrees) to rotate (add) by.
1444 Examples:
1445 scan.rotate_linpolphase(2.3)
1446 """
1447 varlist = vars()
[936]1448 self._math._rotate_linpolphase(self, angle)
[914]1449 self._add_history("rotate_linpolphase", varlist)
1450 print_log()
1451 return
[710]1452
[513]1453
[914]1454 def rotate_xyphase(self, angle):
1455 """
1456 Rotate the phase of the XY correlation. This is always done in situ
1457 in the data. So if you call this function more than once
1458 then each call rotates the phase further.
1459 Parameters:
1460 angle: The angle (degrees) to rotate (add) by.
1461 Examples:
1462 scan.rotate_xyphase(2.3)
1463 """
1464 varlist = vars()
[936]1465 self._math._rotate_xyphase(self, angle)
[914]1466 self._add_history("rotate_xyphase", varlist)
1467 print_log()
1468 return
1469
1470 def swap_linears(self):
1471 """
[1573]1472 Swap the linear polarisations XX and YY, or better the first two
[1348]1473 polarisations as this also works for circulars.
[914]1474 """
1475 varlist = vars()
[936]1476 self._math._swap_linears(self)
[914]1477 self._add_history("swap_linears", varlist)
1478 print_log()
1479 return
1480
1481 def invert_phase(self):
1482 """
1483 Invert the phase of the complex polarisation
1484 """
1485 varlist = vars()
[936]1486 self._math._invert_phase(self)
[914]1487 self._add_history("invert_phase", varlist)
1488 print_log()
1489 return
1490
[876]1491 def add(self, offset, insitu=None):
[513]1492 """
1493 Return a scan where all spectra have the offset added
1494 Parameters:
1495 offset: the offset
1496 insitu: if False a new scantable is returned.
1497 Otherwise, the scaling is done in-situ
1498 The default is taken from .asaprc (False)
1499 """
1500 if insitu is None: insitu = rcParams['insitu']
[876]1501 self._math._setinsitu(insitu)
[513]1502 varlist = vars()
[876]1503 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1504 s._add_history("add", varlist)
[876]1505 print_log()
1506 if insitu:
1507 self._assign(s)
1508 else:
[513]1509 return s
1510
[1308]1511 def scale(self, factor, tsys=True, insitu=None):
[513]1512 """
1513 Return a scan where all spectra are scaled by the given 'factor'
1514 Parameters:
1515 factor: the scaling factor
1516 insitu: if False a new scantable is returned.
1517 Otherwise, the scaling is done in-situ
1518 The default is taken from .asaprc (False)
1519 tsys: if True (default) then apply the operation to Tsys
1520 as well as the data
1521 """
1522 if insitu is None: insitu = rcParams['insitu']
[876]1523 self._math._setinsitu(insitu)
[513]1524 varlist = vars()
[876]1525 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1526 s._add_history("scale", varlist)
[876]1527 print_log()
1528 if insitu:
1529 self._assign(s)
1530 else:
[513]1531 return s
1532
[1504]1533 def set_sourcetype(self, match, matchtype="pattern",
1534 sourcetype="reference"):
[1502]1535 """
1536 Set the type of the source to be a source or reference scan,
 1537 using the provided pattern.
1538 Parameters:
[1504]1539 match: a Unix style pattern, regular expression or selector
1540 matchtype: 'pattern' (default) UNIX style pattern or
1541 'regex' regular expression
[1502]1542 sourcetype: the type of the source to use (source/reference)
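 Example (an illustrative sketch; using '*_R' as a reference-scan naming
 convention is an assumption):
     scan.set_sourcetype(match='*_R', matchtype='pattern',
                         sourcetype='reference')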
1543 """
1544 varlist = vars()
1545 basesel = self.get_selection()
1546 stype = -1
1547 if sourcetype.lower().startswith("r"):
1548 stype = 1
1549 elif sourcetype.lower().startswith("s"):
1550 stype = 0
[1504]1551 else:
[1502]1552 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]1553 if matchtype.lower().startswith("p"):
1554 matchtype = "pattern"
1555 elif matchtype.lower().startswith("r"):
1556 matchtype = "regex"
1557 else:
1558 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]1559 sel = selector()
1560 if isinstance(match, selector):
1561 sel = match
1562 else:
[1504]1563 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]1564 self.set_selection(basesel+sel)
1565 self._setsourcetype(stype)
1566 self.set_selection(basesel)
[1573]1567 self._add_history("set_sourcetype", varlist)
[1502]1568
[1348]1569 def auto_quotient(self, preserve=True, mode='paired'):
[670]1570 """
1571 This function allows you to build quotients automatically.
 1572 It assumes the observation to have the same number of
 1573 "ons" and "offs".
1574 Parameters:
[710]1575 preserve: you can preserve (default) the continuum or
1576 remove it. The equations used are
[670]1577 preserve: Output = Toff * (on/off) - Toff
[1070]1578 remove: Output = Toff * (on/off) - Ton
[1573]1579 mode: the on/off detection mode
[1348]1580 'paired' (default)
1581 identifies 'off' scans by the
1582 trailing '_R' (Mopra/Parkes) or
1583 '_e'/'_w' (Tid) and matches
1584 on/off pairs from the observing pattern
[1502]1585 'time'
1586 finds the closest off in time
[1348]1587
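 Example (an illustrative sketch):
     # quotients from matched on/off pairs, keeping the continuum
     q = scan.auto_quotient(preserve=True, mode='paired')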
[670]1588 """
[1348]1589 modes = ["time", "paired"]
[670]1590 if not mode in modes:
[876]1591 msg = "please provide valid mode. Valid modes are %s" % (modes)
1592 raise ValueError(msg)
1593 varlist = vars()
[1348]1594 s = None
1595 if mode.lower() == "paired":
1596 basesel = self.get_selection()
[1356]1597 sel = selector()+basesel
1598 sel.set_query("SRCTYPE==1")
1599 self.set_selection(sel)
[1348]1600 offs = self.copy()
1601 sel.set_query("SRCTYPE==0")
[1356]1602 self.set_selection(sel)
[1348]1603 ons = self.copy()
1604 s = scantable(self._math._quotient(ons, offs, preserve))
1605 self.set_selection(basesel)
1606 elif mode.lower() == "time":
1607 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1608 s._add_history("auto_quotient", varlist)
[876]1609 print_log()
1610 return s
[710]1611
[1145]1612 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1613 """
[1143]1614 Form a quotient using "off" beams when observing in "MX" mode.
1615 Parameters:
[1145]1616 mask: an optional mask to be used when weight == 'stddev'
[1143]1617 weight: How to average the off beams. Default is 'median'.
[1145]1618 preserve: you can preserve (default) the continuum or
1619 remove it. The equations used are
1620 preserve: Output = Toff * (on/off) - Toff
1621 remove: Output = Toff * (on/off) - Ton
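 Example (an illustrative sketch):
     # median-average the 'off' beams and form the quotient
     q = scan.mx_quotient(weight='median', preserve=True)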
[1217]1622 """
[1143]1623 if mask is None: mask = ()
[1141]1624 varlist = vars()
1625 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1626 preoff = scantable(self._math._mx_extract(self, 'off'))
1627 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1628 from asapmath import quotient
[1145]1629 q = quotient(on, off, preserve)
[1143]1630 q._add_history("mx_quotient", varlist)
[1145]1631 print_log()
[1217]1632 return q
[513]1633
[718]1634 def freq_switch(self, insitu=None):
1635 """
1636 Apply frequency switching to the data.
1637 Parameters:
1638 insitu: if False a new scantable is returned.
1639 Otherwise, the switching is done in-situ
1640 The default is taken from .asaprc (False)
1641 Example:
1642 none
1643 """
1644 if insitu is None: insitu = rcParams['insitu']
[876]1645 self._math._setinsitu(insitu)
[718]1646 varlist = vars()
[876]1647 s = scantable(self._math._freqswitch(self))
[1118]1648 s._add_history("freq_switch", varlist)
[876]1649 print_log()
1650 if insitu: self._assign(s)
1651 else: return s
[718]1652
[780]1653 def recalc_azel(self):
1654 """
1655 Recalculate the azimuth and elevation for each position.
1656 Parameters:
1657 none
1658 Example:
1659 """
1660 varlist = vars()
[876]1661 self._recalcazel()
[780]1662 self._add_history("recalc_azel", varlist)
1663 print_log()
1664 return
1665
[513]1666 def __add__(self, other):
1667 varlist = vars()
1668 s = None
1669 if isinstance(other, scantable):
[1573]1670 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1671 elif isinstance(other, float):
[876]1672 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1673 else:
[718]1674 raise TypeError("Other input is not a scantable or float value")
[513]1675 s._add_history("operator +", varlist)
[718]1676 print_log()
[513]1677 return s
1678
1679 def __sub__(self, other):
1680 """
1681 implicit on all axes and on Tsys
1682 """
1683 varlist = vars()
1684 s = None
1685 if isinstance(other, scantable):
[1308]1686 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1687 elif isinstance(other, float):
[876]1688 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1689 else:
[718]1690 raise TypeError("Other input is not a scantable or float value")
[513]1691 s._add_history("operator -", varlist)
[718]1692 print_log()
[513]1693 return s
[710]1694
[513]1695 def __mul__(self, other):
1696 """
1697 implicit on all axes and on Tsys
1698 """
1699 varlist = vars()
1700 s = None
1701 if isinstance(other, scantable):
[1308]1702 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1703 elif isinstance(other, float):
[876]1704 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1705 else:
[718]1706 raise TypeError("Other input is not a scantable or float value")
[513]1707 s._add_history("operator *", varlist)
[718]1708 print_log()
[513]1709 return s
1710
[710]1711
[513]1712 def __div__(self, other):
1713 """
1714 implicit on all axes and on Tsys
1715 """
1716 varlist = vars()
1717 s = None
1718 if isinstance(other, scantable):
[1308]1719 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1720 elif isinstance(other, float):
1721 if other == 0.0:
[718]1722 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1723 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1724 else:
[718]1725 raise TypeError("Other input is not a scantable or float value")
[513]1726 s._add_history("operator /", varlist)
[718]1727 print_log()
[513]1728 return s
1729
[530]1730 def get_fit(self, row=0):
1731 """
1732 Print or return the stored fits for a row in the scantable
1733 Parameters:
1734 row: the row which the fit has been applied to.
1735 """
1736 if row > self.nrow():
1737 return
[976]1738 from asap.asapfit import asapfit
[530]1739 fit = asapfit(self._getfit(row))
[718]1740 if rcParams['verbose']:
[530]1741 print fit
1742 return
1743 else:
1744 return fit.as_dict()
1745
[1483]1746 def flag_nans(self):
1747 """
1748 Utility function to flag NaN values in the scantable.
1749 """
1750 import numpy
1751 basesel = self.get_selection()
1752 for i in range(self.nrow()):
1753 sel = selector()+basesel
1754 sel.set_scans(self.getscan(i))
1755 sel.set_beams(self.getbeam(i))
1756 sel.set_ifs(self.getif(i))
1757 sel.set_polarisations(self.getpol(i))
1758 self.set_selection(sel)
1759 nans = numpy.isnan(self._getspectrum(0))
1760 if numpy.any(nans):
1761 bnans = [ bool(v) for v in nans]
1762 self.flag(bnans)
1763 self.set_selection(basesel)
1764
[1573]1765
[484]1766 def _add_history(self, funcname, parameters):
[1435]1767 if not rcParams['scantable.history']:
1768 return
[484]1769 # create date
1770 sep = "##"
1771 from datetime import datetime
1772 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1773 hist = dstr+sep
1774 hist += funcname+sep#cdate+sep
1775 if parameters.has_key('self'): del parameters['self']
[1118]1776 for k, v in parameters.iteritems():
[484]1777 if type(v) is dict:
[1118]1778 for k2, v2 in v.iteritems():
[484]1779 hist += k2
1780 hist += "="
[1118]1781 if isinstance(v2, scantable):
[484]1782 hist += 'scantable'
1783 elif k2 == 'mask':
[1118]1784 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1785 hist += str(self._zip_mask(v2))
1786 else:
1787 hist += str(v2)
[484]1788 else:
[513]1789 hist += str(v2)
[484]1790 else:
1791 hist += k
1792 hist += "="
[1118]1793 if isinstance(v, scantable):
[484]1794 hist += 'scantable'
1795 elif k == 'mask':
[1118]1796 if isinstance(v, list) or isinstance(v, tuple):
[513]1797 hist += str(self._zip_mask(v))
1798 else:
1799 hist += str(v)
[484]1800 else:
1801 hist += str(v)
1802 hist += sep
1803 hist = hist[:-2] # remove trailing '##'
1804 self._addhistory(hist)
1805
[710]1806
[484]1807 def _zip_mask(self, mask):
1808 mask = list(mask)
1809 i = 0
1810 segments = []
1811 while mask[i:].count(1):
1812 i += mask[i:].index(1)
1813 if mask[i:].count(0):
1814 j = i + mask[i:].index(0)
1815 else:
[710]1816 j = len(mask)
[1118]1817 segments.append([i, j])
[710]1818 i = j
[484]1819 return segments
[714]1820
[626]1821 def _get_ordinate_label(self):
1822 fu = "("+self.get_fluxunit()+")"
1823 import re
1824 lbl = "Intensity"
[1118]1825 if re.match(".K.", fu):
[626]1826 lbl = "Brightness Temperature "+ fu
[1118]1827 elif re.match(".Jy.", fu):
[626]1828 lbl = "Flux density "+ fu
1829 return lbl
[710]1830
[876]1831 def _check_ifs(self):
1832 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1833 nchans = filter(lambda t: t > 0, nchans)
[876]1834 return (sum(nchans)/len(nchans) == nchans[0])
[976]1835
1836 def _fill(self, names, unit, average):
1837 import os
1838 from asap._asap import stfiller
1839 first = True
1840 fullnames = []
1841 for name in names:
1842 name = os.path.expandvars(name)
1843 name = os.path.expanduser(name)
1844 if not os.path.exists(name):
1845 msg = "File '%s' does not exists" % (name)
1846 if rcParams['verbose']:
1847 asaplog.push(msg)
1848 print asaplog.pop().strip()
1849 return
1850 raise IOError(msg)
1851 fullnames.append(name)
1852 if average:
1853 asaplog.push('Auto averaging integrations')
[1079]1854 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1855 for name in fullnames:
[1073]1856 tbl = Scantable(stype)
1857 r = stfiller(tbl)
[1504]1858 rx = rcParams['scantable.reference']
1859 r._setreferenceexpr(rx)
[976]1860 msg = "Importing %s..." % (name)
[1118]1861 asaplog.push(msg, False)
[976]1862 print_log()
[1118]1863 r._open(name, -1, -1)
[976]1864 r._read()
1865 if average:
[1118]1866 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1867 if not first:
1868 tbl = self._math._merge([self, tbl])
1869 Scantable.__init__(self, tbl)
1870 r._close()
[1118]1871 del r, tbl
[976]1872 first = False
1873 if unit is not None:
1874 self.set_fluxunit(unit)
1875 self.set_freqframe(rcParams['scantable.freqframe'])
1876
[1402]1877 def __getitem__(self, key):
1878 if key < 0:
1879 key += self.nrow()
1880 if key >= self.nrow():
1881 raise IndexError("Row index out of range.")
1882 return self._getspectrum(key)
1883
1884 def __setitem__(self, key, value):
1885 if key < 0:
1886 key += self.nrow()
1887 if key >= self.nrow():
1888 raise IndexError("Row index out of range.")
1889 if not hasattr(value, "__len__") or \
1890 len(value) > self.nchan(self.getif(key)):
1891 raise ValueError("Spectrum length doesn't match.")
1892 return self._setspectrum(value, key)
1893
1894 def __len__(self):
1895 return self.nrow()
1896
1897 def __iter__(self):
1898 for i in range(len(self)):
1899 yield self[i]