source: trunk/python/scantable.py@1571

Last change on this file since 1571 was 1565, checked in by Malte Marquarding, 16 years ago

Fix for ticket #162: documentation of scantable.convert_pol didn't mention 'linpol' as a valid option

[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1435]41 if isinstance(filename, str):# or \
42# (isinstance(filename, list) or isinstance(filename, tuple)) \
43# and isinstance(filename[-1], str):
[976]44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
[718]49 if rcParams['verbose']:
[976]50 asaplog.push(s)
51 print asaplog.pop().strip()
[718]52 return
[976]53 raise IOError(s)
[1115]54 if os.path.isdir(filename) \
[1118]55 and not os.path.exists(filename+'/table.f1'):
[976]56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
[1118]58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
[976]60 if unit is not None:
61 self.set_fluxunit(unit)
62 self.set_freqframe(rcParams['scantable.freqframe'])
[718]63 else:
[1118]64 msg = "The given file '%s' is not a valid " \
65 "asap table." % (filename)
[976]66 if rcParams['verbose']:
67 print msg
68 return
69 else:
70 raise IOError(msg)
[226]71 else:
[1118]72 self._fill([filename], unit, average)
73 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]74 and isinstance(filename[-1], str):
75 self._fill(filename, unit, average)
[1259]76 self._add_history("scantable", varlist)
[714]77 print_log()
[102]78
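    # Usage sketch: building a scantable from a raw file. A minimal example
    # assuming an interactive ASAP session; 'mydata.rpf' is a hypothetical
    # file name and the unit/averaging choices are illustrative only.
    #
    #   scan = scantable('mydata.rpf', average=True, unit='K')
    #   print scan          # prints the summary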
[876]79 def save(self, name=None, format=None, overwrite=False):
[116]80 """
[1280]81 Store the scantable on disk. This can be an asap (aips++) Table,
82 SDFITS or MS2 format.
[116]83 Parameters:
[1093]84 name: the name of the output file. For format "ASCII"
85 this is the root file name (data in 'name'.txt
[497]86 and header in 'name'_header.txt)
[116]87 format: an optional file format. Default is ASAP.
[280]88 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]89 'SDFITS' (save as SDFITS file)
[200]90 'ASCII' (saves as ascii text file)
[226]91 'MS2' (saves as an aips++
92 MeasurementSet V2)
[1443]93 'FITS' (save as image FITS - not
94 readable by class)
95 'CLASS' (save as FITS readable by CLASS)
[411]96 overwrite: whether to overwrite the file if it already exists.
[256]97 The default (False) is to return with a warning
[411]98 without writing the output. USE WITH CARE.
[116]99 Example:
100 scan.save('myscan.asap')
[1118]101 scan.save('myscan.sdfits', 'SDFITS')
[116]102 """
[411]103 from os import path
[226]104 if format is None: format = rcParams['scantable.save']
[256]105 suffix = '.'+format.lower()
[1118]106 if name is None or name == "":
[256]107 name = 'scantable'+suffix
[718]108 msg = "No filename given. Using default name %s..." % name
109 asaplog.push(msg)
[411]110 name = path.expandvars(name)
[256]111 if path.isfile(name) or path.isdir(name):
112 if not overwrite:
[718]113 msg = "File %s exists." % name
114 if rcParams['verbose']:
115 print msg
116 return
117 else:
118 raise IOError(msg)
[451]119 format2 = format.upper()
120 if format2 == 'ASAP':
[116]121 self._save(name)
122 else:
[989]123 from asap._asap import stwriter as stw
[1118]124 writer = stw(format2)
125 writer.write(self, name)
[718]126 print_log()
[116]127 return
128
[102]129 def copy(self):
130 """
131 Return a copy of this scantable.
[1348]132 Note:
133 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]134 Parameters:
[113]135 none
[102]136 Example:
137 copiedscan = scan.copy()
138 """
[876]139 sd = scantable(Scantable._copy(self))
[113]140 return sd
141
[1093]142 def drop_scan(self, scanid=None):
143 """
144 Return a new scantable where the specified scan number(s) have
145 been dropped.
146 Parameters:
147 scanid: a (list of) scan number(s)
148 """
149 from asap import _is_sequence_or_number as _is_valid
150 from asap import _to_list
151 from asap import unique
152 if not _is_valid(scanid):
153 if rcParams['verbose']:
154 print "Please specify a scanno to drop from the scantable"
155 return
156 else:
157 raise RuntimeError("No scan given")
158 try:
159 scanid = _to_list(scanid)
160 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
161 for sid in scanid: allscans.remove(sid)
[1118]162 if len(allscans) == 0:
163 raise ValueError("Can't remove all scans")
[1093]164 except ValueError:
165 if rcParams['verbose']:
166 print "Couldn't find any match."
167 return
168 else: raise
169 try:
170 bsel = self.get_selection()
171 sel = selector()
172 sel.set_scans(allscans)
173 self.set_selection(bsel+sel)
174 scopy = self._copy()
175 self.set_selection(bsel)
176 return scantable(scopy)
177 except RuntimeError:
[1118]178 if rcParams['verbose']:
179 print "Couldn't find any match."
180 else:
181 raise
[1093]182
183
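    # Usage sketch: removing unwanted scans. The scan numbers below are
    # illustrative; check scan.summary() for the actual SCANNO values.
    #
    #   cleaned = scan.drop_scan([0, 3])   # new scantable without scans 0 and 3
    #   cleaned = scan.drop_scan(7)        # a single scan number also works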
[102]184 def get_scan(self, scanid=None):
185 """
186 Return a specific scan (by scanno) or collection of scans (by
187 source name) in a new scantable.
[1348]188 Note:
189 See scantable.drop_scan() for the inverse operation.
[102]190 Parameters:
[513]191 scanid: a (list of) scanno or a source name, unix-style
192 patterns are accepted for source name matching, e.g.
193 '*_R' gets all 'ref' scans
[102]194 Example:
[513]195 # get all scans containing the source '323p459'
196 newscan = scan.get_scan('323p459')
197 # get all 'off' scans
198 refscans = scan.get_scan('*_R')
199 # get a subset of scans by scanno (as listed in scan.summary())
[1118]200 newscan = scan.get_scan([0, 2, 7, 10])
[102]201 """
202 if scanid is None:
[718]203 if rcParams['verbose']:
[1118]204 print "Please specify a scan no or name to " \
205 "retrieve from the scantable"
[718]206 return
207 else:
208 raise RuntimeError("No scan given")
209
[102]210 try:
[946]211 bsel = self.get_selection()
212 sel = selector()
[102]213 if type(scanid) is str:
[946]214 sel.set_name(scanid)
215 self.set_selection(bsel+sel)
[876]216 scopy = self._copy()
[946]217 self.set_selection(bsel)
[876]218 return scantable(scopy)
[102]219 elif type(scanid) is int:
[946]220 sel.set_scans([scanid])
221 self.set_selection(bsel+sel)
[876]222 scopy = self._copy()
[946]223 self.set_selection(bsel)
[876]224 return scantable(scopy)
[381]225 elif type(scanid) is list:
[946]226 sel.set_scans(scanid)
227 self.set_selection(sel)
[876]228 scopy = self._copy()
[946]229 self.set_selection(bsel)
[876]230 return scantable(scopy)
[381]231 else:
[718]232 msg = "Illegal scanid type, use 'int' or 'list' of ints."
233 if rcParams['verbose']:
234 print msg
235 else:
236 raise TypeError(msg)
[102]237 except RuntimeError:
[718]238 if rcParams['verbose']: print "Couldn't find any match."
239 else: raise
[102]240
241 def __str__(self):
[1118]242 return Scantable._summary(self, True)
[102]243
[976]244 def summary(self, filename=None):
[102]245 """
246 Print a summary of the contents of this scantable.
247 Parameters:
248 filename: the name of a file to write the output to
249 Default - no file output
[102]252 """
[976]253 info = Scantable._summary(self, True)
254 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]255 if filename is not None:
[256]256 if filename == "":
257 filename = 'scantable_summary.txt'
[415]258 from os.path import expandvars, isdir
[411]259 filename = expandvars(filename)
[415]260 if not isdir(filename):
[413]261 data = open(filename, 'w')
262 data.write(info)
263 data.close()
264 else:
[718]265 msg = "Illegal file name '%s'." % (filename)
266 if rcParams['verbose']:
267 print msg
268 else:
269 raise IOError(msg)
270 if rcParams['verbose']:
[794]271 try:
272 from IPython.genutils import page as pager
273 except ImportError:
274 from pydoc import pager
275 pager(info)
[718]276 else:
277 return info
[710]278
[1512]279 def get_spectrum(self, rowno):
[1471]280 """Return the spectrum for the given row in the scantable as a list.
281 Parameters:
282 rowno: the row number to retrieve the spectrum from
283 """
284 return self._getspectrum(rowno)
[946]285
[1471]286 def get_mask(self, rowno):
287 """Return the mask for the given row in the scantable as a list.
288 Parameters:
289 rowno: the row number to retrieve the mask from
290 """
291 return self._getmask(rowno)
292
293 def set_spectrum(self, spec, rowno):
294 """Set the spectrum for the given row in the scantable.
295 Parameters:
296 spec: the spectrum
297 rowno: the row number to set the spectrum for
298 """
299 assert(len(spec) == self.nchan())
300 return self._setspectrum(spec, rowno)
301
[946]302 def get_selection(self):
303 """
[1005]304 Get the selection object currently set on this scantable.
305 Parameters:
306 none
307 Example:
308 sel = scan.get_selection()
309 sel.set_ifs(0) # select IF 0
310 scan.set_selection(sel) # apply modified selection
[946]311 """
312 return selector(self._getselection())
313
[1005]314 def set_selection(self, selection=selector()):
[946]315 """
[1005]316 Select a subset of the data. All following operations on this scantable
317 are only applied to this selection.
318 Parameters:
319 selection: a selector object (default unset the selection)
320 Examples:
321 sel = selector() # create a selection object
[1118]322 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]323 scan.set_selection(sel) # set the selection
[1005]324 scan.summary() # will only print summary of scanno 0 and 3
325 scan.set_selection() # unset the selection
[946]326 """
327 self._setselection(selection)
328
[876]329 def stats(self, stat='stddev', mask=None):
[102]330 """
[135]331 Determine the specified statistic of the current beam/if/pol
[102]332 Takes a 'mask' as an optional parameter to specify which
333 channels should be excluded.
334 Parameters:
[135]335 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
336 'var', 'stddev', 'avdev', 'rms', 'median'
337 mask: an optional mask specifying where the statistic
[102]338 should be determined.
339 Example:
[113]340 scan.set_unit('channel')
[1118]341 msk = scan.create_mask([100, 200], [500, 600])
[135]342 scan.stats(stat='mean', mask=msk)
[102]343 """
344 if mask == None:
[876]345 mask = []
[1118]346 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]347 if not self._check_ifs():
[1118]348 raise ValueError("Cannot apply mask as the IFs have different "
349 "numbers of channels. Please use set_selection() "
350 "to select individual IFs")
[256]351
[876]352 statvals = self._math._stats(self, mask, stat)
353 out = ''
354 axes = []
355 for i in range(self.nrow()):
356 axis = []
357 axis.append(self.getscan(i))
358 axis.append(self.getbeam(i))
359 axis.append(self.getif(i))
360 axis.append(self.getpol(i))
361 axis.append(self.getcycle(i))
362 axes.append(axis)
363 tm = self._gettime(i)
364 src = self._getsourcename(i)
365 out += 'Scan[%d] (%s) ' % (axis[0], src)
366 out += 'Time[%s]:\n' % (tm)
367 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
368 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
369 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
370 out += '= %3.3f\n' % (statvals[i])
371 out += "--------------------------------------------------\n"
[256]372
[876]373 if rcParams['verbose']:
374 print "--------------------------------------------------"
[1118]375 print " ", stat
[876]376 print "--------------------------------------------------"
377 print out
[1295]378 #else:
379 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
380 # 'axes' : axes,
381 # 'data': statvals}
382 return statvals
[102]383
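    # Usage sketch: statistics over a channel mask. The channel ranges are
    # illustrative and assume set_unit('channel') as in the docstring above.
    #
    #   scan.set_unit('channel')
    #   msk = scan.create_mask([100, 200], [500, 600])
    #   rms = scan.stats(stat='rms', mask=msk)      # one value per row
    #   sig = scan.stats(stat='stddev', mask=msk)   # same as scan.stddev(mask=msk)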
[1118]384 def stddev(self, mask=None):
[135]385 """
386 Determine the standard deviation of the current beam/if/pol
387 Takes a 'mask' as an optional parameter to specify which
388 channels should be excluded.
389 Parameters:
390 mask: an optional mask specifying where the standard
391 deviation should be determined.
392
393 Example:
394 scan.set_unit('channel')
[1118]395 msk = scan.create_mask([100, 200], [500, 600])
[135]396 scan.stddev(mask=msk)
397 """
[1118]398 return self.stats(stat='stddev', mask=mask)
[135]399
[1003]400
[1259]401 def get_column_names(self):
[1003]402 """
403 Return a list of column names, which can be used for selection.
404 """
[1259]405 return list(Scantable.get_column_names(self))
[1003]406
[876]407 def get_tsys(self):
[113]408 """
409 Return the System temperatures.
410 Returns:
[876]411 a list of Tsys values for the current selection
[113]412 """
[256]413
[876]414 return self._row_callback(self._gettsys, "Tsys")
[256]415
[876]416 def _row_callback(self, callback, label):
417 axes = []
[1118]418 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]419 out = ""
[1118]420 outvec = []
[876]421 for i in range(self.nrow()):
422 axis = []
423 axis.append(self.getscan(i))
424 axis.append(self.getbeam(i))
425 axis.append(self.getif(i))
426 axis.append(self.getpol(i))
427 axis.append(self.getcycle(i))
428 axes.append(axis)
429 tm = self._gettime(i)
430 src = self._getsourcename(i)
431 out += 'Scan[%d] (%s) ' % (axis[0], src)
432 out += 'Time[%s]:\n' % (tm)
433 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
434 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
435 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
436 outvec.append(callback(i))
437 out += '= %3.3f\n' % (outvec[i])
438 out += "--------------------------------------------------\n"
439 if rcParams['verbose']:
440 print "--------------------------------------------------"
441 print " %s" % (label)
442 print "--------------------------------------------------"
443 print out
[1175]444 # disabled because the vector seems more useful
445 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
446 return outvec
[256]447
[1070]448 def _get_column(self, callback, row=-1):
449 """
450 """
451 if row == -1:
452 return [callback(i) for i in range(self.nrow())]
453 else:
454 if 0 <= row < self.nrow():
455 return callback(row)
[256]456
[1070]457
[1348]458 def get_time(self, row=-1, asdatetime=False):
[113]459 """
460 Get a list of time stamps for the observations.
[1348]461 Return a time stamp for each integration in the scantable (a datetime object if asdatetime=True).
[113]462 Parameters:
[1348]463 row: row no of integration. Default -1 return all rows
464 asdatetime: return values as datetime objects rather than strings
[113]465 Example:
466 none
467 """
[1175]468 from time import strptime
469 from datetime import datetime
[1392]470 times = self._get_column(self._gettime, row)
[1348]471 if not asdatetime:
[1392]472 return times
[1175]473 format = "%Y/%m/%d/%H:%M:%S"
474 if isinstance(times, list):
475 return [datetime(*strptime(i, format)[:6]) for i in times]
476 else:
477 return datetime(*strptime(times, format)[:6])
[102]478
[1348]479
480 def get_inttime(self, row=-1):
481 """
482 Get a list of integration times for the observations.
483 Return a time in seconds for each integration in the scantable.
484 Parameters:
485 row: row no of integration. Default -1 return all rows.
486 Example:
487 none
488 """
489 return self._get_column(self._getinttime, row)
490
491
[714]492 def get_sourcename(self, row=-1):
493 """
[794]494 Get a list of source names for the observations.
[714]495 Return a string for each integration in the scantable.
496 Parameters:
[1348]497 row: row no of integration. Default -1 return all rows.
[714]498 Example:
499 none
500 """
[1070]501 return self._get_column(self._getsourcename, row)
[714]502
[794]503 def get_elevation(self, row=-1):
504 """
505 Get a list of elevations for the observations.
506 Return a float for each integration in the scantable.
507 Parameters:
[1348]508 row: row no of integration. Default -1 return all rows.
[794]509 Example:
510 none
511 """
[1070]512 return self._get_column(self._getelevation, row)
[794]513
514 def get_azimuth(self, row=-1):
515 """
516 Get a list of azimuths for the observations.
517 Return a float for each integration in the scantable.
518 Parameters:
[1348]519 row: row no of integration. Default -1 return all rows.
[794]520 Example:
521 none
522 """
[1070]523 return self._get_column(self._getazimuth, row)
[794]524
525 def get_parangle(self, row=-1):
526 """
527 Get a list of parallactic angles for the observations.
528 Return a float for each integration in the scantable.
529 Parameters:
[1348]530 row: row no of integration. Default -1 return all rows.
[794]531 Example:
532 none
533 """
[1070]534 return self._get_column(self._getparangle, row)
[794]535
[1070]536 def get_direction(self, row=-1):
537 """
538 Get a list of positions on the sky (direction) for the observations.
539 Return the direction for each integration in the scantable.
540 Parameters:
541 row: row no of integration. Default -1 return all rows
542 Example:
543 none
544 """
545 return self._get_column(self._getdirection, row)
546
[1391]547 def get_directionval(self, row=-1):
548 """
549 Get a list of positions on the sky (direction) for the observations.
550 Return the direction values for each integration in the scantable.
551 Parameters:
552 row: row no of integration. Default -1 return all rows
553 Example:
554 none
555 """
556 return self._get_column(self._getdirectionvec, row)
557
558
[102]559 def set_unit(self, unit='channel'):
560 """
561 Set the unit for all following operations on this scantable
562 Parameters:
563 unit: optional unit, default is 'channel'
[1118]564 one of '*Hz', 'km/s', 'channel', ''
[102]565 """
[484]566 varlist = vars()
[1118]567 if unit in ['', 'pixel', 'channel']:
[113]568 unit = ''
569 inf = list(self._getcoordinfo())
570 inf[0] = unit
571 self._setcoordinfo(inf)
[1118]572 self._add_history("set_unit", varlist)
[113]573
[484]574 def set_instrument(self, instr):
[358]575 """
[1348]576 Set the instrument for subsequent processing.
[358]577 Parameters:
[710]578 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]579 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]580 """
581 self._setInstrument(instr)
[1118]582 self._add_history("set_instrument", vars())
[718]583 print_log()
[358]584
[1190]585 def set_feedtype(self, feedtype):
586 """
587 Overwrite the feed type, which might not be set correctly.
588 Parameters:
589 feedtype: 'linear' or 'circular'
590 """
591 self._setfeedtype(feedtype)
592 self._add_history("set_feedtype", vars())
593 print_log()
594
[276]595 def set_doppler(self, doppler='RADIO'):
596 """
597 Set the doppler for all following operations on this scantable.
598 Parameters:
599 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
600 """
[484]601 varlist = vars()
[276]602 inf = list(self._getcoordinfo())
603 inf[2] = doppler
604 self._setcoordinfo(inf)
[1118]605 self._add_history("set_doppler", vars())
[718]606 print_log()
[710]607
[226]608 def set_freqframe(self, frame=None):
[113]609 """
610 Set the frame type of the Spectral Axis.
611 Parameters:
[591]612 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]613 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
614 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]615 Examples:
616 scan.set_freqframe('BARY')
617 """
[484]618 if frame is None: frame = rcParams['scantable.freqframe']
619 varlist = vars()
[1118]620 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
621 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]622
[989]623 if frame in valid:
[113]624 inf = list(self._getcoordinfo())
625 inf[1] = frame
626 self._setcoordinfo(inf)
[1118]627 self._add_history("set_freqframe", varlist)
[102]628 else:
[1118]629 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
[718]630 if rcParams['verbose']:
631 print msg
632 else:
633 raise TypeError(msg)
634 print_log()
[710]635
[989]636 def set_dirframe(self, frame=""):
637 """
638 Set the frame type of the Direction on the sky.
639 Parameters:
640 frame: an optional frame type, default ''. Valid frames are:
641 'J2000', 'B1950', 'GALACTIC'
642 Examples:
643 scan.set_dirframe('GALACTIC')
644 """
645 varlist = vars()
646 try:
647 Scantable.set_dirframe(self, frame)
[1118]648 except RuntimeError, msg:
[989]649 if rcParams['verbose']:
650 print msg
651 else:
652 raise
[1118]653 self._add_history("set_dirframe", varlist)
[989]654
[113]655 def get_unit(self):
656 """
657 Get the default unit set in this scantable
658 Returns:
659 A unit string
660 """
661 inf = self._getcoordinfo()
662 unit = inf[0]
663 if unit == '': unit = 'channel'
664 return unit
[102]665
[158]666 def get_abcissa(self, rowno=0):
[102]667 """
[158]668 Get the abcissa in the current coordinate setup for the currently
[113]669 selected Beam/IF/Pol
670 Parameters:
[226]671 rowno: an optional row number in the scantable. Default is the
672 first row, i.e. rowno=0
[113]673 Returns:
[1348]674 The abcissa values and the format string (as a tuple)
[113]675 """
[256]676 abc = self._getabcissa(rowno)
[710]677 lbl = self._getabcissalabel(rowno)
[718]678 print_log()
[158]679 return abc, lbl
[113]680
[1118]681 def flag(self, mask=None):
[1001]682 """
683 Flag the selected data using an optional channel mask.
684 Parameters:
685 mask: an optional channel mask, created with create_mask. Default
686 (no mask) is all channels.
687 """
688 varlist = vars()
[1118]689 if mask is None:
690 mask = []
[1001]691 try:
692 self._flag(mask)
[1118]693 except RuntimeError, msg:
[1001]694 if rcParams['verbose']:
695 print msg
696 return
697 else: raise
698 self._add_history("flag", varlist)
699
[1203]700 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]701 """
702 Flag the data in 'lag' space by providing a frequency to remove.
703 Flagged data in the scantable gets set to 0.0 before the fft.
704 No taper is applied.
705 Parameters:
[1348]706 frequency: the frequency (really a period within the bandwidth)
707 to remove
708 width: the width of the frequency to remove, to remove a
[1471]709 range of frequencies around the centre.
[1203]710 unit: the frequency unit (default "GHz")
711 Notes:
[1348]712 It is recommended to flag edges of the band or strong
713 signals beforehand.
[1192]714 """
715 if insitu is None: insitu = rcParams['insitu']
716 self._math._setinsitu(insitu)
717 varlist = vars()
[1370]718 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]719 if not base.has_key(unit):
720 raise ValueError("%s is not a valid unit." % unit)
721 try:
[1200]722 s = scantable(self._math._lag_flag(self, frequency*base[unit],
723 width*base[unit]))
[1192]724 except RuntimeError, msg:
725 if rcParams['verbose']:
726 print msg
727 return
728 else: raise
729 s._add_history("lag_flag", varlist)
730 print_log()
731 if insitu:
732 self._assign(s)
733 else:
734 return s
[1001]735
[1192]736
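    # Usage sketch: removing a standing-wave ripple in 'lag' space. The
    # frequency (really a period within the bandwidth) and width values are
    # illustrative only.
    #
    #   cleaned = scan.lag_flag(1.2, width=0.1, unit="GHz", insitu=False)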
[113]737 def create_mask(self, *args, **kwargs):
738 """
[1118]739 Compute and return a mask based on [min, max] windows.
[189]740 The specified windows are to be INCLUDED, when the mask is
[113]741 applied.
[102]742 Parameters:
[1118]743 [min, max], [min2, max2], ...
[1024]744 Pairs of start/end points (inclusive) specifying the regions
[102]745 to be masked
[189]746 invert: optional argument. If specified as True,
747 return an inverted mask, i.e. the regions
748 specified are EXCLUDED
[513]749 row: create the mask using the specified row for
750 unit conversions, default is row=0
751 only necessary if frequency varies over rows.
[102]752 Example:
[113]753 scan.set_unit('channel')
754 a)
[1118]755 msk = scan.create_mask([400, 500], [800, 900])
[189]756 # masks everything outside 400 and 500
[113]757 # and 800 and 900 in the unit 'channel'
758
759 b)
[1118]760 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]761 # masks the regions between 400 and 500
[113]762 # and 800 and 900 in the unit 'channel'
[1024]763 c)
764 mask only channel 400
[1554]765 msk = scan.create_mask([400])
[102]766 """
[1554]767 row = kwargs.get("row", 0)
[513]768 data = self._getabcissa(row)
[113]769 u = self._getcoordinfo()[0]
[718]770 if rcParams['verbose']:
[113]771 if u == "": u = "channel"
[718]772 msg = "The current mask window unit is %s" % u
[1118]773 i = self._check_ifs()
774 if not i:
[876]775 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]776 asaplog.push(msg)
[102]777 n = self.nchan()
[1295]778 msk = _n_bools(n, False)
[710]779 # test if args is a 'list' or a 'normal *args - UGLY!!!
780
[1118]781 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
782 and args or args[0]
[710]783 for window in ws:
[1554]784 if len(window) == 1:
785 window = [window[0], window[0]]
786 if len(window) == 0 or len(window) > 2:
787 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]788 if window[0] > window[1]:
789 tmp = window[0]
790 window[0] = window[1]
791 window[1] = tmp
[102]792 for i in range(n):
[1024]793 if data[i] >= window[0] and data[i] <= window[1]:
[1295]794 msk[i] = True
[113]795 if kwargs.has_key('invert'):
796 if kwargs.get('invert'):
[1295]797 msk = mask_not(msk)
[718]798 print_log()
[102]799 return msk
[710]800
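    # Usage sketch: combining masks with the helpers imported at the top of
    # this module (mask_and, mask_or, mask_not). Channel ranges are
    # illustrative.
    #
    #   msk1 = scan.create_mask([100, 400])
    #   msk2 = scan.create_mask([300, 800], invert=True)
    #   both = mask_and(msk1, msk2)   # True only where both masks are True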
[256]801 def get_restfreqs(self):
802 """
803 Get the restfrequency(s) stored in this scantable.
804 The return value(s) are always of unit 'Hz'
805 Parameters:
806 none
807 Returns:
808 a list of doubles
809 """
810 return list(self._getrestfreqs())
[102]811
[402]812
[931]813 def set_restfreqs(self, freqs=None, unit='Hz'):
814 """
815 Set or replace the restfrequency specified.
816 If the 'freqs' argument holds a scalar,
817 then that rest frequency will be applied to all the selected
818 data. If the 'freqs' argument holds
819 a vector, then it MUST be of equal or smaller length than
820 the number of IFs (and the available restfrequencies will be
821 replaced by this vector). In this case, *all* data have
822 the restfrequency set per IF according
823 to the corresponding value you give in the 'freqs' vector.
[1118]824 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]825 IF 1 gets restfreq 2e9.
[1395]826 You can also specify the frequencies via a linecatalog.
[1153]827
[931]828 Parameters:
829 freqs: list of rest frequency values or string identifiers
830 unit: unit for rest frequency (default 'Hz')
[402]831
[931]832 Example:
833 # set the given restfrequency for the whole table
834 scan.set_restfreqs(freqs=1.4e9)
[1348]835 # If the number of IFs in the data is >= 2 IF0 gets the first
[931]836 # value IF1 the second...
[1118]837 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]838 #set the given restfrequency for the whole table (by name)
839 scan.set_restfreqs(freqs="OH1667")
[391]840
[931]841 Note:
842 To do more sophisticated rest-frequency setting, e.g. on a
843 source and IF basis, use scantable.set_selection() before using
844 this function.
845 # provided your scantable is called scan
846 selection = selector()
847 selection.set_name("ORION*")
848 selection.set_ifs([1])
849 scan.set_selection(selection)
850 scan.set_restfreqs(freqs=86.6e9)
851
852 """
853 varlist = vars()
[1157]854 from asap import linecatalog
855 # simple value
[1118]856 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]857 self._setrestfreqs(freqs, "",unit)
[1157]858 # list of values
[1118]859 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]860 # list values are scalars
[1118]861 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]862 sel = selector()
863 savesel = self._getselection()
[1322]864 iflist = self.getifnos()
[931]865 for i in xrange(len(freqs)):
[1322]866 sel.set_ifs(iflist[i])
[931]867 self._setselection(sel)
[1153]868 self._setrestfreqs(freqs[i], "",unit)
[931]869 self._setselection(savesel)
[1157]870 # list values are dicts, {'value': ..., 'name': ...}
871 elif isinstance(freqs[-1], dict):
872 sel = selector()
873 savesel = self._getselection()
[1322]874 iflist = self.getifnos()
[1157]875 for i in xrange(len(freqs)):
[1322]876 sel.set_ifs(iflist[i])
[1259]877 self._setselection(sel)
[1157]878 self._setrestfreqs(freqs[i]["value"],
879 freqs[i]["name"], "MHz")
880 self._setselection(savesel)
881 # freqs are to be taken from a linecatalog
[1153]882 elif isinstance(freqs, linecatalog):
883 sel = selector()
884 savesel = self._getselection()
[1322]885 iflist = self.getifnos()
[1153]886 for i in xrange(freqs.nrow()):
[1322]887 sel.set_ifs(iflist[i])
[1153]888 self._setselection(sel)
889 self._setrestfreqs(freqs.get_frequency(i),
890 freqs.get_name(i), "MHz")
891 # ensure that we are not iterating past nIF
892 if i == self.nif()-1: break
893 self._setselection(savesel)
[931]894 else:
895 return
896 self._add_history("set_restfreqs", varlist)
897
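    # Usage sketch: rest frequencies per IF. The line frequencies below are
    # illustrative; as documented above, a list must not be longer than the
    # number of IFs and is applied to the IFs in order.
    #
    #   scan.set_restfreqs(freqs=[1.6654018e9, 1.6673590e9], unit='Hz')
    #   scan.set_restfreqs(freqs="OH1667")   # by name, for the whole table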
[1360]898 def shift_refpix(self, delta):
899 """
900 Shift the reference pixel of the Spectral Coordinate by an
901 integer amount.
902 Parameters:
903 delta: the amount to shift by
904 Note:
905 Be careful using this with broadband data.
906 """
907 Scantable.shift(self, delta)
[931]908
[1259]909 def history(self, filename=None):
910 """
911 Print the history. Optionally to a file.
[1348]912 Parameters:
913 filename: The name of the file to save the history to.
[1259]914 """
[484]915 hist = list(self._gethistory())
[794]916 out = "-"*80
[484]917 for h in hist:
[489]918 if h.startswith("---"):
[794]919 out += "\n"+h
[489]920 else:
921 items = h.split("##")
922 date = items[0]
923 func = items[1]
924 items = items[2:]
[794]925 out += "\n"+date+"\n"
926 out += "Function: %s\n Parameters:" % (func)
[489]927 for i in items:
928 s = i.split("=")
[1118]929 out += "\n %s = %s" % (s[0], s[1])
[794]930 out += "\n"+"-"*80
[1259]931 if filename is not None:
932 if filename == "":
933 filename = 'scantable_history.txt'
934 import os
935 filename = os.path.expandvars(os.path.expanduser(filename))
936 if not os.path.isdir(filename):
937 data = open(filename, 'w')
938 data.write(out)
939 data.close()
940 else:
941 msg = "Illegal file name '%s'." % (filename)
942 if rcParams['verbose']:
943 print msg
944 else:
945 raise IOError(msg)
946 if rcParams['verbose']:
947 try:
948 from IPython.genutils import page as pager
949 except ImportError:
950 from pydoc import pager
951 pager(out)
952 else:
953 return out
[484]954 return
[513]955 #
956 # Maths business
957 #
958
[931]959 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]960 """
[1070]961 Return the (time) weighted average of a scan.
[513]962 Note:
[1070]963 in channels only - align if necessary
[513]964 Parameters:
965 mask: an optional mask (only used for 'var' and 'tsys'
966 weighting)
[558]967 scanav: True averages each scan separately
968 False (default) averages all scans together,
[1099]969 weight: Weighting scheme.
970 'none' (mean no weight)
971 'var' (1/var(spec) weighted)
972 'tsys' (1/Tsys**2 weighted)
973 'tint' (integration time weighted)
974 'tintsys' (Tint/Tsys**2)
975 'median' (median averaging)
[535]976 The default is 'tint'
[931]977 align: align the spectra in velocity before averaging. It takes
978 the time of the first spectrum as reference time.
[513]979 Example:
980 # time average the scantable without using a mask
[710]981 newscan = scan.average_time()
[513]982 """
983 varlist = vars()
[976]984 if weight is None: weight = 'TINT'
[513]985 if mask is None: mask = ()
[1099]986 if scanav: scanav = "SCAN"
987 else: scanav = "NONE"
[1118]988 scan = (self, )
[989]989 try:
[1118]990 if align:
991 scan = (self.freq_align(insitu=False), )
992 s = None
993 if weight.upper() == 'MEDIAN':
994 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
995 scanav))
996 else:
997 s = scantable(self._math._average(scan, mask, weight.upper(),
998 scanav))
999 except RuntimeError, msg:
[989]1000 if rcParams['verbose']:
1001 print msg
1002 return
1003 else: raise
[1099]1004 s._add_history("average_time", varlist)
[718]1005 print_log()
[513]1006 return s
[710]1007
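    # Usage sketch: common averaging variants, using the weighting names
    # listed in the docstring above.
    #
    #   av_all  = scan.average_time(weight='tintsys')
    #   av_scan = scan.average_time(scanav=True, weight='median')  # per scan
    #   av_aln  = scan.average_time(align=True)   # align in velocity first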
[876]1008 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1009 """
1010 Return a scan where all spectra are converted to either
1011 Jansky or Kelvin depending upon the flux units of the scan table.
1012 By default the function tries to look the values up internally.
1013 If it can't find them (or if you want to over-ride), you must
1014 specify EITHER jyperk OR eta (and D which it will try to look up
1015 also if you don't set it). jyperk takes precedence if you set both.
1016 Parameters:
1017 jyperk: the Jy / K conversion factor
1018 eta: the aperture efficiency
1019 d: the geometric diameter (metres)
1020 insitu: if False a new scantable is returned.
1021 Otherwise, the scaling is done in-situ
1022 The default is taken from .asaprc (False)
1023 """
1024 if insitu is None: insitu = rcParams['insitu']
[876]1025 self._math._setinsitu(insitu)
[513]1026 varlist = vars()
1027 if jyperk is None: jyperk = -1.0
1028 if d is None: d = -1.0
1029 if eta is None: eta = -1.0
[876]1030 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1031 s._add_history("convert_flux", varlist)
1032 print_log()
1033 if insitu: self._assign(s)
1034 else: return s
[513]1035
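    # Usage sketch: Kelvin/Jansky conversion with explicit factors. The Jy/K
    # factor, efficiency and diameter below are illustrative, not real
    # telescope values.
    #
    #   jy = scan.convert_flux(jyperk=12.5, insitu=False)
    #   jy = scan.convert_flux(eta=0.58, d=64.0, insitu=False)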
[876]1036 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1037 """
1038 Return a scan after applying a gain-elevation correction.
1039 The correction can be made via either a polynomial or a
1040 table-based interpolation (and extrapolation if necessary).
1041 You specify polynomial coefficients, an ascii table or neither.
1042 If you specify neither, then a polynomial correction will be made
1043 with built in coefficients known for certain telescopes (an error
1044 will occur if the instrument is not known).
1045 The data and Tsys are *divided* by the scaling factors.
1046 Parameters:
1047 poly: Polynomial coefficients (default None) to compute a
1048 gain-elevation correction as a function of
1049 elevation (in degrees).
1050 filename: The name of an ascii file holding correction factors.
1051 The first row of the ascii file must give the column
1052 names and these MUST include columns
1053 "ELEVATION" (degrees) and "FACTOR" (multiply data
1054 by this) somewhere.
1055 The second row must give the data type of the
1056 column. Use 'R' for Real and 'I' for Integer.
1057 An example file would be
1058 (actual factors are arbitrary) :
1059
1060 TIME ELEVATION FACTOR
1061 R R R
1062 0.1 0 0.8
1063 0.2 20 0.85
1064 0.3 40 0.9
1065 0.4 60 0.85
1066 0.5 80 0.8
1067 0.6 90 0.75
1068 method: Interpolation method when correcting from a table.
1069 Values are "nearest", "linear" (default), "cubic"
1070 and "spline"
1071 insitu: if False a new scantable is returned.
1072 Otherwise, the scaling is done in-situ
1073 The default is taken from .asaprc (False)
1074 """
1075
1076 if insitu is None: insitu = rcParams['insitu']
[876]1077 self._math._setinsitu(insitu)
[513]1078 varlist = vars()
1079 if poly is None:
[1118]1080 poly = ()
[513]1081 from os.path import expandvars
1082 filename = expandvars(filename)
[876]1083 s = scantable(self._math._gainel(self, poly, filename, method))
1084 s._add_history("gain_el", varlist)
1085 print_log()
1086 if insitu: self._assign(s)
1087 else: return s
[710]1088
[931]1089 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1090 """
1091 Return a scan where all rows have been aligned in frequency/velocity.
1092 The alignment frequency frame (e.g. LSRK) is that set by function
1093 set_freqframe.
1094 Parameters:
1095 reftime: reference time to align at. By default, the time of
1096 the first row of data is used.
1097 method: Interpolation method for regridding the spectra.
1098 Choose from "nearest", "linear", "cubic" (default)
1099 and "spline"
1100 insitu: if False a new scantable is returned.
1101 Otherwise, the scaling is done in-situ
1102 The default is taken from .asaprc (False)
1103 """
[931]1104 if insitu is None: insitu = rcParams["insitu"]
[876]1105 self._math._setinsitu(insitu)
[513]1106 varlist = vars()
[931]1107 if reftime is None: reftime = ""
1108 s = scantable(self._math._freq_align(self, reftime, method))
[876]1109 s._add_history("freq_align", varlist)
1110 print_log()
1111 if insitu: self._assign(s)
1112 else: return s
[513]1113
[876]1114 def opacity(self, tau, insitu=None):
[513]1115 """
1116 Apply an opacity correction. The data
1117 and Tsys are multiplied by the correction factor.
1118 Parameters:
1119 tau: Opacity from which the correction factor is
1120 exp(tau*ZD)
1121 where ZD is the zenith-distance
1122 insitu: if False a new scantable is returned.
1123 Otherwise, the scaling is done in-situ
1124 The default is taken from .asaprc (False)
1125 """
1126 if insitu is None: insitu = rcParams['insitu']
[876]1127 self._math._setinsitu(insitu)
[513]1128 varlist = vars()
[876]1129 s = scantable(self._math._opacity(self, tau))
1130 s._add_history("opacity", varlist)
1131 print_log()
1132 if insitu: self._assign(s)
1133 else: return s
[513]1134
1135 def bin(self, width=5, insitu=None):
1136 """
1137 Return a scan where all spectra have been binned up.
[1348]1138 Parameters:
[513]1139 width: The bin width (default=5) in pixels
1140 insitu: if False a new scantable is returned.
1141 Otherwise, the scaling is done in-situ
1142 The default is taken from .asaprc (False)
1143 """
1144 if insitu is None: insitu = rcParams['insitu']
[876]1145 self._math._setinsitu(insitu)
[513]1146 varlist = vars()
[876]1147 s = scantable(self._math._bin(self, width))
[1118]1148 s._add_history("bin", varlist)
[876]1149 print_log()
1150 if insitu: self._assign(s)
1151 else: return s
[513]1152
[710]1153
[513]1154 def resample(self, width=5, method='cubic', insitu=None):
1155 """
[1348]1156 Return a scan where all spectra have been resampled onto a new channel grid.
1157
1158 Parameters:
[513]1159 width: The bin width (default=5) in pixels
1160 method: Interpolation method for the resampling.
1161 Values are "nearest", "linear", "cubic" (default)
1162 and "spline"
1163 insitu: if False a new scantable is returned.
1164 Otherwise, the scaling is done in-situ
1165 The default is taken from .asaprc (False)
1166 """
1167 if insitu is None: insitu = rcParams['insitu']
[876]1168 self._math._setinsitu(insitu)
[513]1169 varlist = vars()
[876]1170 s = scantable(self._math._resample(self, method, width))
[1118]1171 s._add_history("resample", varlist)
[876]1172 print_log()
1173 if insitu: self._assign(s)
1174 else: return s
[513]1175
1176
[946]1177 def average_pol(self, mask=None, weight='none'):
1178 """
1179 Average the Polarisations together.
1180 Parameters:
1181 mask: An optional mask defining the region, where the
1182 averaging will be applied. The output will have all
1183 specified points masked.
1184 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1185 weighted), or 'tsys' (1/Tsys**2 weighted)
1186 """
1187 varlist = vars()
1188 if mask is None:
1189 mask = ()
[1010]1190 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1191 s._add_history("average_pol", varlist)
[946]1192 print_log()
[992]1193 return s
[513]1194
[1145]1195 def average_beam(self, mask=None, weight='none'):
1196 """
1197 Average the Beams together.
1198 Parameters:
1199 mask: An optional mask defining the region, where the
1200 averaging will be applied. The output will have all
1201 specified points masked.
1202 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1203 weighted), or 'tsys' (1/Tsys**2 weighted)
1204 """
1205 varlist = vars()
1206 if mask is None:
1207 mask = ()
1208 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1209 s._add_history("average_beam", varlist)
1210 print_log()
1211 return s
1212
[992]1213 def convert_pol(self, poltype=None):
1214 """
1215 Convert the data to a different polarisation type.
[1565]1216 Note that you will need cross-polarisation terms for most conversions.
[992]1217 Parameters:
1218 poltype: The new polarisation type. Valid types are:
[1565]1219 "linear", "circular", "stokes" and "linpol"
[992]1220 """
1221 varlist = vars()
1222 try:
1223 s = scantable(self._math._convertpol(self, poltype))
[1118]1224 except RuntimeError, msg:
[992]1225 if rcParams['verbose']:
[1118]1226 print msg
1227 return
[992]1228 else:
1229 raise
[1118]1230 s._add_history("convert_pol", varlist)
[992]1231 print_log()
1232 return s
1233
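    # Usage sketch: polarisation conversion. As noted above, most conversions
    # require the cross-polarisation terms to be present in the data.
    #
    #   stokes = scan.convert_pol("stokes")
    #   circ   = scan.convert_pol("circular")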
[876]1234 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1235 """
1236 Smooth the spectrum by the specified kernel (conserving flux).
1237 Parameters:
1238 kernel: The type of smoothing kernel. Select from
[1373]1239 'hanning' (default), 'gaussian', 'boxcar' and
1240 'rmedian'
[513]1241 width: The width of the kernel in pixels. For hanning this is
1242 ignored otherwise it defaults to 5 pixels.
1243 For 'gaussian' it is the Full Width Half
1244 Maximum. For 'boxcar' it is the full width.
[1373]1245 For 'rmedian' it is the half width.
[513]1246 insitu: if False a new scantable is returned.
1247 Otherwise, the scaling is done in-situ
1248 The default is taken from .asaprc (False)
1249 Example:
1250 none
1251 """
1252 if insitu is None: insitu = rcParams['insitu']
[876]1253 self._math._setinsitu(insitu)
[513]1254 varlist = vars()
[1118]1255 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1256 s._add_history("smooth", varlist)
1257 print_log()
1258 if insitu: self._assign(s)
1259 else: return s
[513]1260
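    # Usage sketch: kernel smoothing. The width is in pixels (FWHM for
    # 'gaussian', full width for 'boxcar'); the values are illustrative.
    #
    #   sm = scan.smooth(kernel='gaussian', width=7, insitu=False)
    #   sm = scan.smooth()   # hanning; width is ignored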
[876]1261
[1391]1262 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1263 """
1264 Return a scan which has been baselined (all rows) by a polynomial.
1265 Parameters:
[794]1266 mask: an optional mask
1267 order: the order of the polynomial (default is 0)
[1061]1268 plot: plot the fit and the residual. In this case each
1269 individual fit has to be approved by typing 'y'
1270 or 'n'
[1391]1271 uselin: use linear polynomial fit
[794]1272 insitu: if False a new scantable is returned.
1273 Otherwise, the scaling is done in-situ
1274 The default is taken from .asaprc (False)
[513]1275 Example:
1276 # return a scan baselined by a third order polynomial,
1277 # not using a mask
1278 bscan = scan.poly_baseline(order=3)
[579]1279 """
[513]1280 if insitu is None: insitu = rcParams['insitu']
1281 varlist = vars()
1282 if mask is None:
[1295]1283 mask = [True for i in xrange(self.nchan(-1))]
[513]1284 from asap.asapfitter import fitter
[1217]1285 try:
1286 f = fitter()
1287 f.set_scan(self, mask)
[1391]1288 #f.set_function(poly=order)
1289 if uselin:
1290 f.set_function(lpoly=order)
1291 else:
1292 f.set_function(poly=order)
[1217]1293 s = f.auto_fit(insitu, plot=plot)
1294 s._add_history("poly_baseline", varlist)
1295 print_log()
1296 if insitu: self._assign(s)
1297 else: return s
1298 except RuntimeError:
1299 msg = "The fit failed, possibly because it didn't converge."
1300 if rcParams['verbose']:
1301 print msg
1302 return
1303 else:
1304 raise RuntimeError(msg)
[513]1305
[1217]1306
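    # Usage sketch: baselining with a mask that excludes the line channels.
    # The channel range is illustrative; invert=True keeps the line region
    # out of the fit.
    #
    #   linefree = scan.create_mask([1000, 3000], invert=True)
    #   bscan = scan.poly_baseline(mask=linefree, order=2)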
[1118]1307 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1308 threshold=3, chan_avg_limit=1, plot=False,
1309 insitu=None):
[880]1310 """
1311 Return a scan which has been baselined (all rows) by a polynomial.
1312 Spectral lines are detected first using linefinder and masked out
1313 to avoid them affecting the baseline solution.
1314
1315 Parameters:
1316 mask: an optional mask retrieved from scantable
1317 edge: an optional number of channel to drop at
1318 the edge of spectrum. If only one value is
1319 specified, the same number will be dropped from
1320 both sides of the spectrum. Default is to keep
[907]1321 all channels. Nested tuples represent individual
[976]1322 edge selection for different IFs (a number of spectral
1323 channels can be different)
[880]1324 order: the order of the polynomial (default is 0)
1325 threshold: the threshold used by line finder. It is better to
1326 keep it large as only strong lines affect the
1327 baseline solution.
[1280]1328 chan_avg_limit:
1329 a maximum number of consecutive spectral channels to
1330 average during the search of weak and broad lines.
1331 The default is no averaging (and no search for weak
1332 lines). If such lines can affect the fitted baseline
1333 (e.g. a high order polynomial is fitted), increase this
1334 parameter (usually values up to 8 are reasonable). Most
1335 users of this method should find the default value
1336 sufficient.
[1061]1337 plot: plot the fit and the residual. In this case each
1338 individual fit has to be approved by typing 'y'
1339 or 'n'
[880]1340 insitu: if False a new scantable is returned.
1341 Otherwise, the scaling is done in-situ
1342 The default is taken from .asaprc (False)
1343
1344 Example:
1345 scan2=scan.auto_poly_baseline(order=7)
1346 """
1347 if insitu is None: insitu = rcParams['insitu']
1348 varlist = vars()
1349 from asap.asapfitter import fitter
1350 from asap.asaplinefind import linefinder
1351 from asap import _is_sequence_or_number as _is_valid
1352
[976]1353 # check whether edge is set up for each IF individually
[1118]1354 individualedge = False;
1355 if len(edge) > 1:
1356 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1357 individualedge = True;
[907]1358
[1118]1359 if not _is_valid(edge, int) and not individualedge:
[909]1360 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1361 pair of integers specified as a tuple. Nested tuples are allowed \
1362 to make individual selection for different IFs."
[919]1363
[1118]1364 curedge = (0, 0)
1365 if individualedge:
1366 for edgepar in edge:
1367 if not _is_valid(edgepar, int):
1368 raise ValueError, "Each element of the 'edge' tuple has \
1369 to be a pair of integers or an integer."
[907]1370 else:
[1118]1371 curedge = edge;
[880]1372
1373 # setup fitter
1374 f = fitter()
1375 f.set_function(poly=order)
1376
1377 # setup line finder
[1118]1378 fl = linefinder()
[1268]1379 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1380
1381 if not insitu:
[1118]1382 workscan = self.copy()
[880]1383 else:
[1118]1384 workscan = self
[880]1385
[907]1386 fl.set_scan(workscan)
1387
[1118]1388 rows = range(workscan.nrow())
[880]1389 asaplog.push("Processing:")
1390 for r in rows:
[1118]1391 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1392 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1393 workscan.getpol(r), workscan.getcycle(r))
[880]1394 asaplog.push(msg, False)
[907]1395
[976]1396 # figure out edge parameter
[1118]1397 if individualedge:
1398 if len(edge) <= workscan.getif(r):
1399 raise RuntimeError, "Number of edge elements appear to " \
1400 "be less than the number of IFs"
1401 curedge = edge[workscan.getif(r)]
[919]1402
[976]1403 # setup line finder
[1118]1404 fl.find_lines(r, mask, curedge)
[1536]1405 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1406 mask_and(workscan._getmask(r), fl.get_mask()))
[880]1407 f.fit()
1408 x = f.get_parameters()
[1061]1409 if plot:
1410 f.plot(residual=True)
1411 x = raw_input("Accept fit ( [y]/n ): ")
1412 if x.upper() == 'N':
1413 continue
[880]1414 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1415 if plot:
1416 f._p.unmap()
1417 f._p = None
1418 workscan._add_history("auto_poly_baseline", varlist)
[880]1419 if insitu:
1420 self._assign(workscan)
1421 else:
1422 return workscan
1423
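    # Usage sketch: automatic baselining with line detection. The edge and
    # threshold values are illustrative; see the parameter notes above.
    #
    #   bscan = scan.auto_poly_baseline(order=3, edge=(50, 50), threshold=5,
    #                                   insitu=False)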
[914]1424 def rotate_linpolphase(self, angle):
1425 """
1426 Rotate the phase of the complex polarization O=Q+iU correlation.
1427 This is always done in situ in the raw data. So if you call this
1428 function more than once then each call rotates the phase further.
1429 Parameters:
1430 angle: The angle (degrees) to rotate (add) by.
1431 Examples:
1432 scan.rotate_linpolphase(2.3)
1433 """
1434 varlist = vars()
[936]1435 self._math._rotate_linpolphase(self, angle)
[914]1436 self._add_history("rotate_linpolphase", varlist)
1437 print_log()
1438 return
[710]1439
[513]1440
[914]1441 def rotate_xyphase(self, angle):
1442 """
1443 Rotate the phase of the XY correlation. This is always done in situ
1444 in the data. So if you call this function more than once
1445 then each call rotates the phase further.
1446 Parameters:
1447 angle: The angle (degrees) to rotate (add) by.
1448 Examples:
1449 scan.rotate_xyphase(2.3)
1450 """
1451 varlist = vars()
[936]1452 self._math._rotate_xyphase(self, angle)
[914]1453 self._add_history("rotate_xyphase", varlist)
1454 print_log()
1455 return
1456
1457 def swap_linears(self):
1458 """
[1348]1459 Swap the linear polarisations XX and YY, or more generally the first two
1460 polarisations, as this also works for circulars.
[914]1461 """
1462 varlist = vars()
[936]1463 self._math._swap_linears(self)
[914]1464 self._add_history("swap_linears", varlist)
1465 print_log()
1466 return
1467
1468 def invert_phase(self):
1469 """
1470 Invert the phase of the complex polarisation
1471 """
1472 varlist = vars()
[936]1473 self._math._invert_phase(self)
[914]1474 self._add_history("invert_phase", varlist)
1475 print_log()
1476 return
1477
[876]1478 def add(self, offset, insitu=None):
[513]1479 """
1480 Return a scan where all spectra have the offset added
1481 Parameters:
1482 offset: the offset
1483 insitu: if False a new scantable is returned.
1484 Otherwise, the scaling is done in-situ
1485 The default is taken from .asaprc (False)
1486 """
1487 if insitu is None: insitu = rcParams['insitu']
[876]1488 self._math._setinsitu(insitu)
[513]1489 varlist = vars()
[876]1490 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1491 s._add_history("add", varlist)
[876]1492 print_log()
1493 if insitu:
1494 self._assign(s)
1495 else:
[513]1496 return s
1497
[1308]1498 def scale(self, factor, tsys=True, insitu=None):
[513]1499 """
1500 Return a scan where all spectra are scaled by the given 'factor'
1501 Parameters:
1502 factor: the scaling factor
1503 insitu: if False a new scantable is returned.
1504 Otherwise, the scaling is done in-situ
1505 The default is taken from .asaprc (False)
1506 tsys: if True (default) then apply the operation to Tsys
1507 as well as the data
1508 """
1509 if insitu is None: insitu = rcParams['insitu']
[876]1510 self._math._setinsitu(insitu)
[513]1511 varlist = vars()
[876]1512 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1513 s._add_history("scale", varlist)
[876]1514 print_log()
1515 if insitu:
1516 self._assign(s)
1517 else:
[513]1518 return s
1519
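    # Usage sketch: simple arithmetic on the spectra. The values are
    # illustrative; the overloaded operators defined further below give the
    # same results.
    #
    #   s2 = scan.scale(2.0, tsys=False, insitu=False)   # scale spectra only
    #   s3 = scan.add(1.5, insitu=False)                 # add a constant offset
    #   s4 = scan * 2.0                                   # operator form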
[1504]1520 def set_sourcetype(self, match, matchtype="pattern",
1521 sourcetype="reference"):
[1502]1522 """
1523 Set the type of the source to be a source or reference scan
1524 using the provided pattern.
1525 Parameters:
[1504]1526 match: a Unix style pattern, regular expression or selector
1527 matchtype: 'pattern' (default) UNIX style pattern or
1528 'regex' regular expression
[1502]1529 sourcetype: the type of the source to use (source/reference)
1530 """
1531 varlist = vars()
1532 basesel = self.get_selection()
1533 stype = -1
1534 if sourcetype.lower().startswith("r"):
1535 stype = 1
1536 elif sourcetype.lower().startswith("s"):
1537 stype = 0
[1504]1538 else:
[1502]1539 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]1540 if matchtype.lower().startswith("p"):
1541 matchtype = "pattern"
1542 elif matchtype.lower().startswith("r"):
1543 matchtype = "regex"
1544 else:
1545 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]1546 sel = selector()
1547 if isinstance(match, selector):
1548 sel = match
1549 else:
[1504]1550 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]1551 self.set_selection(basesel+sel)
1552 self._setsourcetype(stype)
1553 self.set_selection(basesel)
1554 self._add_history("set_sourcetype", varlist)
1555
[1348]1556 def auto_quotient(self, preserve=True, mode='paired'):
[670]1557 """
1558 This function allows you to build quotients automatically.
1559 It assumes the observation to have the same number of
1560 "ons" and "offs".
1561 Parameters:
[710]1562 preserve: you can preserve (default) the continuum or
1563 remove it. The equations used are
[670]1564 preserve: Output = Toff * (on/off) - Toff
[1070]1565 remove: Output = Toff * (on/off) - Ton
[1348]1566 mode: the on/off detection mode
1567 'paired' (default)
1568 identifies 'off' scans by the
1569 trailing '_R' (Mopra/Parkes) or
1570 '_e'/'_w' (Tid) and matches
1571 on/off pairs from the observing pattern
[1502]1572 'time'
1573 finds the closest off in time
[1348]1574
[670]1575 """
[1348]1576 modes = ["time", "paired"]
[670]1577 if not mode in modes:
[876]1578 msg = "please provide valid mode. Valid modes are %s" % (modes)
1579 raise ValueError(msg)
1580 varlist = vars()
[1348]1581 s = None
1582 if mode.lower() == "paired":
1583 basesel = self.get_selection()
[1356]1584 sel = selector()+basesel
1585 sel.set_query("SRCTYPE==1")
1586 self.set_selection(sel)
[1348]1587 offs = self.copy()
1588 sel.set_query("SRCTYPE==0")
[1356]1589 self.set_selection(sel)
[1348]1590 ons = self.copy()
1591 s = scantable(self._math._quotient(ons, offs, preserve))
1592 self.set_selection(basesel)
1593 elif mode.lower() == "time":
1594 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1595 s._add_history("auto_quotient", varlist)
[876]1596 print_log()
1597 return s
[710]1598
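    # Usage sketch: forming quotients for a position-switched observation,
    # using the mode names from the docstring above.
    #
    #   q = scan.auto_quotient()                # 'paired' on/off matching
    #   q = scan.auto_quotient(mode='time')     # nearest 'off' in time
    #   q = scan.auto_quotient(preserve=False)  # remove the continuum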
[1145]1599 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1600 """
[1143]1601 Form a quotient using "off" beams when observing in "MX" mode.
1602 Parameters:
[1145]1603 mask: an optional mask to be used when weight == 'stddev'
[1143]1604 weight: How to average the off beams. Default is 'median'.
[1145]1605 preserve: you can preserve (default) the continuum or
1606 remove it. The equations used are
1607 preserve: Output = Toff * (on/off) - Toff
1608 remove: Output = Toff * (on/off) - Ton
[1217]1609 """
[1143]1610 if mask is None: mask = ()
[1141]1611 varlist = vars()
1612 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1613 preoff = scantable(self._math._mx_extract(self, 'off'))
1614 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1615 from asapmath import quotient
[1145]1616 q = quotient(on, off, preserve)
[1143]1617 q._add_history("mx_quotient", varlist)
[1145]1618 print_log()
[1217]1619 return q
[513]1620
[718]1621 def freq_switch(self, insitu=None):
1622 """
1623 Apply frequency switching to the data.
1624 Parameters:
1625 insitu: if False a new scantable is returned.
1626 Otherwise, the swictching is done in-situ
1627 The default is taken from .asaprc (False)
1628 Example:
1629 none
1630 """
1631 if insitu is None: insitu = rcParams['insitu']
[876]1632 self._math._setinsitu(insitu)
[718]1633 varlist = vars()
[876]1634 s = scantable(self._math._freqswitch(self))
[1118]1635 s._add_history("freq_switch", varlist)
[876]1636 print_log()
1637 if insitu: self._assign(s)
1638 else: return s
[718]1639
[780]1640 def recalc_azel(self):
1641 """
1642 Recalculate the azimuth and elevation for each position.
1643 Parameters:
1644 none
1645 Example:
1646 """
1647 varlist = vars()
[876]1648 self._recalcazel()
[780]1649 self._add_history("recalc_azel", varlist)
1650 print_log()
1651 return
1652
[513]1653 def __add__(self, other):
1654 varlist = vars()
1655 s = None
1656 if isinstance(other, scantable):
[1308]1657 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1658 elif isinstance(other, float):
[876]1659 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1660 else:
[718]1661 raise TypeError("Other input is not a scantable or float value")
[513]1662 s._add_history("operator +", varlist)
[718]1663 print_log()
[513]1664 return s
1665
1666 def __sub__(self, other):
1667 """
1668 implicit on all axes and on Tsys
1669 """
1670 varlist = vars()
1671 s = None
1672 if isinstance(other, scantable):
[1308]1673 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1674 elif isinstance(other, float):
[876]1675 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1676 else:
[718]1677 raise TypeError("Other input is not a scantable or float value")
[513]1678 s._add_history("operator -", varlist)
[718]1679 print_log()
[513]1680 return s
[710]1681
[513]1682 def __mul__(self, other):
1683 """
1684 implicit on all axes and on Tsys
1685 """
1686 varlist = vars()
1687 s = None
1688 if isinstance(other, scantable):
[1308]1689 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1690 elif isinstance(other, float):
[876]1691 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1692 else:
[718]1693 raise TypeError("Other input is not a scantable or float value")
[513]1694 s._add_history("operator *", varlist)
[718]1695 print_log()
[513]1696 return s
1697
[710]1698
[513]1699 def __div__(self, other):
1700 """
1701 implicit on all axes and on Tsys
1702 """
1703 varlist = vars()
1704 s = None
1705 if isinstance(other, scantable):
[1308]1706 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1707 elif isinstance(other, float):
1708 if other == 0.0:
[718]1709 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1710 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1711 else:
[718]1712 raise TypeError("Other input is not a scantable or float value")
[513]1713 s._add_history("operator /", varlist)
[718]1714 print_log()
[513]1715 return s
1716
[530]1717 def get_fit(self, row=0):
1718 """
1719 Print or return the stored fits for a row in the scantable
1720 Parameters:
1721 row: the row which the fit has been applied to.
1722 """
1723 if row > self.nrow():
1724 return
[976]1725 from asap.asapfit import asapfit
[530]1726 fit = asapfit(self._getfit(row))
[718]1727 if rcParams['verbose']:
[530]1728 print fit
1729 return
1730 else:
1731 return fit.as_dict()
1732
[1483]1733 def flag_nans(self):
1734 """
1735 Utility function to flag NaN values in the scantable.
1736 """
1737 import numpy
1738 basesel = self.get_selection()
1739 for i in range(self.nrow()):
1740 sel = selector()+basesel
1741 sel.set_scans(self.getscan(i))
1742 sel.set_beams(self.getbeam(i))
1743 sel.set_ifs(self.getif(i))
1744 sel.set_polarisations(self.getpol(i))
1745 self.set_selection(sel)
1746 nans = numpy.isnan(self._getspectrum(0))
1747 if numpy.any(nans):
1748 bnans = [ bool(v) for v in nans]
1749 self.flag(bnans)
1750 self.set_selection(basesel)
1751
1752
[484]1753 def _add_history(self, funcname, parameters):
[1435]1754 if not rcParams['scantable.history']:
1755 return
[484]1756 # create date
1757 sep = "##"
1758 from datetime import datetime
1759 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1760 hist = dstr+sep
1761 hist += funcname+sep#cdate+sep
1762 if parameters.has_key('self'): del parameters['self']
[1118]1763 for k, v in parameters.iteritems():
[484]1764 if type(v) is dict:
[1118]1765 for k2, v2 in v.iteritems():
[484]1766 hist += k2
1767 hist += "="
[1118]1768 if isinstance(v2, scantable):
[484]1769 hist += 'scantable'
1770 elif k2 == 'mask':
[1118]1771 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1772 hist += str(self._zip_mask(v2))
1773 else:
1774 hist += str(v2)
[484]1775 else:
[513]1776 hist += str(v2)
[484]1777 else:
1778 hist += k
1779 hist += "="
[1118]1780 if isinstance(v, scantable):
[484]1781 hist += 'scantable'
1782 elif k == 'mask':
[1118]1783 if isinstance(v, list) or isinstance(v, tuple):
[513]1784 hist += str(self._zip_mask(v))
1785 else:
1786 hist += str(v)
[484]1787 else:
1788 hist += str(v)
1789 hist += sep
1790 hist = hist[:-2] # remove trailing '##'
1791 self._addhistory(hist)
1792
[710]1793
[484]1794 def _zip_mask(self, mask):
1795 mask = list(mask)
1796 i = 0
1797 segments = []
1798 while mask[i:].count(1):
1799 i += mask[i:].index(1)
1800 if mask[i:].count(0):
1801 j = i + mask[i:].index(0)
1802 else:
[710]1803 j = len(mask)
[1118]1804 segments.append([i, j])
[710]1805 i = j
[484]1806 return segments
[714]1807
[626]1808 def _get_ordinate_label(self):
1809 fu = "("+self.get_fluxunit()+")"
1810 import re
1811 lbl = "Intensity"
[1118]1812 if re.match(".K.", fu):
[626]1813 lbl = "Brightness Temperature "+ fu
[1118]1814 elif re.match(".Jy.", fu):
[626]1815 lbl = "Flux density "+ fu
1816 return lbl
[710]1817
[876]1818 def _check_ifs(self):
1819 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1820 nchans = filter(lambda t: t > 0, nchans)
[876]1821 return (sum(nchans)/len(nchans) == nchans[0])
[976]1822
1823 def _fill(self, names, unit, average):
1824 import os
1825 from asap._asap import stfiller
1826 first = True
1827 fullnames = []
1828 for name in names:
1829 name = os.path.expandvars(name)
1830 name = os.path.expanduser(name)
1831 if not os.path.exists(name):
1832 msg = "File '%s' does not exist" % (name)
1833 if rcParams['verbose']:
1834 asaplog.push(msg)
1835 print asaplog.pop().strip()
1836 return
1837 raise IOError(msg)
1838 fullnames.append(name)
1839 if average:
1840 asaplog.push('Auto averaging integrations')
[1079]1841 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1842 for name in fullnames:
[1073]1843 tbl = Scantable(stype)
1844 r = stfiller(tbl)
[1504]1845 rx = rcParams['scantable.reference']
1846 r._setreferenceexpr(rx)
[976]1847 msg = "Importing %s..." % (name)
[1118]1848 asaplog.push(msg, False)
[976]1849 print_log()
[1118]1850 r._open(name, -1, -1)
[976]1851 r._read()
1852 if average:
[1118]1853 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1854 if not first:
1855 tbl = self._math._merge([self, tbl])
1856 Scantable.__init__(self, tbl)
1857 r._close()
[1118]1858 del r, tbl
[976]1859 first = False
1860 if unit is not None:
1861 self.set_fluxunit(unit)
1862 self.set_freqframe(rcParams['scantable.freqframe'])
1863
[1402]1864 def __getitem__(self, key):
1865 if key < 0:
1866 key += self.nrow()
1867 if key >= self.nrow():
1868 raise IndexError("Row index out of range.")
1869 return self._getspectrum(key)
1870
1871 def __setitem__(self, key, value):
1872 if key < 0:
1873 key += self.nrow()
1874 if key >= self.nrow():
1875 raise IndexError("Row index out of range.")
1876 if not hasattr(value, "__len__") or \
1877 len(value) > self.nchan(self.getif(key)):
1878 raise ValueError("Spectrum length doesn't match.")
1879 return self._setspectrum(value, key)
1880
1881 def __len__(self):
1882 return self.nrow()
1883
1884 def __iter__(self):
1885 for i in range(len(self)):
1886 yield self[i]
1886 yield self[i]