source: trunk/python/scantable.py@ 1391

Last change on this file since 1391 was 1391, checked in by Malte Marquarding, 17 years ago

merge from alma branch to get alma/GBT support. Commented out fluxUnit changes as they are using a changed interface to PKSreader/writer. Also commented out progress meter related code.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 65.9 KB
RevLine 
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
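         Example (a minimal sketch; the file name is illustrative only):
             # read an rpfits file, auto-averaging integrations within scans
             s = scantable('mydata.rpf', average=True)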
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62 msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[1259]74 self._add_history("scantable", varlist)
[714]75 print_log()
[102]76
[876]77 def save(self, name=None, format=None, overwrite=False):
[116]78 """
[1280]79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
[116]81 Parameters:
[1093]82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
[497]84 and header in 'name'_header.txt)
[116]85 format: an optional file format. Default is ASAP.
[280]86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]87 'SDFITS' (save as SDFITS file)
[200]88 'ASCII' (saves as ascii text file)
[226]89 'MS2' (saves as an aips++
90 MeasurementSet V2)
[411]91 overwrite: If the file should be overwritten if it exists.
[256]92 The default False is to return with warning
[411]93 without writing the output. USE WITH CARE.
[116]94 Example:
95 scan.save('myscan.asap')
[1118]96 scan.save('myscan.sdfits', 'SDFITS')
[116]97 """
[411]98 from os import path
[226]99 if format is None: format = rcParams['scantable.save']
[256]100 suffix = '.'+format.lower()
[1118]101 if name is None or name == "":
[256]102 name = 'scantable'+suffix
[718]103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
[411]105 name = path.expandvars(name)
[256]106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
[718]108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
[451]114 format2 = format.upper()
115 if format2 == 'ASAP':
[116]116 self._save(name)
117 else:
[989]118 from asap._asap import stwriter as stw
[1118]119 writer = stw(format2)
120 writer.write(self, name)
[718]121 print_log()
[116]122 return
123
[102]124 def copy(self):
125 """
126 Return a copy of this scantable.
[1348]127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]129 Parameters:
[113]130 none
[102]131 Example:
132 copiedscan = scan.copy()
133 """
[876]134 sd = scantable(Scantable._copy(self))
[113]135 return sd
136
[1093]137 def drop_scan(self, scanid=None):
138 """
139 Return a new scantable where the specified scan number(s) has(have)
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
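         Example (a minimal sketch; assumes the scantable contains
         scans 2 and 5):
             newscan = scan.drop_scan([2, 5])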
143 """
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
[1118]157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
[1093]159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
[1118]173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
[1093]177
178
[102]179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
[1348]183 Note:
184 See scantable.drop_scan() for the inverse operation.
[102]185 Parameters:
[513]186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
188 '*_R' gets all 'ref' scans
[102]189 Example:
[513]190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
194 # get a subset of scans by scanno (as listed in scan.summary())
[1118]195 newscan = scan.get_scan([0, 2, 7, 10])
[102]196 """
197 if scanid is None:
[718]198 if rcParams['verbose']:
[1118]199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
[718]201 return
202 else:
203 raise RuntimeError("No scan given")
204
[102]205 try:
[946]206 bsel = self.get_selection()
207 sel = selector()
[102]208 if type(scanid) is str:
[946]209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
[876]211 scopy = self._copy()
[946]212 self.set_selection(bsel)
[876]213 return scantable(scopy)
[102]214 elif type(scanid) is int:
[946]215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
[876]217 scopy = self._copy()
[946]218 self.set_selection(bsel)
[876]219 return scantable(scopy)
[381]220 elif type(scanid) is list:
[946]221 sel.set_scans(scanid)
222 self.set_selection(bsel+sel)
[876]223 scopy = self._copy()
[946]224 self.set_selection(bsel)
[876]225 return scantable(scopy)
[381]226 else:
[718]227 msg = "Illegal scanid type, use 'int' or 'list' if ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
[102]232 except RuntimeError:
[718]233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
[102]235
236 def __str__(self):
[1118]237 return Scantable._summary(self, True)
[102]238
[976]239 def summary(self, filename=None):
[102]240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
243 filename: the name of a file to write the output to
244 Default - no file output
[102]247 """
[976]248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]250 if filename is not None:
[256]251 if filename == "":
252 filename = 'scantable_summary.txt'
[415]253 from os.path import expandvars, isdir
[411]254 filename = expandvars(filename)
[415]255 if not isdir(filename):
[413]256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
[718]260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
[794]266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
[718]271 else:
272 return info
[710]273
[946]274
275 def get_selection(self):
276 """
[1005]277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
[946]284 """
285 return selector(self._getselection())
286
[1005]287 def set_selection(self, selection=selector()):
[946]288 """
[1005]289 Select a subset of the data. All following operations on this scantable
290 are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
[1118]295 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]296 scan.set_selection(sel) # set the selection
297 scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
[946]299 """
300 self._setselection(selection)
301
[876]302 def stats(self, stat='stddev', mask=None):
[102]303 """
[135]304 Determine the specified statistic of the current beam/if/pol
[102]305 Takes a 'mask' as an optional parameter to specify which
306 channels should be excluded.
307 Parameters:
[135]308 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
309 'var', 'stddev', 'avdev', 'rms', 'median'
310 mask: an optional mask specifying where the statistic
[102]311 should be determined.
312 Example:
[113]313 scan.set_unit('channel')
[1118]314 msk = scan.create_mask([100, 200], [500, 600])
[135]315 scan.stats(stat='mean', mask=msk)
[102]316 """
317 if mask is None:
[876]318 mask = []
[1118]319 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]320 if not self._check_ifs():
[1118]321 raise ValueError("Cannot apply mask as the IFs have different "
322 "number of channels. Please use setselection() "
323 "to select individual IFs")
[256]324
[876]325 statvals = self._math._stats(self, mask, stat)
326 out = ''
327 axes = []
328 for i in range(self.nrow()):
329 axis = []
330 axis.append(self.getscan(i))
331 axis.append(self.getbeam(i))
332 axis.append(self.getif(i))
333 axis.append(self.getpol(i))
334 axis.append(self.getcycle(i))
335 axes.append(axis)
336 tm = self._gettime(i)
337 src = self._getsourcename(i)
338 out += 'Scan[%d] (%s) ' % (axis[0], src)
339 out += 'Time[%s]:\n' % (tm)
340 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
341 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
342 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
343 out += '= %3.3f\n' % (statvals[i])
344 out += "--------------------------------------------------\n"
[256]345
[876]346 if rcParams['verbose']:
347 print "--------------------------------------------------"
[1118]348 print " ", stat
[876]349 print "--------------------------------------------------"
350 print out
[1295]351 #else:
352 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353 # 'axes' : axes,
354 # 'data': statvals}
355 return statvals
[102]356
[1118]357 def stddev(self, mask=None):
[135]358 """
359 Determine the standard deviation of the current beam/if/pol
360 Takes a 'mask' as an optional parameter to specify which
361 channels should be excluded.
362 Parameters:
363 mask: an optional mask specifying where the standard
364 deviation should be determined.
365
366 Example:
367 scan.set_unit('channel')
[1118]368 msk = scan.create_mask([100, 200], [500, 600])
[135]369 scan.stddev(mask=msk)
370 """
[1118]371 return self.stats(stat='stddev', mask=mask)
[135]372
[1003]373
[1259]374 def get_column_names(self):
[1003]375 """
376 Return a list of column names, which can be used for selection.
377 """
[1259]378 return list(Scantable.get_column_names(self))
[1003]379
[876]380 def get_tsys(self):
[113]381 """
382 Return the System temperatures.
383 Returns:
[876]384 a list of Tsys values for the current selection
[113]385 """
[256]386
[876]387 return self._row_callback(self._gettsys, "Tsys")
[256]388
[876]389 def _row_callback(self, callback, label):
390 axes = []
[1118]391 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]392 out = ""
[1118]393 outvec = []
[876]394 for i in range(self.nrow()):
395 axis = []
396 axis.append(self.getscan(i))
397 axis.append(self.getbeam(i))
398 axis.append(self.getif(i))
399 axis.append(self.getpol(i))
400 axis.append(self.getcycle(i))
401 axes.append(axis)
402 tm = self._gettime(i)
403 src = self._getsourcename(i)
404 out += 'Scan[%d] (%s) ' % (axis[0], src)
405 out += 'Time[%s]:\n' % (tm)
406 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
407 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
408 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
409 outvec.append(callback(i))
410 out += '= %3.3f\n' % (outvec[i])
411 out += "--------------------------------------------------\n"
412 if rcParams['verbose']:
413 print "--------------------------------------------------"
414 print " %s" % (label)
415 print "--------------------------------------------------"
416 print out
[1175]417 # disabled because the vector seems more useful
418 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419 return outvec
[256]420
[1070]421 def _get_column(self, callback, row=-1):
422 """
423 """
424 if row == -1:
425 return [callback(i) for i in range(self.nrow())]
426 else:
427 if 0 <= row < self.nrow():
428 return callback(row)
[256]429
[1070]430
[1348]431 def get_time(self, row=-1, asdatetime=False):
[113]432 """
433 Get a list of time stamps for the observations.
[1348]434 Return a string (or a datetime object if asdatetime=True) for each integration time stamp in the scantable.
[113]435 Parameters:
[1348]436 row: row no of integration. Default -1 return all rows
437 asdatetime: return values as datetime objects rather than strings
[113]438 Example:
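              (a minimal sketch; assumes a loaded scantable 'scan')
              times = scan.get_time()                  # time stamps as strings
              stamps = scan.get_time(asdatetime=True)  # as datetime objects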
440 """
[1175]441 from time import strptime
442 from datetime import datetime
[1348]443 if not asdatetime:
444 return self._get_column(self._gettime, row)
[1175]445 format = "%Y/%m/%d/%H:%M:%S"
times = self._get_column(self._gettime, row)
446 if isinstance(times, list):
447 return [datetime(*strptime(i, format)[:6]) for i in times]
448 else:
449 return datetime(*strptime(times, format)[:6])
[102]450
[1348]451
452 def get_inttime(self, row=-1):
453 """
454 Get a list of integration times for the observations.
455 Return a time in seconds for each integration in the scantable.
456 Parameters:
457 row: row no of integration. Default -1 return all rows.
458 Example:
459 none
460 """
461 return self._get_column(self._getinttime, row)
462
463
[714]464 def get_sourcename(self, row=-1):
465 """
[794]466 Get a list of source names for the observations.
[714]467 Return a string for each integration in the scantable.
468 Parameters:
[1348]469 row: row no of integration. Default -1 return all rows.
[714]470 Example:
471 none
472 """
[1070]473 return self._get_column(self._getsourcename, row)
[714]474
[794]475 def get_elevation(self, row=-1):
476 """
477 Get a list of elevations for the observations.
478 Return a float for each integration in the scantable.
479 Parameters:
[1348]480 row: row no of integration. Default -1 return all rows.
[794]481 Example:
482 none
483 """
[1070]484 return self._get_column(self._getelevation, row)
[794]485
486 def get_azimuth(self, row=-1):
487 """
488 Get a list of azimuths for the observations.
489 Return a float for each integration in the scantable.
490 Parameters:
[1348]491 row: row no of integration. Default -1 return all rows.
[794]492 Example:
493 none
494 """
[1070]495 return self._get_column(self._getazimuth, row)
[794]496
497 def get_parangle(self, row=-1):
498 """
499 Get a list of parallactic angles for the observations.
500 Return a float for each integration in the scantable.
501 Parameters:
[1348]502 row: row no of integration. Default -1 return all rows.
[794]503 Example:
504 none
505 """
[1070]506 return self._get_column(self._getparangle, row)
[794]507
[1070]508 def get_direction(self, row=-1):
509 """
510 Get a list of Positions on the sky (direction) for the observations.
511 Return the direction for each integration in the scantable.
512 Parameters:
513 row: row no of integration. Default -1 return all rows
514 Example:
515 none
516 """
517 return self._get_column(self._getdirection, row)
518
[1391]519 def get_directionval(self, row=-1):
520 """
521 Get a list of Positions on the sky (direction) for the observations.
522 Return the direction as a vector of values for each integration in the scantable.
523 Parameters:
524 row: row no of integration. Default -1 return all rows
525 Example:
526 none
527 """
528 return self._get_column(self._getdirectionvec, row)
529
530
[102]531 def set_unit(self, unit='channel'):
532 """
533 Set the unit for all following operations on this scantable
534 Parameters:
535 unit: optional unit, default is 'channel'
[1118]536 one of '*Hz', 'km/s', 'channel', ''
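         Example (a minimal sketch):
             scan.set_unit('km/s')   # work in velocity
             scan.set_unit('GHz')    # or in frequency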
[102]537 """
[484]538 varlist = vars()
[1118]539 if unit in ['', 'pixel', 'channel']:
[113]540 unit = ''
541 inf = list(self._getcoordinfo())
542 inf[0] = unit
543 self._setcoordinfo(inf)
[1118]544 self._add_history("set_unit", varlist)
[113]545
[484]546 def set_instrument(self, instr):
[358]547 """
[1348]548 Set the instrument for subsequent processing.
[358]549 Parameters:
[710]550 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]551 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]552 """
553 self._setInstrument(instr)
[1118]554 self._add_history("set_instrument", vars())
[718]555 print_log()
[358]556
[1190]557 def set_feedtype(self, feedtype):
558 """
559 Overwrite the feed type, which might not be set correctly.
560 Parameters:
561 feedtype: 'linear' or 'circular'
562 """
563 self._setfeedtype(feedtype)
564 self._add_history("set_feedtype", vars())
565 print_log()
566
[276]567 def set_doppler(self, doppler='RADIO'):
568 """
569 Set the doppler for all following operations on this scantable.
570 Parameters:
571 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
572 """
[484]573 varlist = vars()
[276]574 inf = list(self._getcoordinfo())
575 inf[2] = doppler
576 self._setcoordinfo(inf)
[1118]577 self._add_history("set_doppler", vars())
[718]578 print_log()
[710]579
[226]580 def set_freqframe(self, frame=None):
[113]581 """
582 Set the frame type of the Spectral Axis.
583 Parameters:
[591]584 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]585 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
586 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]587 Examples:
588 scan.set_freqframe('BARY')
589 """
[484]590 if frame is None: frame = rcParams['scantable.freqframe']
591 varlist = vars()
[1118]592 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
593 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]594
[989]595 if frame in valid:
[113]596 inf = list(self._getcoordinfo())
597 inf[1] = frame
598 self._setcoordinfo(inf)
[1118]599 self._add_history("set_freqframe", varlist)
[102]600 else:
[1118]601 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid,)
[718]602 if rcParams['verbose']:
603 print msg
604 else:
605 raise TypeError(msg)
606 print_log()
[710]607
[989]608 def set_dirframe(self, frame=""):
609 """
610 Set the frame type of the Direction on the sky.
611 Parameters:
612 frame: an optional frame type, default ''. Valid frames are:
613 'J2000', 'B1950', 'GALACTIC'
614 Examples:
615 scan.set_dirframe('GALACTIC')
616 """
617 varlist = vars()
618 try:
619 Scantable.set_dirframe(self, frame)
[1118]620 except RuntimeError, msg:
[989]621 if rcParams['verbose']:
622 print msg
623 else:
624 raise
[1118]625 self._add_history("set_dirframe", varlist)
[989]626
[113]627 def get_unit(self):
628 """
629 Get the default unit set in this scantable
630 Returns:
631 A unit string
632 """
633 inf = self._getcoordinfo()
634 unit = inf[0]
635 if unit == '': unit = 'channel'
636 return unit
[102]637
[158]638 def get_abcissa(self, rowno=0):
[102]639 """
[158]640 Get the abcissa in the current coordinate setup for the currently
[113]641 selected Beam/IF/Pol
642 Parameters:
[226]643 rowno: an optional row number in the scantable. Default is the
644 first row, i.e. rowno=0
[113]645 Returns:
[1348]646 The abcissa values and the format string (as a tuple)
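         Example (a minimal sketch):
             abc, lbl = scan.get_abcissa(rowno=0)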
[113]647 """
[256]648 abc = self._getabcissa(rowno)
[710]649 lbl = self._getabcissalabel(rowno)
[718]650 print_log()
[158]651 return abc, lbl
[113]652
[1118]653 def flag(self, mask=None):
[1001]654 """
655 Flag the selected data using an optional channel mask.
656 Parameters:
657 mask: an optional channel mask, created with create_mask. Default
658 (no mask) is all channels.
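         Example (a minimal sketch; the channel range is arbitrary):
             msk = scan.create_mask([0, 100])
             scan.flag(mask=msk)     # flag channels 0-100 of the selection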
659 """
660 varlist = vars()
[1118]661 if mask is None:
662 mask = []
[1001]663 try:
664 self._flag(mask)
[1118]665 except RuntimeError, msg:
[1001]666 if rcParams['verbose']:
667 print msg
668 return
669 else: raise
670 self._add_history("flag", varlist)
671
[1203]672 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]673 """
674 Flag the data in 'lag' space by providing a frequency to remove.
675 Flagged data in the scantable gets set to 0.0 before the fft.
676 No taper is applied.
677 Parameters:
[1348]678 frequency: the frequency (really a period within the bandwidth)
679 to remove
680 width: the width of the frequency to remove, to remove a
681 range of frequencies around the centre.
[1203]682 unit: the frequency unit (default "GHz")
683 Notes:
[1348]684 It is recommended to flag edges of the band or strong
685 signals beforehand.
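         Example (a minimal sketch; the frequency and width are arbitrary):
             cleaned = scan.lag_flag(frequency=6.0, width=0.1, unit="GHz",
                                     insitu=False)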
[1192]686 """
687 if insitu is None: insitu = rcParams['insitu']
688 self._math._setinsitu(insitu)
689 varlist = vars()
[1370]690 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]691 if not base.has_key(unit):
692 raise ValueError("%s is not a valid unit." % unit)
693 try:
[1200]694 s = scantable(self._math._lag_flag(self, frequency*base[unit],
695 width*base[unit]))
[1192]696 except RuntimeError, msg:
697 if rcParams['verbose']:
698 print msg
699 return
700 else: raise
701 s._add_history("lag_flag", varlist)
702 print_log()
703 if insitu:
704 self._assign(s)
705 else:
706 return s
[1001]707
[1192]708
[113]709 def create_mask(self, *args, **kwargs):
710 """
[1118]711 Compute and return a mask based on [min, max] windows.
[189]712 The specified windows are to be INCLUDED, when the mask is
[113]713 applied.
[102]714 Parameters:
[1118]715 [min, max], [min2, max2], ...
[1024]716 Pairs of start/end points (inclusive) specifying the regions
[102]717 to be masked
[189]718 invert: optional argument. If specified as True,
719 return an inverted mask, i.e. the regions
720 specified are EXCLUDED
[513]721 row: create the mask using the specified row for
722 unit conversions, default is row=0
723 only necessary if frequency varies over rows.
[102]724 Example:
[113]725 scan.set_unit('channel')
726 a)
[1118]727 msk = scan.create_mask([400, 500], [800, 900])
[189]728 # masks everything outside 400 and 500
[113]729 # and 800 and 900 in the unit 'channel'
730
731 b)
[1118]732 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]733 # masks the regions between 400 and 500
[113]734 # and 800 and 900 in the unit 'channel'
[1024]735 c)
736 mask only channel 400
[1118]737 msk = scan.create_mask([400, 400])
[102]738 """
[513]739 row = 0
740 if kwargs.has_key("row"):
741 row = kwargs.get("row")
742 data = self._getabcissa(row)
[113]743 u = self._getcoordinfo()[0]
[718]744 if rcParams['verbose']:
[113]745 if u == "": u = "channel"
[718]746 msg = "The current mask window unit is %s" % u
[1118]747 i = self._check_ifs()
748 if not i:
[876]749 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]750 asaplog.push(msg)
[102]751 n = self.nchan()
[1295]752 msk = _n_bools(n, False)
[710]753 # test if args is a 'list' or a 'normal *args - UGLY!!!
754
[1118]755 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
756 and args or args[0]
[710]757 for window in ws:
[102]758 if (len(window) != 2 or window[0] > window[1] ):
[1118]759 raise TypeError("A window needs to be defined as [min, max]")
[102]760 for i in range(n):
[1024]761 if data[i] >= window[0] and data[i] <= window[1]:
[1295]762 msk[i] = True
[113]763 if kwargs.has_key('invert'):
764 if kwargs.get('invert'):
[1295]765 msk = mask_not(msk)
[718]766 print_log()
[102]767 return msk
[710]768
[256]769 def get_restfreqs(self):
770 """
771 Get the restfrequency(s) stored in this scantable.
772 The return value(s) are always of unit 'Hz'
773 Parameters:
774 none
775 Returns:
776 a list of doubles
777 """
778 return list(self._getrestfreqs())
[102]779
[402]780
[931]781 def set_restfreqs(self, freqs=None, unit='Hz'):
782 """
783 Set or replace the restfrequency specified and
784 If the 'freqs' argument holds a scalar,
785 then that rest frequency will be applied to all the selected
786 data. If the 'freqs' argument holds
787 a vector, then it MUST be of equal or smaller length than
788 the number of IFs (and the available restfrequencies will be
789 replaced by this vector). In this case, *all* data have
790 the restfrequency set per IF according
791 to the corresponding value you give in the 'freqs' vector.
[1118]792 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]793 IF 1 gets restfreq 2e9.
[1153]794 You can also specify the frequencies via a linecatalog.
795
[931]796 Parameters:
797 freqs: list of rest frequency values or string identifiers
798 unit: unit for rest frequency (default 'Hz')
[402]799
[931]800 Example:
801 # set the given restfrequency for the whole table
802 scan.set_restfreqs(freqs=1.4e9)
[1348]803 # If the number of IFs in the data is >= 2, IF0 gets the first
[931]804 # value IF1 the second...
[1118]805 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]806 #set the given restfrequency for the whole table (by name)
807 scan.set_restfreqs(freqs="OH1667")
[391]808
[931]809 Note:
[931]810 To do more sophisticated rest-frequency setting, e.g. on a
811 source and IF basis, use scantable.set_selection() before using
812 this function.
813 # provided your scantable is called scan
814 selection = selector()
815 selection.set_name("ORION*")
816 selection.set_ifs([1])
817 scan.set_selection(selection)
818 scan.set_restfreqs(freqs=86.6e9)
819
820 """
821 varlist = vars()
[1157]822 from asap import linecatalog
823 # simple value
[1118]824 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]825 self._setrestfreqs(freqs, "",unit)
[1157]826 # list of values
[1118]827 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]828 # list values are scalars
[1118]829 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]830 sel = selector()
831 savesel = self._getselection()
[1322]832 iflist = self.getifnos()
[931]833 for i in xrange(len(freqs)):
[1322]834 sel.set_ifs(iflist[i])
[931]835 self._setselection(sel)
[1153]836 self._setrestfreqs(freqs[i], "",unit)
[931]837 self._setselection(savesel)
[1157]838 # list values are dicts with 'value' and 'name' keys
839 elif isinstance(freqs[-1], dict):
840 sel = selector()
841 savesel = self._getselection()
[1322]842 iflist = self.getifnos()
[1157]843 for i in xrange(len(freqs)):
[1322]844 sel.set_ifs(iflist[i])
[1259]845 self._setselection(sel)
[1157]846 self._setrestfreqs(freqs[i]["value"],
847 freqs[i]["name"], "MHz")
848 self._setselection(savesel)
849 # freqs are to be taken from a linecatalog
[1153]850 elif isinstance(freqs, linecatalog):
851 sel = selector()
852 savesel = self._getselection()
[1322]853 iflist = self.getifnos()
[1153]854 for i in xrange(freqs.nrow()):
[1322]855 sel.set_ifs(iflist[i])
[1153]856 self._setselection(sel)
857 self._setrestfreqs(freqs.get_frequency(i),
858 freqs.get_name(i), "MHz")
859 # ensure that we are not iterating past nIF
860 if i == self.nif()-1: break
861 self._setselection(savesel)
[931]862 else:
863 return
864 self._add_history("set_restfreqs", varlist)
865
[1360]866 def shift_refpix(self, delta):
867 """
868 Shift the reference pixel of the Spectral Coordinate by an
869 integer amount.
870 Parameters:
871 delta: the amount to shift by
872 Note:
873 Be careful using this with broadband data.
874 """
875 Scantable.shift(self, delta)
[931]876
[1259]877 def history(self, filename=None):
878 """
879 Print the history. Optionally to a file.
[1348]880 Parameters:
881 filename: The name of the file to save the history to.
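         Example (a minimal sketch):
             scan.history()                          # page the history
             scan.history('scantable_history.txt')   # or write it to a file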
[1259]882 """
[484]883 hist = list(self._gethistory())
[794]884 out = "-"*80
[484]885 for h in hist:
[489]886 if h.startswith("---"):
[794]887 out += "\n"+h
[489]888 else:
889 items = h.split("##")
890 date = items[0]
891 func = items[1]
892 items = items[2:]
[794]893 out += "\n"+date+"\n"
894 out += "Function: %s\n Parameters:" % (func)
[489]895 for i in items:
896 s = i.split("=")
[1118]897 out += "\n %s = %s" % (s[0], s[1])
[794]898 out += "\n"+"-"*80
[1259]899 if filename is not None:
900 if filename == "":
901 filename = 'scantable_history.txt'
902 import os
903 filename = os.path.expandvars(os.path.expanduser(filename))
904 if not os.path.isdir(filename):
905 data = open(filename, 'w')
906 data.write(out)
907 data.close()
908 else:
909 msg = "Illegal file name '%s'." % (filename)
910 if rcParams['verbose']:
911 print msg
912 else:
913 raise IOError(msg)
914 if rcParams['verbose']:
915 try:
916 from IPython.genutils import page as pager
917 except ImportError:
918 from pydoc import pager
919 pager(out)
920 else:
921 return out
[484]922 return
[513]923 #
924 # Maths business
925 #
926
[931]927 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]928 """
[1070]929 Return the (time) weighted average of a scan.
[513]930 Note:
[1070]931 in channels only - align if necessary
[513]932 Parameters:
933 mask: an optional mask (only used for 'var' and 'tsys'
934 weighting)
[558]935 scanav: True averages each scan separately
936 False (default) averages all scans together,
[1099]937 weight: Weighting scheme.
938 'none' (mean, no weighting)
939 'var' (1/var(spec) weighted)
940 'tsys' (1/Tsys**2 weighted)
941 'tint' (integration time weighted)
942 'tintsys' (Tint/Tsys**2)
943 'median' (median averaging)
[535]944 The default is 'tint'
[931]945 align: align the spectra in velocity before averaging. It takes
946 the time of the first spectrum as reference time.
[513]947 Example:
948 # time average the scantable without using a mask
[710]949 newscan = scan.average_time()
[513]950 """
951 varlist = vars()
[976]952 if weight is None: weight = 'TINT'
[513]953 if mask is None: mask = ()
[1099]954 if scanav: scanav = "SCAN"
955 else: scanav = "NONE"
[1118]956 scan = (self, )
[989]957 try:
[1118]958 if align:
959 scan = (self.freq_align(insitu=False), )
960 s = None
961 if weight.upper() == 'MEDIAN':
962 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
963 scanav))
964 else:
965 s = scantable(self._math._average(scan, mask, weight.upper(),
966 scanav))
967 except RuntimeError, msg:
[989]968 if rcParams['verbose']:
969 print msg
970 return
971 else: raise
[1099]972 s._add_history("average_time", varlist)
[718]973 print_log()
[513]974 return s
[710]975
[876]976 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]977 """
978 Return a scan where all spectra are converted to either
979 Jansky or Kelvin depending upon the flux units of the scan table.
980 By default the function tries to look the values up internally.
981 If it can't find them (or if you want to over-ride), you must
982 specify EITHER jyperk OR eta (and D which it will try to look up
983 also if you don't set it). jyperk takes precedence if you set both.
984 Parameters:
985 jyperk: the Jy / K conversion factor
986 eta: the aperture efficiency
987 d: the geometric diameter (metres)
988 insitu: if False a new scantable is returned.
989 Otherwise, the scaling is done in-situ
990 The default is taken from .asaprc (False)
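         Example (a minimal sketch; efficiency and diameter are assumed values):
             jyscan = scan.convert_flux(eta=0.58, d=22.0, insitu=False)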
991 """
992 if insitu is None: insitu = rcParams['insitu']
[876]993 self._math._setinsitu(insitu)
[513]994 varlist = vars()
995 if jyperk is None: jyperk = -1.0
996 if d is None: d = -1.0
997 if eta is None: eta = -1.0
[876]998 s = scantable(self._math._convertflux(self, d, eta, jyperk))
999 s._add_history("convert_flux", varlist)
1000 print_log()
1001 if insitu: self._assign(s)
1002 else: return s
[513]1003
[876]1004 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1005 """
1006 Return a scan after applying a gain-elevation correction.
1007 The correction can be made via either a polynomial or a
1008 table-based interpolation (and extrapolation if necessary).
1009 You specify polynomial coefficients, an ascii table or neither.
1010 If you specify neither, then a polynomial correction will be made
1011 with built in coefficients known for certain telescopes (an error
1012 will occur if the instrument is not known).
1013 The data and Tsys are *divided* by the scaling factors.
1014 Parameters:
1015 poly: Polynomial coefficients (default None) to compute a
1016 gain-elevation correction as a function of
1017 elevation (in degrees).
1018 filename: The name of an ascii file holding correction factors.
1019 The first row of the ascii file must give the column
1020 names and these MUST include columns
1021 "ELEVATION" (degrees) and "FACTOR" (multiply data
1022 by this) somewhere.
1023 The second row must give the data type of the
1024 column. Use 'R' for Real and 'I' for Integer.
1025 An example file would be
1026 (actual factors are arbitrary) :
1027
1028 TIME ELEVATION FACTOR
1029 R R R
1030 0.1 0 0.8
1031 0.2 20 0.85
1032 0.3 40 0.9
1033 0.4 60 0.85
1034 0.5 80 0.8
1035 0.6 90 0.75
1036 method: Interpolation method when correcting from a table.
1037 Values are "nearest", "linear" (default), "cubic"
1038 and "spline"
1039 insitu: if False a new scantable is returned.
1040 Otherwise, the scaling is done in-situ
1041 The default is taken from .asaprc (False)
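         Example (a minimal sketch; 'gainel.txt' is a hypothetical file in the
         format shown above):
             corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                      insitu=False)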
1042 """
1043
1044 if insitu is None: insitu = rcParams['insitu']
[876]1045 self._math._setinsitu(insitu)
[513]1046 varlist = vars()
1047 if poly is None:
[1118]1048 poly = ()
[513]1049 from os.path import expandvars
1050 filename = expandvars(filename)
[876]1051 s = scantable(self._math._gainel(self, poly, filename, method))
1052 s._add_history("gain_el", varlist)
1053 print_log()
1054 if insitu: self._assign(s)
1055 else: return s
[710]1056
[931]1057 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1058 """
1059 Return a scan where all rows have been aligned in frequency/velocity.
1060 The alignment frequency frame (e.g. LSRK) is that set by function
1061 set_freqframe.
1062 Parameters:
1063 reftime: reference time to align at. By default, the time of
1064 the first row of data is used.
1065 method: Interpolation method for regridding the spectra.
1066 Choose from "nearest", "linear", "cubic" (default)
1067 and "spline"
1068 insitu: if False a new scantable is returned.
1069 Otherwise, the scaling is done in-situ
1070 The default is taken from .asaprc (False)
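         Example (a minimal sketch):
             aligned = scan.freq_align(method='linear', insitu=False)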
1071 """
[931]1072 if insitu is None: insitu = rcParams["insitu"]
[876]1073 self._math._setinsitu(insitu)
[513]1074 varlist = vars()
[931]1075 if reftime is None: reftime = ""
1076 s = scantable(self._math._freq_align(self, reftime, method))
[876]1077 s._add_history("freq_align", varlist)
1078 print_log()
1079 if insitu: self._assign(s)
1080 else: return s
[513]1081
[876]1082 def opacity(self, tau, insitu=None):
[513]1083 """
1084 Apply an opacity correction. The data
1085 and Tsys are multiplied by the correction factor.
1086 Parameters:
1087 tau: Opacity from which the correction factor is
1088 exp(tau*ZD)
1089 where ZD is the zenith-distance
1090 insitu: if False a new scantable is returned.
1091 Otherwise, the scaling is done in-situ
1092 The default is taken from .asaprc (False)
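         Example (a minimal sketch; the opacity value is arbitrary):
             corrected = scan.opacity(tau=0.09, insitu=False)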
1093 """
1094 if insitu is None: insitu = rcParams['insitu']
[876]1095 self._math._setinsitu(insitu)
[513]1096 varlist = vars()
[876]1097 s = scantable(self._math._opacity(self, tau))
1098 s._add_history("opacity", varlist)
1099 print_log()
1100 if insitu: self._assign(s)
1101 else: return s
[513]1102
1103 def bin(self, width=5, insitu=None):
1104 """
1105 Return a scan where all spectra have been binned up.
[1348]1106 Parameters:
[513]1107 width: The bin width (default=5) in pixels
1108 insitu: if False a new scantable is returned.
1109 Otherwise, the scaling is done in-situ
1110 The default is taken from .asaprc (False)
1111 """
1112 if insitu is None: insitu = rcParams['insitu']
[876]1113 self._math._setinsitu(insitu)
[513]1114 varlist = vars()
[876]1115 s = scantable(self._math._bin(self, width))
[1118]1116 s._add_history("bin", varlist)
[876]1117 print_log()
1118 if insitu: self._assign(s)
1119 else: return s
[513]1120
[710]1121
[513]1122 def resample(self, width=5, method='cubic', insitu=None):
1123 """
[1348]1124 Return a scan where all spectra have been resampled onto a new channel grid of the given width.
1125
1126 Parameters:
[513]1127 width: The bin width (default=5) in pixels
1128 method: Interpolation method when correcting from a table.
1129 Values are "nearest", "linear", "cubic" (default)
1130 and "spline"
1131 insitu: if False a new scantable is returned.
1132 Otherwise, the scaling is done in-situ
1133 The default is taken from .asaprc (False)
1134 """
1135 if insitu is None: insitu = rcParams['insitu']
[876]1136 self._math._setinsitu(insitu)
[513]1137 varlist = vars()
[876]1138 s = scantable(self._math._resample(self, method, width))
[1118]1139 s._add_history("resample", varlist)
[876]1140 print_log()
1141 if insitu: self._assign(s)
1142 else: return s
[513]1143
1144
[946]1145 def average_pol(self, mask=None, weight='none'):
1146 """
1147 Average the Polarisations together.
1148 Parameters:
1149 mask: An optional mask defining the region, where the
1150 averaging will be applied. The output will have all
1151 specified points masked.
1152 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1153 weighted), or 'tsys' (1/Tsys**2 weighted)
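         Example (a minimal sketch):
             pavg = scan.average_pol(weight='tsys')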
1154 """
1155 varlist = vars()
1156 if mask is None:
1157 mask = ()
[1010]1158 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1159 s._add_history("average_pol", varlist)
[946]1160 print_log()
[992]1161 return s
[513]1162
[1145]1163 def average_beam(self, mask=None, weight='none'):
1164 """
1165 Average the Beams together.
1166 Parameters:
1167 mask: An optional mask defining the region, where the
1168 averaging will be applied. The output will have all
1169 specified points masked.
1170 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1171 weighted), or 'tsys' (1/Tsys**2 weighted)
1172 """
1173 varlist = vars()
1174 if mask is None:
1175 mask = ()
1176 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1177 s._add_history("average_beam", varlist)
1178 print_log()
1179 return s
1180
[992]1181 def convert_pol(self, poltype=None):
1182 """
1183 Convert the data to a different polarisation type.
1184 Parameters:
1185 poltype: The new polarisation type. Valid types are:
1186 "linear", "stokes" and "circular"
1187 """
1188 varlist = vars()
1189 try:
1190 s = scantable(self._math._convertpol(self, poltype))
[1118]1191 except RuntimeError, msg:
[992]1192 if rcParams['verbose']:
[1118]1193 print msg
1194 return
[992]1195 else:
1196 raise
[1118]1197 s._add_history("convert_pol", varlist)
[992]1198 print_log()
1199 return s
1200
[876]1201 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1202 """
1203 Smooth the spectrum by the specified kernel (conserving flux).
1204 Parameters:
1205 kernel: The type of smoothing kernel. Select from
[1373]1206 'hanning' (default), 'gaussian', 'boxcar' and
1207 'rmedian'
[513]1208 width: The width of the kernel in pixels. For hanning this is
1209 ignored otherwise it defaults to 5 pixels.
1210 For 'gaussian' it is the Full Width Half
1211 Maximum. For 'boxcar' it is the full width.
[1373]1212 For 'rmedian' it is the half width.
[513]1213 insitu: if False a new scantable is returned.
1214 Otherwise, the scaling is done in-situ
1215 The default is taken from .asaprc (False)
1216 Example:
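              (a minimal sketch; kernel and width values are arbitrary)
              smoothed = scan.smooth(kernel='gaussian', width=7, insitu=False)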
1218 """
1219 if insitu is None: insitu = rcParams['insitu']
[876]1220 self._math._setinsitu(insitu)
[513]1221 varlist = vars()
[1118]1222 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1223 s._add_history("smooth", varlist)
1224 print_log()
1225 if insitu: self._assign(s)
1226 else: return s
[513]1227
[876]1228
[1391]1229 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1230 """
1231 Return a scan which has been baselined (all rows) by a polynomial.
1232 Parameters:
[794]1233 mask: an optional mask
1234 order: the order of the polynomial (default is 0)
[1061]1235 plot: plot the fit and the residual. Each
1236 individual fit has to be approved, by typing 'y'
1237 or 'n'
[1391]1238 uselin: use linear polynomial fit
[794]1239 insitu: if False a new scantable is returned.
1240 Otherwise, the scaling is done in-situ
1241 The default is taken from .asaprc (False)
[513]1242 Example:
1243 # return a scan baselined by a third order polynomial,
1244 # not using a mask
1245 bscan = scan.poly_baseline(order=3)
[579]1246 """
[513]1247 if insitu is None: insitu = rcParams['insitu']
1248 varlist = vars()
1249 if mask is None:
[1295]1250 mask = [True for i in xrange(self.nchan(-1))]
[513]1251 from asap.asapfitter import fitter
[1217]1252 try:
1253 f = fitter()
1254 f.set_scan(self, mask)
[1391]1255 #f.set_function(poly=order)
1256 if uselin:
1257 f.set_function(lpoly=order)
1258 else:
1259 f.set_function(poly=order)
[1217]1260 s = f.auto_fit(insitu, plot=plot)
1261 s._add_history("poly_baseline", varlist)
1262 print_log()
1263 if insitu: self._assign(s)
1264 else: return s
1265 except RuntimeError:
1266 msg = "The fit failed, possibly because it didn't converge."
1267 if rcParams['verbose']:
1268 print msg
1269 return
1270 else:
1271 raise RuntimeError(msg)
[513]1272
[1217]1273
[1118]1274 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1275 threshold=3, chan_avg_limit=1, plot=False,
1276 insitu=None):
[880]1277 """
1278 Return a scan which has been baselined (all rows) by a polynomial.
1279 Spectral lines are detected first using linefinder and masked out
1280 to avoid them affecting the baseline solution.
1281
1282 Parameters:
[880]1283 mask: an optional mask retrieved from scantable
1284 edge: an optional number of channels to drop at
1285 the edge of spectrum. If only one value is
1286 specified, the same number will be dropped from
1287 both sides of the spectrum. Default is to keep
[907]1288 all channels. Nested tuples represent individual
[976]1289 edge selection for different IFs (the number of spectral
1290 channels can differ between IFs)
[880]1291 order: the order of the polynomial (default is 0)
1292 threshold: the threshold used by line finder. It is better to
1293 keep it large as only strong lines affect the
1294 baseline solution.
[1280]1295 chan_avg_limit:
1296 a maximum number of consecutive spectral channels to
1297 average during the search of weak and broad lines.
1298 The default is no averaging (and no search for weak
1299 lines). If such lines can affect the fitted baseline
1300 (e.g. a high order polynomial is fitted), increase this
1301 parameter (usually values up to 8 are reasonable). Most
1302 users of this method should find the default value
1303 sufficient.
[1061]1304 plot: plot the fit and the residual. Each
1305 individual fit has to be approved, by typing 'y'
1306 or 'n'
[880]1307 insitu: if False a new scantable is returned.
1308 Otherwise, the scaling is done in-situ
1309 The default is taken from .asaprc (False)
1310
1311 Example:
1312 scan2=scan.auto_poly_baseline(order=7)
1313 """
1314 if insitu is None: insitu = rcParams['insitu']
1315 varlist = vars()
1316 from asap.asapfitter import fitter
1317 from asap.asaplinefind import linefinder
1318 from asap import _is_sequence_or_number as _is_valid
1319
[976]1320 # check whether edge is set up for each IF individually
[1118]1321 individualedge = False
1322 if len(edge) > 1:
1323 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1324 individualedge = True
[907]1325
[1118]1326 if not _is_valid(edge, int) and not individualedge:
[909]1327 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1328 pair of integers specified as a tuple. Nested tuples are allowed \
1329 to make individual selection for different IFs."
[919]1330
[1118]1331 curedge = (0, 0)
1332 if individualedge:
1333 for edgepar in edge:
1334 if not _is_valid(edgepar, int):
1335 raise ValueError, "Each element of the 'edge' tuple has \
1336 to be a pair of integers or an integer."
[907]1337 else:
[1118]1338 curedge = edge
[880]1339
1340 # setup fitter
1341 f = fitter()
1342 f.set_function(poly=order)
1343
1344 # setup line finder
[1118]1345 fl = linefinder()
[1268]1346 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1347
1348 if not insitu:
[1118]1349 workscan = self.copy()
[880]1350 else:
[1118]1351 workscan = self
[880]1352
[907]1353 fl.set_scan(workscan)
1354
[1118]1355 rows = range(workscan.nrow())
[880]1356 asaplog.push("Processing:")
1357 for r in rows:
[1118]1358 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1359 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1360 workscan.getpol(r), workscan.getcycle(r))
[880]1361 asaplog.push(msg, False)
[907]1362
[976]1363 # figure out edge parameter
[1118]1364 if individualedge:
1365 if len(edge) <= workscan.getif(r):
1366 raise RuntimeError, "Number of edge elements appear to " \
1367 "be less than the number of IFs"
1368 curedge = edge[workscan.getif(r)]
[919]1369
[976]1370 # setup line finder
[1118]1371 fl.find_lines(r, mask, curedge)
[880]1372 f.set_scan(workscan, fl.get_mask())
1373 f.x = workscan._getabcissa(r)
1374 f.y = workscan._getspectrum(r)
1375 f.data = None
1376 f.fit()
1377 x = f.get_parameters()
[1061]1378 if plot:
1379 f.plot(residual=True)
1380 x = raw_input("Accept fit ( [y]/n ): ")
1381 if x.upper() == 'N':
1382 continue
[880]1383 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1384 if plot:
1385 f._p.unmap()
1386 f._p = None
1387 workscan._add_history("auto_poly_baseline", varlist)
[880]1388 if insitu:
1389 self._assign(workscan)
1390 else:
1391 return workscan
1392
[914]1393 def rotate_linpolphase(self, angle):
1394 """
1395 Rotate the phase of the complex polarization O=Q+iU correlation.
1396 This is always done in situ in the raw data. So if you call this
1397 function more than once then each call rotates the phase further.
1398 Parameters:
1399 angle: The angle (degrees) to rotate (add) by.
1400 Examples:
1401 scan.rotate_linpolphase(2.3)
1402 """
1403 varlist = vars()
[936]1404 self._math._rotate_linpolphase(self, angle)
[914]1405 self._add_history("rotate_linpolphase", varlist)
1406 print_log()
1407 return
[710]1408
[513]1409
[914]1410 def rotate_xyphase(self, angle):
1411 """
1412 Rotate the phase of the XY correlation. This is always done in situ
1413 in the data. So if you call this function more than once
1414 then each call rotates the phase further.
1415 Parameters:
1416 angle: The angle (degrees) to rotate (add) by.
1417 Examples:
1418 scan.rotate_xyphase(2.3)
1419 """
1420 varlist = vars()
[936]1421 self._math._rotate_xyphase(self, angle)
[914]1422 self._add_history("rotate_xyphase", varlist)
1423 print_log()
1424 return
1425
1426 def swap_linears(self):
1427 """
[1348]1428 Swap the linear polarisations XX and YY, or more generally the first two
1429 polarisations, as this also works for circulars.
[914]1430 """
1431 varlist = vars()
[936]1432 self._math._swap_linears(self)
[914]1433 self._add_history("swap_linears", varlist)
1434 print_log()
1435 return
1436
1437 def invert_phase(self):
1438 """
1439 Invert the phase of the complex polarisation
1440 """
1441 varlist = vars()
[936]1442 self._math._invert_phase(self)
[914]1443 self._add_history("invert_phase", varlist)
1444 print_log()
1445 return
1446
[876]1447 def add(self, offset, insitu=None):
[513]1448 """
1449 Return a scan where all spectra have the offset added
1450 Parameters:
1451 offset: the offset
1452 insitu: if False a new scantable is returned.
1453 Otherwise, the scaling is done in-situ
1454 The default is taken from .asaprc (False)
1455 """
1456 if insitu is None: insitu = rcParams['insitu']
[876]1457 self._math._setinsitu(insitu)
[513]1458 varlist = vars()
[876]1459 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1460 s._add_history("add", varlist)
[876]1461 print_log()
1462 if insitu:
1463 self._assign(s)
1464 else:
[513]1465 return s
1466
[1308]1467 def scale(self, factor, tsys=True, insitu=None):
[513]1468 """
1469 Return a scan where all spectra are scaled by the given 'factor'
1470 Parameters:
1471 factor: the scaling factor
1472 insitu: if False a new scantable is returned.
1473 Otherwise, the scaling is done in-situ
1474 The default is taken from .asaprc (False)
1475 tsys: if True (default) then apply the operation to Tsys
1476 as well as the data
1477 """
1478 if insitu is None: insitu = rcParams['insitu']
[876]1479 self._math._setinsitu(insitu)
[513]1480 varlist = vars()
[876]1481 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1482 s._add_history("scale", varlist)
[876]1483 print_log()
1484 if insitu:
1485 self._assign(s)
1486 else:
[513]1487 return s
1488
[1348]1489 def auto_quotient(self, preserve=True, mode='paired'):
[670]1490 """
1491 This function allows you to build quotients automatically.
1492 It assumes the observation has the same number of
1493 "ons" and "offs".
1494 Parameters:
[710]1495 preserve: you can preserve (default) the continuum or
1496 remove it. The equations used are
[670]1497 preserve: Output = Toff * (on/off) - Toff
[1070]1498 remove: Output = Toff * (on/off) - Ton
[1348]1499 mode: the on/off detection mode
1500 'paired' (default)
1501 identifies 'off' scans by the
1502 trailing '_R' (Mopra/Parkes) or
1503 '_e'/'_w' (Tid) and matches
1504 on/off pairs from the observing pattern
1505 'time'
1506 finds the closest off in time
1507
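         Example (a minimal sketch; assumes the data contain matching
         on/off scans):
             q = scan.auto_quotient(preserve=True, mode='paired')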
[670]1508 """
[1348]1509 modes = ["time", "paired"]
[670]1510 if not mode in modes:
[876]1511 msg = "please provide valid mode. Valid modes are %s" % (modes)
1512 raise ValueError(msg)
1513 varlist = vars()
[1348]1514 s = None
1515 if mode.lower() == "paired":
1516 basesel = self.get_selection()
[1356]1517 sel = selector()+basesel
1518 sel.set_query("SRCTYPE==1")
1519 self.set_selection(sel)
[1348]1520 offs = self.copy()
1521 sel.set_query("SRCTYPE==0")
[1356]1522 self.set_selection(sel)
[1348]1523 ons = self.copy()
1524 s = scantable(self._math._quotient(ons, offs, preserve))
1525 self.set_selection(basesel)
1526 elif mode.lower() == "time":
1527 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1528 s._add_history("auto_quotient", varlist)
[876]1529 print_log()
1530 return s
[710]1531
[1145]1532 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1533 """
[1143]1534 Form a quotient using "off" beams when observing in "MX" mode.
1535 Parameters:
[1145]1536 mask: an optional mask to be used when weight == 'stddev'
[1143]1537 weight: How to average the off beams. Default is 'median'.
[1145]1538 preserve: you can preserve (default) the continuum or
1539 remove it. The equations used are
1540 preserve: Output = Toff * (on/off) - Toff
1541 remove: Output = Toff * (on/off) - Ton
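         Example (a minimal sketch; assumes MX-mode data):
             q = scan.mx_quotient(weight='median', preserve=True)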
[1217]1542 """
[1143]1543 if mask is None: mask = ()
[1141]1544 varlist = vars()
1545 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1546 preoff = scantable(self._math._mx_extract(self, 'off'))
1547 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1548 from asapmath import quotient
[1145]1549 q = quotient(on, off, preserve)
[1143]1550 q._add_history("mx_quotient", varlist)
[1145]1551 print_log()
[1217]1552 return q
[513]1553
[718]1554 def freq_switch(self, insitu=None):
1555 """
1556 Apply frequency switching to the data.
1557 Parameters:
1558 insitu: if False a new scantable is returned.
1559 Otherwise, the swictching is done in-situ
1560 The default is taken from .asaprc (False)
1561 Example:
1562 none
1563 """
1564 if insitu is None: insitu = rcParams['insitu']
[876]1565 self._math._setinsitu(insitu)
[718]1566 varlist = vars()
[876]1567 s = scantable(self._math._freqswitch(self))
[1118]1568 s._add_history("freq_switch", varlist)
[876]1569 print_log()
1570 if insitu: self._assign(s)
1571 else: return s
[718]1572
[780]1573 def recalc_azel(self):
1574 """
1575 Recalculate the azimuth and elevation for each position.
1576 Parameters:
1577 none
1578 Example:
1579 """
1580 varlist = vars()
[876]1581 self._recalcazel()
[780]1582 self._add_history("recalc_azel", varlist)
1583 print_log()
1584 return
1585
[513]1586 def __add__(self, other):
1587 varlist = vars()
1588 s = None
1589 if isinstance(other, scantable):
[1308]1590 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1591 elif isinstance(other, float):
[876]1592 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1593 else:
[718]1594 raise TypeError("Other input is not a scantable or float value")
[513]1595 s._add_history("operator +", varlist)
[718]1596 print_log()
[513]1597 return s
1598
1599 def __sub__(self, other):
1600 """
1601 implicit on all axes and on Tsys
1602 """
1603 varlist = vars()
1604 s = None
1605 if isinstance(other, scantable):
[1308]1606 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1607 elif isinstance(other, float):
[876]1608 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1609 else:
[718]1610 raise TypeError("Other input is not a scantable or float value")
[513]1611 s._add_history("operator -", varlist)
[718]1612 print_log()
[513]1613 return s
[710]1614
[513]1615 def __mul__(self, other):
1616 """
1617 implicit on all axes and on Tsys
1618 """
1619 varlist = vars()
1620 s = None
1621 if isinstance(other, scantable):
[1308]1622 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1623 elif isinstance(other, float):
[876]1624 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1625 else:
[718]1626 raise TypeError("Other input is not a scantable or float value")
[513]1627 s._add_history("operator *", varlist)
[718]1628 print_log()
[513]1629 return s
1630
[710]1631
[513]1632 def __div__(self, other):
1633 """
1634 implicit on all axes and on Tsys
1635 """
1636 varlist = vars()
1637 s = None
1638 if isinstance(other, scantable):
[1308]1639 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1640 elif isinstance(other, float):
1641 if other == 0.0:
[718]1642 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1643 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1644 else:
[718]1645 raise TypeError("Other input is not a scantable or float value")
[513]1646 s._add_history("operator /", varlist)
[718]1647 print_log()
[513]1648 return s
1649
[530]1650 def get_fit(self, row=0):
1651 """
1652 Print or return the stored fits for a row in the scantable
1653 Parameters:
1654 row: the row which the fit has been applied to.
1655 """
1656 if row > self.nrow():
1657 return
[976]1658 from asap.asapfit import asapfit
[530]1659 fit = asapfit(self._getfit(row))
[718]1660 if rcParams['verbose']:
[530]1661 print fit
1662 return
1663 else:
1664 return fit.as_dict()
1665
[484]1666 def _add_history(self, funcname, parameters):
1667 # create date
1668 sep = "##"
1669 from datetime import datetime
1670 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1671 hist = dstr+sep
1672 hist += funcname+sep#cdate+sep
1673 if parameters.has_key('self'): del parameters['self']
[1118]1674 for k, v in parameters.iteritems():
[484]1675 if type(v) is dict:
[1118]1676 for k2, v2 in v.iteritems():
[484]1677 hist += k2
1678 hist += "="
[1118]1679 if isinstance(v2, scantable):
[484]1680 hist += 'scantable'
1681 elif k2 == 'mask':
[1118]1682 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1683 hist += str(self._zip_mask(v2))
1684 else:
1685 hist += str(v2)
[484]1686 else:
[513]1687 hist += str(v2)
[484]1688 else:
1689 hist += k
1690 hist += "="
[1118]1691 if isinstance(v, scantable):
[484]1692 hist += 'scantable'
1693 elif k == 'mask':
[1118]1694 if isinstance(v, list) or isinstance(v, tuple):
[513]1695 hist += str(self._zip_mask(v))
1696 else:
1697 hist += str(v)
[484]1698 else:
1699 hist += str(v)
1700 hist += sep
1701 hist = hist[:-2] # remove trailing '##'
1702 self._addhistory(hist)
1703
[710]1704
[484]1705 def _zip_mask(self, mask):
1706 mask = list(mask)
1707 i = 0
1708 segments = []
1709 while mask[i:].count(1):
1710 i += mask[i:].index(1)
1711 if mask[i:].count(0):
1712 j = i + mask[i:].index(0)
1713 else:
[710]1714 j = len(mask)
[1118]1715 segments.append([i, j])
[710]1716 i = j
[484]1717 return segments
[714]1718
[626]1719 def _get_ordinate_label(self):
1720 fu = "("+self.get_fluxunit()+")"
1721 import re
1722 lbl = "Intensity"
[1118]1723 if re.match(".K.", fu):
[626]1724 lbl = "Brightness Temperature "+ fu
[1118]1725 elif re.match(".Jy.", fu):
[626]1726 lbl = "Flux density "+ fu
1727 return lbl
[710]1728
[876]1729 def _check_ifs(self):
1730 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1731 nchans = filter(lambda t: t > 0, nchans)
[876]1732 return (sum(nchans)/len(nchans) == nchans[0])
[976]1733
1734 def _fill(self, names, unit, average):
1735 import os
1736 from asap._asap import stfiller
1737 first = True
1738 fullnames = []
1739 for name in names:
1740 name = os.path.expandvars(name)
1741 name = os.path.expanduser(name)
1742 if not os.path.exists(name):
1743 msg = "File '%s' does not exist" % (name)
1744 if rcParams['verbose']:
1745 asaplog.push(msg)
1746 print asaplog.pop().strip()
1747 return
1748 raise IOError(msg)
1749 fullnames.append(name)
1750 if average:
1751 asaplog.push('Auto averaging integrations')
[1079]1752 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1753 for name in fullnames:
[1073]1754 tbl = Scantable(stype)
1755 r = stfiller(tbl)
[976]1756 msg = "Importing %s..." % (name)
[1118]1757 asaplog.push(msg, False)
[976]1758 print_log()
[1118]1759 r._open(name, -1, -1)
[976]1760 r._read()
[1073]1761 #tbl = r._getdata()
[976]1762 if average:
[1118]1763 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1764 #tbl = tbl2
1765 if not first:
1766 tbl = self._math._merge([self, tbl])
1767 #tbl = tbl2
1768 Scantable.__init__(self, tbl)
1769 r._close()
[1118]1770 del r, tbl
[976]1771 first = False
1772 if unit is not None:
1773 self.set_fluxunit(unit)
1774 self.set_freqframe(rcParams['scantable.freqframe'])
1775