source: trunk/python/scantable.py@ 1377

Last change on this file since 1377 was 1373, checked in by mar637, 17 years ago

Added running median to smooth. This addresses Ticket #115

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 65.4 KB
RevLine 
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62 msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[1259]74 self._add_history("scantable", varlist)
[714]75 print_log()
[102]76
[876]77 def save(self, name=None, format=None, overwrite=False):
[116]78 """
[1280]79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
[116]81 Parameters:
[1093]82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
[497]84 and header in 'name'_header.txt)
[116]85 format: an optional file format. Default is ASAP.
[280]86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]87 'SDFITS' (save as SDFITS file)
[200]88 'ASCII' (saves as ascii text file)
[226]89 'MS2' (saves as an aips++
90 MeasurementSet V2)
[411]91 overwrite: If the file should be overwritten if it exists.
[256]92 The default False is to return with warning
[411]93 without writing the output. USE WITH CARE.
[116]94 Example:
95 scan.save('myscan.asap')
[1118]96 scan.save('myscan.sdfits', 'SDFITS')
[116]97 """
[411]98 from os import path
[226]99 if format is None: format = rcParams['scantable.save']
[256]100 suffix = '.'+format.lower()
[1118]101 if name is None or name == "":
[256]102 name = 'scantable'+suffix
[718]103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
[411]105 name = path.expandvars(name)
[256]106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
[718]108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
[451]114 format2 = format.upper()
115 if format2 == 'ASAP':
[116]116 self._save(name)
117 else:
[989]118 from asap._asap import stwriter as stw
[1118]119 writer = stw(format2)
120 writer.write(self, name)
[718]121 print_log()
[116]122 return
123
[102]124 def copy(self):
125 """
126 Return a copy of this scantable.
[1348]127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]129 Parameters:
[113]130 none
[102]131 Example:
132 copiedscan = scan.copy()
133 """
[876]134 sd = scantable(Scantable._copy(self))
[113]135 return sd
136
[1093]137 def drop_scan(self, scanid=None):
138 """
 139 Return a new scantable where the specified scan number(s) have
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
143 """
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
[1118]157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
[1093]159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
[1118]173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
[1093]177
178
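    # Usage sketch for drop_scan(); the scantable name, file name and scan
    # numbers below are illustrative, not part of the API.
    #   scan = scantable('mydata.rpf')
    #   cleaned = scan.drop_scan([3, 5])   # new scantable without scanno 3 and 5
    #   cleaned.summary()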
[102]179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
[1348]183 Note:
184 See scantable.drop_scan() for the inverse operation.
[102]185 Parameters:
[513]186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
 188 '*_R' gets all 'ref' scans
[102]189 Example:
[513]190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
 194 # get a subset of scans by scanno (as listed in scan.summary())
[1118]195 newscan = scan.get_scan([0, 2, 7, 10])
[102]196 """
197 if scanid is None:
[718]198 if rcParams['verbose']:
[1118]199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
[718]201 return
202 else:
203 raise RuntimeError("No scan given")
204
[102]205 try:
[946]206 bsel = self.get_selection()
207 sel = selector()
[102]208 if type(scanid) is str:
[946]209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
[876]211 scopy = self._copy()
[946]212 self.set_selection(bsel)
[876]213 return scantable(scopy)
[102]214 elif type(scanid) is int:
[946]215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
[876]217 scopy = self._copy()
[946]218 self.set_selection(bsel)
[876]219 return scantable(scopy)
[381]220 elif type(scanid) is list:
[946]221 sel.set_scans(scanid)
222 self.set_selection(sel)
[876]223 scopy = self._copy()
[946]224 self.set_selection(bsel)
[876]225 return scantable(scopy)
[381]226 else:
[718]227 msg = "Illegal scanid type, use 'int' or 'list' if ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
[102]232 except RuntimeError:
[718]233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
[102]235
236 def __str__(self):
[1118]237 return Scantable._summary(self, True)
[102]238
[976]239 def summary(self, filename=None):
[102]240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
 243 filename: the name of a file to write the output to
244 Default - no file output
[102]247 """
[976]248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]250 if filename is not None:
[256]251 if filename == "":
252 filename = 'scantable_summary.txt'
[415]253 from os.path import expandvars, isdir
[411]254 filename = expandvars(filename)
[415]255 if not isdir(filename):
[413]256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
[718]260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
[794]266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
[718]271 else:
272 return info
[710]273
[946]274
275 def get_selection(self):
276 """
[1005]277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
[946]284 """
285 return selector(self._getselection())
286
[1005]287 def set_selection(self, selection=selector()):
[946]288 """
[1005]289 Select a subset of the data. All following operations on this scantable
 290 are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
[1118]295 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]296 scan.set_selection(sel) # set the selection
 297 scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
[946]299 """
300 self._setselection(selection)
301
[876]302 def stats(self, stat='stddev', mask=None):
[102]303 """
[135]304 Determine the specified statistic of the current beam/if/pol
[102]305 Takes a 'mask' as an optional parameter to specify which
306 channels should be excluded.
307 Parameters:
[135]308 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
309 'var', 'stddev', 'avdev', 'rms', 'median'
310 mask: an optional mask specifying where the statistic
[102]311 should be determined.
312 Example:
[113]313 scan.set_unit('channel')
[1118]314 msk = scan.create_mask([100, 200], [500, 600])
[135]315 scan.stats(stat='mean', mask=msk)
[102]316 """
 317 if mask is None:
[876]318 mask = []
[1118]319 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]320 if not self._check_ifs():
[1118]321 raise ValueError("Cannot apply mask as the IFs have different "
322 "number of channels. Please use setselection() "
323 "to select individual IFs")
[256]324
[876]325 statvals = self._math._stats(self, mask, stat)
326 out = ''
327 axes = []
328 for i in range(self.nrow()):
329 axis = []
330 axis.append(self.getscan(i))
331 axis.append(self.getbeam(i))
332 axis.append(self.getif(i))
333 axis.append(self.getpol(i))
334 axis.append(self.getcycle(i))
335 axes.append(axis)
336 tm = self._gettime(i)
337 src = self._getsourcename(i)
338 out += 'Scan[%d] (%s) ' % (axis[0], src)
339 out += 'Time[%s]:\n' % (tm)
340 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
341 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
342 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
343 out += '= %3.3f\n' % (statvals[i])
344 out += "--------------------------------------------------\n"
[256]345
[876]346 if rcParams['verbose']:
347 print "--------------------------------------------------"
[1118]348 print " ", stat
[876]349 print "--------------------------------------------------"
350 print out
[1295]351 #else:
352 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353 # 'axes' : axes,
354 # 'data': statvals}
355 return statvals
[102]356
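    # Usage sketch for stats(): rms over line-free channels. The channel
    # ranges and variable names are illustrative.
    #   scan.set_unit('channel')
    #   msk = scan.create_mask([100, 400], [600, 900])
    #   rms = scan.stats(stat='rms', mask=msk)   # one value per row in the selection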
[1118]357 def stddev(self, mask=None):
[135]358 """
359 Determine the standard deviation of the current beam/if/pol
360 Takes a 'mask' as an optional parameter to specify which
361 channels should be excluded.
362 Parameters:
363 mask: an optional mask specifying where the standard
364 deviation should be determined.
365
366 Example:
367 scan.set_unit('channel')
[1118]368 msk = scan.create_mask([100, 200], [500, 600])
[135]369 scan.stddev(mask=msk)
370 """
[1118]371 return self.stats(stat='stddev', mask=mask)
[135]372
[1003]373
[1259]374 def get_column_names(self):
[1003]375 """
376 Return a list of column names, which can be used for selection.
377 """
[1259]378 return list(Scantable.get_column_names(self))
[1003]379
[876]380 def get_tsys(self):
[113]381 """
382 Return the System temperatures.
383 Returns:
[876]384 a list of Tsys values for the current selection
[113]385 """
[256]386
[876]387 return self._row_callback(self._gettsys, "Tsys")
[256]388
[876]389 def _row_callback(self, callback, label):
390 axes = []
[1118]391 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]392 out = ""
[1118]393 outvec = []
[876]394 for i in range(self.nrow()):
395 axis = []
396 axis.append(self.getscan(i))
397 axis.append(self.getbeam(i))
398 axis.append(self.getif(i))
399 axis.append(self.getpol(i))
400 axis.append(self.getcycle(i))
401 axes.append(axis)
402 tm = self._gettime(i)
403 src = self._getsourcename(i)
404 out += 'Scan[%d] (%s) ' % (axis[0], src)
405 out += 'Time[%s]:\n' % (tm)
406 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
407 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
408 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
409 outvec.append(callback(i))
410 out += '= %3.3f\n' % (outvec[i])
411 out += "--------------------------------------------------\n"
412 if rcParams['verbose']:
413 print "--------------------------------------------------"
414 print " %s" % (label)
415 print "--------------------------------------------------"
416 print out
[1175]417 # disabled because the vector seems more useful
418 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419 return outvec
[256]420
[1070]421 def _get_column(self, callback, row=-1):
422 """
423 """
424 if row == -1:
425 return [callback(i) for i in range(self.nrow())]
426 else:
427 if 0 <= row < self.nrow():
428 return callback(row)
[256]429
[1070]430
[1348]431 def get_time(self, row=-1, asdatetime=False):
[113]432 """
433 Get a list of time stamps for the observations.
[1348]434 Return a string, or a datetime object if asdatetime=True, for each integration time stamp in the scantable.
[113]435 Parameters:
[1348]436 row: row no of integration. Default -1 return all rows
437 asdatetime: return values as datetime objects rather than strings
[113]438 Example:
439 none
440 """
[1175]441 from time import strptime
442 from datetime import datetime
[1348]443 if not asdatetime:
444 return self._get_column(self._gettime, row)
[1175]445 format = "%Y/%m/%d/%H:%M:%S"
 times = self._get_column(self._gettime, row)
 446 if isinstance(times, list):
447 return [datetime(*strptime(i, format)[:6]) for i in times]
448 else:
449 return datetime(*strptime(times, format)[:6])
[102]450
[1348]451
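    # Usage sketch for get_time(); assumes a scantable named 'scan'.
    #   first = scan.get_time(0)                  # time stamp string for row 0
    #   stamps = scan.get_time(asdatetime=True)   # datetime objects, one per row
    #   span = stamps[-1] - stamps[0]             # a datetime.timedelta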
452 def get_inttime(self, row=-1):
453 """
454 Get a list of integration times for the observations.
455 Return a time in seconds for each integration in the scantable.
456 Parameters:
457 row: row no of integration. Default -1 return all rows.
458 Example:
459 none
460 """
461 return self._get_column(self._getinttime, row)
462
463
[714]464 def get_sourcename(self, row=-1):
465 """
 466 Get a list of source names for the observations.
[714]467 Return a string for each integration in the scantable.
468 Parameters:
[1348]469 row: row no of integration. Default -1 return all rows.
[714]470 Example:
471 none
472 """
[1070]473 return self._get_column(self._getsourcename, row)
[714]474
[794]475 def get_elevation(self, row=-1):
476 """
477 Get a list of elevations for the observations.
478 Return a float for each integration in the scantable.
479 Parameters:
[1348]480 row: row no of integration. Default -1 return all rows.
[794]481 Example:
482 none
483 """
[1070]484 return self._get_column(self._getelevation, row)
[794]485
486 def get_azimuth(self, row=-1):
487 """
488 Get a list of azimuths for the observations.
489 Return a float for each integration in the scantable.
490 Parameters:
[1348]491 row: row no of integration. Default -1 return all rows.
[794]492 Example:
493 none
494 """
[1070]495 return self._get_column(self._getazimuth, row)
[794]496
497 def get_parangle(self, row=-1):
498 """
499 Get a list of parallactic angles for the observations.
500 Return a float for each integration in the scantable.
501 Parameters:
[1348]502 row: row no of integration. Default -1 return all rows.
[794]503 Example:
504 none
505 """
[1070]506 return self._get_column(self._getparangle, row)
[794]507
[1070]508 def get_direction(self, row=-1):
509 """
510 Get a list of Positions on the sky (direction) for the observations.
511 Return a float for each integration in the scantable.
512 Parameters:
513 row: row no of integration. Default -1 return all rows
514 Example:
515 none
516 """
517 return self._get_column(self._getdirection, row)
518
[102]519 def set_unit(self, unit='channel'):
520 """
521 Set the unit for all following operations on this scantable
522 Parameters:
523 unit: optional unit, default is 'channel'
[1118]524 one of '*Hz', 'km/s', 'channel', ''
[102]525 """
[484]526 varlist = vars()
[1118]527 if unit in ['', 'pixel', 'channel']:
[113]528 unit = ''
529 inf = list(self._getcoordinfo())
530 inf[0] = unit
531 self._setcoordinfo(inf)
[1118]532 self._add_history("set_unit", varlist)
[113]533
[484]534 def set_instrument(self, instr):
[358]535 """
[1348]536 Set the instrument for subsequent processing.
[358]537 Parameters:
[710]538 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]539 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]540 """
541 self._setInstrument(instr)
[1118]542 self._add_history("set_instrument", vars())
[718]543 print_log()
[358]544
[1190]545 def set_feedtype(self, feedtype):
546 """
547 Overwrite the feed type, which might not be set correctly.
548 Parameters:
549 feedtype: 'linear' or 'circular'
550 """
551 self._setfeedtype(feedtype)
552 self._add_history("set_feedtype", vars())
553 print_log()
554
[276]555 def set_doppler(self, doppler='RADIO'):
556 """
557 Set the doppler for all following operations on this scantable.
558 Parameters:
559 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
560 """
[484]561 varlist = vars()
[276]562 inf = list(self._getcoordinfo())
563 inf[2] = doppler
564 self._setcoordinfo(inf)
[1118]565 self._add_history("set_doppler", varlist)
[718]566 print_log()
[710]567
[226]568 def set_freqframe(self, frame=None):
[113]569 """
570 Set the frame type of the Spectral Axis.
571 Parameters:
[591]572 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]573 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
574 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]575 Examples:
576 scan.set_freqframe('BARY')
577 """
[484]578 if frame is None: frame = rcParams['scantable.freqframe']
579 varlist = vars()
[1118]580 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
581 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]582
[989]583 if frame in valid:
[113]584 inf = list(self._getcoordinfo())
585 inf[1] = frame
586 self._setcoordinfo(inf)
[1118]587 self._add_history("set_freqframe", varlist)
[102]588 else:
[1118]589 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
[718]590 if rcParams['verbose']:
591 print msg
592 else:
593 raise TypeError(msg)
594 print_log()
[710]595
[989]596 def set_dirframe(self, frame=""):
597 """
598 Set the frame type of the Direction on the sky.
599 Parameters:
600 frame: an optional frame type, default ''. Valid frames are:
601 'J2000', 'B1950', 'GALACTIC'
602 Examples:
603 scan.set_dirframe('GALACTIC')
604 """
605 varlist = vars()
606 try:
607 Scantable.set_dirframe(self, frame)
[1118]608 except RuntimeError, msg:
[989]609 if rcParams['verbose']:
610 print msg
611 else:
612 raise
[1118]613 self._add_history("set_dirframe", varlist)
[989]614
[113]615 def get_unit(self):
616 """
617 Get the default unit set in this scantable
618 Returns:
619 A unit string
620 """
621 inf = self._getcoordinfo()
622 unit = inf[0]
623 if unit == '': unit = 'channel'
624 return unit
[102]625
[158]626 def get_abcissa(self, rowno=0):
[102]627 """
[158]628 Get the abcissa in the current coordinate setup for the currently
[113]629 selected Beam/IF/Pol
630 Parameters:
[226]631 rowno: an optional row number in the scantable. Default is the
632 first row, i.e. rowno=0
[113]633 Returns:
[1348]634 The abcissa values and the format string (as a tuple)
[113]635 """
[256]636 abc = self._getabcissa(rowno)
[710]637 lbl = self._getabcissalabel(rowno)
[718]638 print_log()
[158]639 return abc, lbl
[113]640
[1118]641 def flag(self, mask=None):
[1001]642 """
643 Flag the selected data using an optional channel mask.
644 Parameters:
645 mask: an optional channel mask, created with create_mask. Default
646 (no mask) is all channels.
647 """
648 varlist = vars()
[1118]649 if mask is None:
650 mask = []
[1001]651 try:
652 self._flag(mask)
[1118]653 except RuntimeError, msg:
[1001]654 if rcParams['verbose']:
655 print msg
656 return
657 else: raise
658 self._add_history("flag", varlist)
659
[1203]660 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]661 """
662 Flag the data in 'lag' space by providing a frequency to remove.
663 Flagged data in the scantable gets set to 0.0 before the fft.
664 No taper is applied.
665 Parameters:
[1348]666 frequency: the frequency (really a period within the bandwidth)
667 to remove
668 width: the width of the frequency to remove, to remove a
 668 range of frequencies around the centre.
[1203]670 unit: the frequency unit (default "GHz")
 insitu: if False a new scantable is returned.
 Otherwise, the flagging is done in-situ
 The default is taken from .asaprc (False)
671 Notes:
[1348]672 It is recommended to flag edges of the band or strong
673 signals beforehand.
[1192]674 """
675 if insitu is None: insitu = rcParams['insitu']
676 self._math._setinsitu(insitu)
677 varlist = vars()
[1370]678 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]679 if not base.has_key(unit):
680 raise ValueError("%s is not a valid unit." % unit)
681 try:
[1200]682 s = scantable(self._math._lag_flag(self, frequency*base[unit],
683 width*base[unit]))
[1192]684 except RuntimeError, msg:
685 if rcParams['verbose']:
686 print msg
687 return
688 else: raise
689 s._add_history("lag_flag", varlist)
690 print_log()
691 if insitu:
692 self._assign(s)
693 else:
694 return s
[1001]695
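    # Usage sketch for lag_flag(); the ripple period and width are illustrative.
    #   cleaned = scan.lag_flag(10.5, width=0.5, unit="MHz", insitu=False)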
[1192]696
[113]697 def create_mask(self, *args, **kwargs):
698 """
[1118]699 Compute and return a mask based on [min, max] windows.
[189]700 The specified windows are to be INCLUDED, when the mask is
[113]701 applied.
[102]702 Parameters:
[1118]703 [min, max], [min2, max2], ...
[1024]704 Pairs of start/end points (inclusive) specifying the regions
[102]705 to be masked
[189]706 invert: optional argument. If specified as True,
707 return an inverted mask, i.e. the regions
708 specified are EXCLUDED
[513]709 row: create the mask using the specified row for
710 unit conversions, default is row=0
711 only necessary if frequency varies over rows.
[102]712 Example:
[113]713 scan.set_unit('channel')
714 a)
[1118]715 msk = scan.create_mask([400, 500], [800, 900])
[189]716 # masks everything outside 400 and 500
[113]717 # and 800 and 900 in the unit 'channel'
718
719 b)
[1118]720 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]721 # masks the regions between 400 and 500
[113]722 # and 800 and 900 in the unit 'channel'
[1024]723 c)
724 mask only channel 400
[1118]725 msk = scan.create_mask([400, 400])
[102]726 """
[513]727 row = 0
728 if kwargs.has_key("row"):
729 row = kwargs.get("row")
730 data = self._getabcissa(row)
[113]731 u = self._getcoordinfo()[0]
[718]732 if rcParams['verbose']:
[113]733 if u == "": u = "channel"
[718]734 msg = "The current mask window unit is %s" % u
[1118]735 i = self._check_ifs()
736 if not i:
[876]737 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]738 asaplog.push(msg)
[102]739 n = self.nchan()
[1295]740 msk = _n_bools(n, False)
[710]741 # test if args is a 'list' or a 'normal *args - UGLY!!!
742
[1118]743 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
744 and args or args[0]
[710]745 for window in ws:
[102]746 if (len(window) != 2 or window[0] > window[1] ):
[1118]747 raise TypeError("A window needs to be defined as [min, max]")
[102]748 for i in range(n):
[1024]749 if data[i] >= window[0] and data[i] <= window[1]:
[1295]750 msk[i] = True
[113]751 if kwargs.has_key('invert'):
752 if kwargs.get('invert'):
[1295]753 msk = mask_not(msk)
[718]754 print_log()
[102]755 return msk
[710]756
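    # Usage sketch: masks are plain boolean lists, so they can be combined with
    # the helpers imported above (mask_and, mask_or, mask_not). The channel
    # ranges are illustrative and assume a 1024-channel spectrum.
    #   lines = scan.create_mask([400, 500], [800, 900])
    #   noedge = scan.create_mask([0, 100], [924, 1023], invert=True)
    #   msk = mask_and(lines, noedge)   # in the windows and away from the band edges
    #   scan.stats(stat='mean', mask=msk)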
[256]757 def get_restfreqs(self):
758 """
759 Get the restfrequency(s) stored in this scantable.
760 The return value(s) are always of unit 'Hz'
761 Parameters:
762 none
763 Returns:
764 a list of doubles
765 """
766 return list(self._getrestfreqs())
[102]767
[402]768
[931]769 def set_restfreqs(self, freqs=None, unit='Hz'):
770 """
771 Set or replace the restfrequency specified and
772 If the 'freqs' argument holds a scalar,
773 then that rest frequency will be applied to all the selected
774 data. If the 'freqs' argument holds
775 a vector, then it MUST be of equal or smaller length than
776 the number of IFs (and the available restfrequencies will be
777 replaced by this vector). In this case, *all* data have
778 the restfrequency set per IF according
779 to the corresponding value you give in the 'freqs' vector.
[1118]780 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]781 IF 1 gets restfreq 2e9.
[1153]782 You can also specify the frequencies via a linecatalog.
783
[931]784 Parameters:
 785 freqs: list of rest frequency values or string identifiers
786 unit: unit for rest frequency (default 'Hz')
[402]787
[931]788 Example:
789 # set the given restfrequency for the whole table
790 scan.set_restfreqs(freqs=1.4e9)
[1348]791 # If the number of IFs in the data is >= 2, IF0 gets the first
[931]792 # value IF1 the second...
[1118]793 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]794 #set the given restfrequency for the whole table (by name)
795 scan.set_restfreqs(freqs="OH1667")
[391]796
[931]797 Note:
 798 To do more sophisticated restfrequency setting, e.g. on a
799 source and IF basis, use scantable.set_selection() before using
800 this function.
 801 # provided your scantable is called scan
802 selection = selector()
803 selection.set_name("ORION*")
804 selection.set_ifs([1])
805 scan.set_selection(selection)
806 scan.set_restfreqs(freqs=86.6e9)
807
808 """
809 varlist = vars()
[1157]810 from asap import linecatalog
811 # simple value
[1118]812 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]813 self._setrestfreqs(freqs, "",unit)
[1157]814 # list of values
[1118]815 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]816 # list values are scalars
[1118]817 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]818 sel = selector()
819 savesel = self._getselection()
[1322]820 iflist = self.getifnos()
[931]821 for i in xrange(len(freqs)):
[1322]822 sel.set_ifs(iflist[i])
[931]823 self._setselection(sel)
[1153]824 self._setrestfreqs(freqs[i], "",unit)
[931]825 self._setselection(savesel)
[1157]826 # list values are dicts, {'name': ..., 'value': ...}
827 elif isinstance(freqs[-1], dict):
828 sel = selector()
829 savesel = self._getselection()
[1322]830 iflist = self.getifnos()
[1157]831 for i in xrange(len(freqs)):
[1322]832 sel.set_ifs(iflist[i])
[1259]833 self._setselection(sel)
[1157]834 self._setrestfreqs(freqs[i]["value"],
835 freqs[i]["name"], "MHz")
836 self._setselection(savesel)
837 # freqs are to be taken from a linecatalog
[1153]838 elif isinstance(freqs, linecatalog):
839 sel = selector()
840 savesel = self._getselection()
[1322]841 iflist = self.getifnos()
[1153]842 for i in xrange(freqs.nrow()):
[1322]843 sel.set_ifs(iflist[i])
[1153]844 self._setselection(sel)
845 self._setrestfreqs(freqs.get_frequency(i),
846 freqs.get_name(i), "MHz")
847 # ensure that we are not iterating past nIF
848 if i == self.nif()-1: break
849 self._setselection(savesel)
[931]850 else:
851 return
852 self._add_history("set_restfreqs", varlist)
853
[1360]854 def shift_refpix(self, delta):
855 """
 856 Shift the reference pixel of the Spectral Coordinate by an
857 integer amount.
858 Parameters:
859 delta: the amount to shift by
860 Note:
861 Be careful using this with broadband data.
862 """
863 Scantable.shift(self, delta)
[931]864
[1259]865 def history(self, filename=None):
866 """
867 Print the history. Optionally to a file.
[1348]868 Parameters:
869 filename: The name of the file to save the history to.
[1259]870 """
[484]871 hist = list(self._gethistory())
[794]872 out = "-"*80
[484]873 for h in hist:
[489]874 if h.startswith("---"):
[794]875 out += "\n"+h
[489]876 else:
877 items = h.split("##")
878 date = items[0]
879 func = items[1]
880 items = items[2:]
[794]881 out += "\n"+date+"\n"
882 out += "Function: %s\n Parameters:" % (func)
[489]883 for i in items:
884 s = i.split("=")
[1118]885 out += "\n %s = %s" % (s[0], s[1])
[794]886 out += "\n"+"-"*80
[1259]887 if filename is not None:
 888 if filename == "":
889 filename = 'scantable_history.txt'
890 import os
891 filename = os.path.expandvars(os.path.expanduser(filename))
892 if not os.path.isdir(filename):
893 data = open(filename, 'w')
894 data.write(out)
895 data.close()
896 else:
897 msg = "Illegal file name '%s'." % (filename)
898 if rcParams['verbose']:
899 print msg
900 else:
901 raise IOError(msg)
902 if rcParams['verbose']:
903 try:
904 from IPython.genutils import page as pager
905 except ImportError:
906 from pydoc import pager
907 pager(out)
908 else:
909 return out
[484]910 return
[513]911 #
912 # Maths business
913 #
914
[931]915 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]916 """
[1070]917 Return the (time) weighted average of a scan.
[513]918 Note:
[1070]919 in channels only - align if necessary
[513]920 Parameters:
921 mask: an optional mask (only used for 'var' and 'tsys'
922 weighting)
[558]923 scanav: True averages each scan separately
924 False (default) averages all scans together,
[1099]925 weight: Weighting scheme.
926 'none' (mean no weight)
927 'var' (1/var(spec) weighted)
928 'tsys' (1/Tsys**2 weighted)
929 'tint' (integration time weighted)
930 'tintsys' (Tint/Tsys**2)
 931 'median' (median averaging)
[535]932 The default is 'tint'
[931]933 align: align the spectra in velocity before averaging. It takes
934 the time of the first spectrum as reference time.
[513]935 Example:
936 # time average the scantable without using a mask
[710]937 newscan = scan.average_time()
[513]938 """
939 varlist = vars()
[976]940 if weight is None: weight = 'TINT'
[513]941 if mask is None: mask = ()
[1099]942 if scanav: scanav = "SCAN"
943 else: scanav = "NONE"
[1118]944 scan = (self, )
[989]945 try:
[1118]946 if align:
947 scan = (self.freq_align(insitu=False), )
948 s = None
949 if weight.upper() == 'MEDIAN':
950 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
951 scanav))
952 else:
953 s = scantable(self._math._average(scan, mask, weight.upper(),
954 scanav))
955 except RuntimeError, msg:
[989]956 if rcParams['verbose']:
957 print msg
958 return
959 else: raise
[1099]960 s._add_history("average_time", varlist)
[718]961 print_log()
[513]962 return s
[710]963
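    # Usage sketch for average_time() with different weighting schemes;
    # assumes a scantable named 'scan'.
    #   av_all = scan.average_time(weight='tintsys')             # all scans together
    #   av_scan = scan.average_time(scanav=True)                 # each scan separately
    #   av_med = scan.average_time(weight='median', align=True)  # median, velocity-aligned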
[876]964 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]965 """
966 Return a scan where all spectra are converted to either
967 Jansky or Kelvin depending upon the flux units of the scan table.
968 By default the function tries to look the values up internally.
969 If it can't find them (or if you want to over-ride), you must
970 specify EITHER jyperk OR eta (and D which it will try to look up
971 also if you don't set it). jyperk takes precedence if you set both.
972 Parameters:
973 jyperk: the Jy / K conversion factor
974 eta: the aperture efficiency
 975 d: the geometric diameter (metres)
976 insitu: if False a new scantable is returned.
977 Otherwise, the scaling is done in-situ
978 The default is taken from .asaprc (False)
979 """
980 if insitu is None: insitu = rcParams['insitu']
[876]981 self._math._setinsitu(insitu)
[513]982 varlist = vars()
983 if jyperk is None: jyperk = -1.0
984 if d is None: d = -1.0
985 if eta is None: eta = -1.0
[876]986 s = scantable(self._math._convertflux(self, d, eta, jyperk))
987 s._add_history("convert_flux", varlist)
988 print_log()
989 if insitu: self._assign(s)
990 else: return s
[513]991
[876]992 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]993 """
994 Return a scan after applying a gain-elevation correction.
995 The correction can be made via either a polynomial or a
996 table-based interpolation (and extrapolation if necessary).
997 You specify polynomial coefficients, an ascii table or neither.
998 If you specify neither, then a polynomial correction will be made
999 with built in coefficients known for certain telescopes (an error
1000 will occur if the instrument is not known).
1001 The data and Tsys are *divided* by the scaling factors.
1002 Parameters:
1003 poly: Polynomial coefficients (default None) to compute a
1004 gain-elevation correction as a function of
1005 elevation (in degrees).
1006 filename: The name of an ascii file holding correction factors.
1007 The first row of the ascii file must give the column
1008 names and these MUST include columns
1009 "ELEVATION" (degrees) and "FACTOR" (multiply data
1010 by this) somewhere.
1011 The second row must give the data type of the
1012 column. Use 'R' for Real and 'I' for Integer.
1013 An example file would be
1014 (actual factors are arbitrary) :
1015
1016 TIME ELEVATION FACTOR
1017 R R R
1018 0.1 0 0.8
1019 0.2 20 0.85
1020 0.3 40 0.9
1021 0.4 60 0.85
1022 0.5 80 0.8
1023 0.6 90 0.75
1024 method: Interpolation method when correcting from a table.
1025 Values are "nearest", "linear" (default), "cubic"
1026 and "spline"
1027 insitu: if False a new scantable is returned.
1028 Otherwise, the scaling is done in-situ
1029 The default is taken from .asaprc (False)
1030 """
1031
1032 if insitu is None: insitu = rcParams['insitu']
[876]1033 self._math._setinsitu(insitu)
[513]1034 varlist = vars()
1035 if poly is None:
[1118]1036 poly = ()
[513]1037 from os.path import expandvars
1038 filename = expandvars(filename)
[876]1039 s = scantable(self._math._gainel(self, poly, filename, method))
1040 s._add_history("gain_el", varlist)
1041 print_log()
1042 if insitu: self._assign(s)
1043 else: return s
[710]1044
[931]1045 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1046 """
1047 Return a scan where all rows have been aligned in frequency/velocity.
1048 The alignment frequency frame (e.g. LSRK) is that set by function
1049 set_freqframe.
1050 Parameters:
1051 reftime: reference time to align at. By default, the time of
1052 the first row of data is used.
1053 method: Interpolation method for regridding the spectra.
1054 Choose from "nearest", "linear", "cubic" (default)
1055 and "spline"
1056 insitu: if False a new scantable is returned.
1057 Otherwise, the scaling is done in-situ
1058 The default is taken from .asaprc (False)
1059 """
[931]1060 if insitu is None: insitu = rcParams["insitu"]
[876]1061 self._math._setinsitu(insitu)
[513]1062 varlist = vars()
[931]1063 if reftime is None: reftime = ""
1064 s = scantable(self._math._freq_align(self, reftime, method))
[876]1065 s._add_history("freq_align", varlist)
1066 print_log()
1067 if insitu: self._assign(s)
1068 else: return s
[513]1069
[876]1070 def opacity(self, tau, insitu=None):
[513]1071 """
1072 Apply an opacity correction. The data
1073 and Tsys are multiplied by the correction factor.
1074 Parameters:
1075 tau: Opacity from which the correction factor is
1076 exp(tau*ZD)
1077 where ZD is the zenith-distance
1078 insitu: if False a new scantable is returned.
1079 Otherwise, the scaling is done in-situ
1080 The default is taken from .asaprc (False)
1081 """
1082 if insitu is None: insitu = rcParams['insitu']
[876]1083 self._math._setinsitu(insitu)
[513]1084 varlist = vars()
[876]1085 s = scantable(self._math._opacity(self, tau))
1086 s._add_history("opacity", varlist)
1087 print_log()
1088 if insitu: self._assign(s)
1089 else: return s
[513]1090
1091 def bin(self, width=5, insitu=None):
1092 """
1093 Return a scan where all spectra have been binned up.
[1348]1094 Parameters:
[513]1095 width: The bin width (default=5) in pixels
1096 insitu: if False a new scantable is returned.
1097 Otherwise, the scaling is done in-situ
1098 The default is taken from .asaprc (False)
1099 """
1100 if insitu is None: insitu = rcParams['insitu']
[876]1101 self._math._setinsitu(insitu)
[513]1102 varlist = vars()
[876]1103 s = scantable(self._math._bin(self, width))
[1118]1104 s._add_history("bin", varlist)
[876]1105 print_log()
1106 if insitu: self._assign(s)
1107 else: return s
[513]1108
[710]1109
[513]1110 def resample(self, width=5, method='cubic', insitu=None):
1111 """
[1348]1112 Return a scan where all spectra have been binned up.
1113
1114 Parameters:
[513]1115 width: The bin width (default=5) in pixels
 1116 method: Interpolation method for the resampling.
1117 Values are "nearest", "linear", "cubic" (default)
1118 and "spline"
1119 insitu: if False a new scantable is returned.
1120 Otherwise, the scaling is done in-situ
1121 The default is taken from .asaprc (False)
1122 """
1123 if insitu is None: insitu = rcParams['insitu']
[876]1124 self._math._setinsitu(insitu)
[513]1125 varlist = vars()
[876]1126 s = scantable(self._math._resample(self, method, width))
[1118]1127 s._add_history("resample", varlist)
[876]1128 print_log()
1129 if insitu: self._assign(s)
1130 else: return s
[513]1131
1132
[946]1133 def average_pol(self, mask=None, weight='none'):
1134 """
1135 Average the Polarisations together.
1136 Parameters:
1137 mask: An optional mask defining the region, where the
1138 averaging will be applied. The output will have all
1139 specified points masked.
1140 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1141 weighted), or 'tsys' (1/Tsys**2 weighted)
1142 """
1143 varlist = vars()
1144 if mask is None:
1145 mask = ()
[1010]1146 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1147 s._add_history("average_pol", varlist)
[946]1148 print_log()
[992]1149 return s
[513]1150
[1145]1151 def average_beam(self, mask=None, weight='none'):
1152 """
1153 Average the Beams together.
1154 Parameters:
1155 mask: An optional mask defining the region, where the
1156 averaging will be applied. The output will have all
1157 specified points masked.
1158 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1159 weighted), or 'tsys' (1/Tsys**2 weighted)
1160 """
1161 varlist = vars()
1162 if mask is None:
1163 mask = ()
1164 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1165 s._add_history("average_beam", varlist)
1166 print_log()
1167 return s
1168
[992]1169 def convert_pol(self, poltype=None):
1170 """
1171 Convert the data to a different polarisation type.
1172 Parameters:
1173 poltype: The new polarisation type. Valid types are:
1174 "linear", "stokes" and "circular"
1175 """
1176 varlist = vars()
1177 try:
1178 s = scantable(self._math._convertpol(self, poltype))
[1118]1179 except RuntimeError, msg:
[992]1180 if rcParams['verbose']:
[1118]1181 print msg
1182 return
[992]1183 else:
1184 raise
[1118]1185 s._add_history("convert_pol", varlist)
[992]1186 print_log()
1187 return s
1188
[876]1189 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1190 """
1191 Smooth the spectrum by the specified kernel (conserving flux).
1192 Parameters:
1193 kernel: The type of smoothing kernel. Select from
[1373]1194 'hanning' (default), 'gaussian', 'boxcar' and
1195 'rmedian'
[513]1196 width: The width of the kernel in pixels. For hanning this is
 1197 ignored, otherwise it defaults to 5 pixels.
1198 For 'gaussian' it is the Full Width Half
1199 Maximum. For 'boxcar' it is the full width.
[1373]1200 For 'rmedian' it is the half width.
[513]1201 insitu: if False a new scantable is returned.
1202 Otherwise, the scaling is done in-situ
1203 The default is taken from .asaprc (False)
1204 Example:
1205 none
1206 """
1207 if insitu is None: insitu = rcParams['insitu']
[876]1208 self._math._setinsitu(insitu)
[513]1209 varlist = vars()
[1118]1210 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1211 s._add_history("smooth", varlist)
1212 print_log()
1213 if insitu: self._assign(s)
1214 else: return s
[513]1215
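    # Usage sketch for smooth(), including the running median kernel added for
    # Ticket #115. The widths are illustrative.
    #   sm1 = scan.smooth(kernel='gaussian', width=10)   # FWHM of 10 channels
    #   sm2 = scan.smooth(kernel='rmedian', width=7)     # running median, half width 7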
[876]1216
[1061]1217 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
[513]1218 """
1219 Return a scan which has been baselined (all rows) by a polynomial.
1220 Parameters:
[794]1221 mask: an optional mask
1222 order: the order of the polynomial (default is 0)
[1061]1223 plot: plot the fit and the residual. In this mode each
 1224 individual fit has to be approved by typing 'y'
1225 or 'n'
[794]1226 insitu: if False a new scantable is returned.
1227 Otherwise, the scaling is done in-situ
1228 The default is taken from .asaprc (False)
[513]1229 Example:
1230 # return a scan baselined by a third order polynomial,
1231 # not using a mask
1232 bscan = scan.poly_baseline(order=3)
[579]1233 """
[513]1234 if insitu is None: insitu = rcParams['insitu']
1235 varlist = vars()
1236 if mask is None:
[1295]1237 mask = [True for i in xrange(self.nchan(-1))]
[513]1238 from asap.asapfitter import fitter
[1217]1239 try:
1240 f = fitter()
1241 f.set_scan(self, mask)
1242 f.set_function(poly=order)
1243 s = f.auto_fit(insitu, plot=plot)
1244 s._add_history("poly_baseline", varlist)
1245 print_log()
1246 if insitu: self._assign(s)
1247 else: return s
1248 except RuntimeError:
1249 msg = "The fit failed, possibly because it didn't converge."
1250 if rcParams['verbose']:
1251 print msg
1252 return
1253 else:
1254 raise RuntimeError(msg)
[513]1255
[1217]1256
[1118]1257 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1258 threshold=3, chan_avg_limit=1, plot=False,
1259 insitu=None):
[880]1260 """
1261 Return a scan which has been baselined (all rows) by a polynomial.
1262 Spectral lines are detected first using linefinder and masked out
1263 to avoid them affecting the baseline solution.
1264
1265 Parameters:
 1266 mask: an optional mask retrieved from scantable
1267 edge: an optional number of channel to drop at
1268 the edge of spectrum. If only one value is
1269 specified, the same number will be dropped from
1270 both sides of the spectrum. Default is to keep
[907]1271 all channels. Nested tuples represent individual
[976]1272 edge selection for different IFs (a number of spectral
1273 channels can be different)
[880]1274 order: the order of the polynomial (default is 0)
1275 threshold: the threshold used by line finder. It is better to
1276 keep it large as only strong lines affect the
1277 baseline solution.
[1280]1278 chan_avg_limit:
 1279 a maximum number of consecutive spectral channels to
1280 average during the search of weak and broad lines.
1281 The default is no averaging (and no search for weak
1282 lines). If such lines can affect the fitted baseline
1283 (e.g. a high order polynomial is fitted), increase this
1284 parameter (usually values up to 8 are reasonable). Most
1285 users of this method should find the default value
1286 sufficient.
[1061]1287 plot: plot the fit and the residual. In this mode each
 1288 individual fit has to be approved by typing 'y'
1289 or 'n'
[880]1290 insitu: if False a new scantable is returned.
1291 Otherwise, the scaling is done in-situ
1292 The default is taken from .asaprc (False)
1293
1294 Example:
1295 scan2=scan.auto_poly_baseline(order=7)
1296 """
1297 if insitu is None: insitu = rcParams['insitu']
1298 varlist = vars()
1299 from asap.asapfitter import fitter
1300 from asap.asaplinefind import linefinder
1301 from asap import _is_sequence_or_number as _is_valid
1302
[976]1303 # check whether edge is set up for each IF individually
[1118]1304 individualedge = False
1305 if len(edge) > 1:
1306 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
 1307 individualedge = True
[907]1308
[1118]1309 if not _is_valid(edge, int) and not individualedge:
[909]1310 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1311 pair of integers specified as a tuple. Nested tuples are allowed \
1312 to make individual selection for different IFs."
[919]1313
[1118]1314 curedge = (0, 0)
1315 if individualedge:
1316 for edgepar in edge:
1317 if not _is_valid(edgepar, int):
1318 raise ValueError, "Each element of the 'edge' tuple has \
1319 to be a pair of integers or an integer."
[907]1320 else:
[1118]1321 curedge = edge
[880]1322
1323 # setup fitter
1324 f = fitter()
1325 f.set_function(poly=order)
1326
1327 # setup line finder
[1118]1328 fl = linefinder()
[1268]1329 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1330
1331 if not insitu:
[1118]1332 workscan = self.copy()
[880]1333 else:
[1118]1334 workscan = self
[880]1335
[907]1336 fl.set_scan(workscan)
1337
[1118]1338 rows = range(workscan.nrow())
[880]1339 asaplog.push("Processing:")
1340 for r in rows:
[1118]1341 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1342 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1343 workscan.getpol(r), workscan.getcycle(r))
[880]1344 asaplog.push(msg, False)
[907]1345
[976]1346 # figure out edge parameter
[1118]1347 if individualedge:
 1348 if len(edge) <= workscan.getif(r):
 1349 raise RuntimeError, "Number of edge elements appears to " \
 1350 "be less than the number of IFs"
1351 curedge = edge[workscan.getif(r)]
[919]1352
[976]1353 # setup line finder
[1118]1354 fl.find_lines(r, mask, curedge)
[880]1355 f.set_scan(workscan, fl.get_mask())
1356 f.x = workscan._getabcissa(r)
1357 f.y = workscan._getspectrum(r)
1358 f.data = None
1359 f.fit()
1360 x = f.get_parameters()
[1061]1361 if plot:
1362 f.plot(residual=True)
1363 x = raw_input("Accept fit ( [y]/n ): ")
1364 if x.upper() == 'N':
1365 continue
[880]1366 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1367 if plot:
1368 f._p.unmap()
1369 f._p = None
1370 workscan._add_history("auto_poly_baseline", varlist)
[880]1371 if insitu:
1372 self._assign(workscan)
1373 else:
1374 return workscan
1375
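    # Usage sketch for auto_poly_baseline(); the orders, edges and threshold
    # are illustrative.
    #   b1 = scan.auto_poly_baseline(order=2, edge=(70, 70))   # same edges for all IFs
    #   b2 = scan.auto_poly_baseline(order=5, threshold=5,
    #                                edge=((60, 0), (80, 80)))  # per-IF edges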
[914]1376 def rotate_linpolphase(self, angle):
1377 """
1378 Rotate the phase of the complex polarization O=Q+iU correlation.
1379 This is always done in situ in the raw data. So if you call this
1380 function more than once then each call rotates the phase further.
1381 Parameters:
1382 angle: The angle (degrees) to rotate (add) by.
1383 Examples:
1384 scan.rotate_linpolphase(2.3)
1385 """
1386 varlist = vars()
[936]1387 self._math._rotate_linpolphase(self, angle)
[914]1388 self._add_history("rotate_linpolphase", varlist)
1389 print_log()
1390 return
[710]1391
[513]1392
[914]1393 def rotate_xyphase(self, angle):
1394 """
1395 Rotate the phase of the XY correlation. This is always done in situ
1396 in the data. So if you call this function more than once
1397 then each call rotates the phase further.
1398 Parameters:
1399 angle: The angle (degrees) to rotate (add) by.
1400 Examples:
1401 scan.rotate_xyphase(2.3)
1402 """
1403 varlist = vars()
[936]1404 self._math._rotate_xyphase(self, angle)
[914]1405 self._add_history("rotate_xyphase", varlist)
1406 print_log()
1407 return
1408
1409 def swap_linears(self):
1410 """
[1348]1411 Swap the linear polarisations XX and YY, or more generally the first two
 1412 polarisations, as this also works for circulars.
[914]1413 """
1414 varlist = vars()
[936]1415 self._math._swap_linears(self)
[914]1416 self._add_history("swap_linears", varlist)
1417 print_log()
1418 return
1419
1420 def invert_phase(self):
1421 """
1422 Invert the phase of the complex polarisation
1423 """
1424 varlist = vars()
[936]1425 self._math._invert_phase(self)
[914]1426 self._add_history("invert_phase", varlist)
1427 print_log()
1428 return
1429
[876]1430 def add(self, offset, insitu=None):
[513]1431 """
1432 Return a scan where all spectra have the offset added
1433 Parameters:
1434 offset: the offset
1435 insitu: if False a new scantable is returned.
1436 Otherwise, the scaling is done in-situ
1437 The default is taken from .asaprc (False)
1438 """
1439 if insitu is None: insitu = rcParams['insitu']
[876]1440 self._math._setinsitu(insitu)
[513]1441 varlist = vars()
[876]1442 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1443 s._add_history("add", varlist)
[876]1444 print_log()
1445 if insitu:
1446 self._assign(s)
1447 else:
[513]1448 return s
1449
[1308]1450 def scale(self, factor, tsys=True, insitu=None):
[513]1451 """
 1452 Return a scan where all spectra are scaled by the given 'factor'
1453 Parameters:
1454 factor: the scaling factor
1455 insitu: if False a new scantable is returned.
1456 Otherwise, the scaling is done in-situ
1457 The default is taken from .asaprc (False)
1458 tsys: if True (default) then apply the operation to Tsys
1459 as well as the data
1460 """
1461 if insitu is None: insitu = rcParams['insitu']
[876]1462 self._math._setinsitu(insitu)
[513]1463 varlist = vars()
[876]1464 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1465 s._add_history("scale", varlist)
[876]1466 print_log()
1467 if insitu:
1468 self._assign(s)
1469 else:
[513]1470 return s
1471
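    # Usage sketch for scale(): apply a calibration factor to the spectra only,
    # leaving Tsys untouched. The factor is illustrative.
    #   calibrated = scan.scale(1.25, tsys=False, insitu=False)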
[1348]1472 def auto_quotient(self, preserve=True, mode='paired'):
[670]1473 """
 1474 This function allows you to build quotients automatically.
 1475 It assumes the observation has the same number of
 1476 "ons" and "offs".
1477 Parameters:
[710]1478 preserve: you can preserve (default) the continuum or
1479 remove it. The equations used are
[670]1480 preserve: Output = Toff * (on/off) - Toff
[1070]1481 remove: Output = Toff * (on/off) - Ton
[1348]1482 mode: the on/off detection mode
1483 'paired' (default)
1484 identifies 'off' scans by the
1485 trailing '_R' (Mopra/Parkes) or
1486 '_e'/'_w' (Tid) and matches
1487 on/off pairs from the observing pattern
1488 'time'
1489 finds the closest off in time
1490
[670]1491 """
[1348]1492 modes = ["time", "paired"]
[670]1493 if not mode in modes:
[876]1494 msg = "please provide valid mode. Valid modes are %s" % (modes)
1495 raise ValueError(msg)
1496 varlist = vars()
[1348]1497 s = None
1498 if mode.lower() == "paired":
1499 basesel = self.get_selection()
[1356]1500 sel = selector()+basesel
1501 sel.set_query("SRCTYPE==1")
1502 self.set_selection(sel)
[1348]1503 offs = self.copy()
1504 sel.set_query("SRCTYPE==0")
[1356]1505 self.set_selection(sel)
[1348]1506 ons = self.copy()
1507 s = scantable(self._math._quotient(ons, offs, preserve))
1508 self.set_selection(basesel)
1509 elif mode.lower() == "time":
1510 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1511 s._add_history("auto_quotient", varlist)
[876]1512 print_log()
1513 return s
[710]1514
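    # Usage sketch for auto_quotient() on a position-switched observation;
    # assumes a scantable named 'scan'.
    #   q1 = scan.auto_quotient()                              # match on/off pairs
    #   q2 = scan.auto_quotient(mode='time', preserve=False)   # nearest off in time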
[1145]1515 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1516 """
[1143]1517 Form a quotient using "off" beams when observing in "MX" mode.
1518 Parameters:
[1145]1519 mask: an optional mask to be used when weight == 'stddev'
[1143]1520 weight: How to average the off beams. Default is 'median'.
[1145]1521 preserve: you can preserve (default) the continuum or
1522 remove it. The equations used are
1523 preserve: Output = Toff * (on/off) - Toff
1524 remove: Output = Toff * (on/off) - Ton
[1217]1525 """
[1143]1526 if mask is None: mask = ()
[1141]1527 varlist = vars()
1528 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1529 preoff = scantable(self._math._mx_extract(self, 'off'))
1530 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1531 from asapmath import quotient
[1145]1532 q = quotient(on, off, preserve)
[1143]1533 q._add_history("mx_quotient", varlist)
[1145]1534 print_log()
[1217]1535 return q
[513]1536
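    # Usage sketch for mx_quotient() on beam-switched ("MX") data.
    #   q = scan.mx_quotient()                                  # median-averaged off beams
    #   q2 = scan.mx_quotient(weight='tsys', preserve=False)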
[718]1537 def freq_switch(self, insitu=None):
1538 """
1539 Apply frequency switching to the data.
1540 Parameters:
1541 insitu: if False a new scantable is returned.
1542 Otherwise, the swictching is done in-situ
1543 The default is taken from .asaprc (False)
1544 Example:
1545 none
1546 """
1547 if insitu is None: insitu = rcParams['insitu']
[876]1548 self._math._setinsitu(insitu)
[718]1549 varlist = vars()
[876]1550 s = scantable(self._math._freqswitch(self))
[1118]1551 s._add_history("freq_switch", varlist)
[876]1552 print_log()
1553 if insitu: self._assign(s)
1554 else: return s
[718]1555
[780]1556 def recalc_azel(self):
1557 """
1558 Recalculate the azimuth and elevation for each position.
1559 Parameters:
1560 none
1561 Example:
1562 """
1563 varlist = vars()
[876]1564 self._recalcazel()
[780]1565 self._add_history("recalc_azel", varlist)
1566 print_log()
1567 return
1568
[513]1569 def __add__(self, other):
1570 varlist = vars()
1571 s = None
1572 if isinstance(other, scantable):
[1308]1573 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1574 elif isinstance(other, float):
[876]1575 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1576 else:
[718]1577 raise TypeError("Other input is not a scantable or float value")
[513]1578 s._add_history("operator +", varlist)
[718]1579 print_log()
[513]1580 return s
1581
1582 def __sub__(self, other):
1583 """
1584 implicit on all axes and on Tsys
1585 """
1586 varlist = vars()
1587 s = None
1588 if isinstance(other, scantable):
[1308]1589 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1590 elif isinstance(other, float):
[876]1591 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1592 else:
[718]1593 raise TypeError("Other input is not a scantable or float value")
[513]1594 s._add_history("operator -", varlist)
[718]1595 print_log()
[513]1596 return s
[710]1597
[513]1598 def __mul__(self, other):
1599 """
1600 implicit on all axes and on Tsys
1601 """
1602 varlist = vars()
1603 s = None
1604 if isinstance(other, scantable):
[1308]1605 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1606 elif isinstance(other, float):
[876]1607 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1608 else:
[718]1609 raise TypeError("Other input is not a scantable or float value")
[513]1610 s._add_history("operator *", varlist)
[718]1611 print_log()
[513]1612 return s
1613
[710]1614
[513]1615 def __div__(self, other):
1616 """
1617 implicit on all axes and on Tsys
1618 """
1619 varlist = vars()
1620 s = None
1621 if isinstance(other, scantable):
[1308]1622 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1623 elif isinstance(other, float):
1624 if other == 0.0:
[718]1625 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1626 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1627 else:
[718]1628 raise TypeError("Other input is not a scantable or float value")
[513]1629 s._add_history("operator /", varlist)
[718]1630 print_log()
[513]1631 return s
1632
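    # Usage sketch for the arithmetic operators: they accept another scantable
    # or a float and return a new scantable; see the individual docstrings for
    # how Tsys is treated. 'scan_on' and 'scan_off' are illustrative names.
    #   diff = scan_on - scan_off   # channel-by-channel difference of two scantables
    #   scaled = scan * 2.0         # multiply all spectra by a constant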
[530]1633 def get_fit(self, row=0):
1634 """
1635 Print or return the stored fits for a row in the scantable
1636 Parameters:
1637 row: the row which the fit has been applied to.
1638 """
 1639 if row >= self.nrow():
1640 return
[976]1641 from asap.asapfit import asapfit
[530]1642 fit = asapfit(self._getfit(row))
[718]1643 if rcParams['verbose']:
[530]1644 print fit
1645 return
1646 else:
1647 return fit.as_dict()
1648
[484]1649 def _add_history(self, funcname, parameters):
1650 # create date
1651 sep = "##"
1652 from datetime import datetime
1653 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1654 hist = dstr+sep
1655 hist += funcname+sep#cdate+sep
1656 if parameters.has_key('self'): del parameters['self']
[1118]1657 for k, v in parameters.iteritems():
[484]1658 if type(v) is dict:
[1118]1659 for k2, v2 in v.iteritems():
[484]1660 hist += k2
1661 hist += "="
[1118]1662 if isinstance(v2, scantable):
[484]1663 hist += 'scantable'
1664 elif k2 == 'mask':
[1118]1665 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1666 hist += str(self._zip_mask(v2))
1667 else:
1668 hist += str(v2)
[484]1669 else:
[513]1670 hist += str(v2)
[484]1671 else:
1672 hist += k
1673 hist += "="
[1118]1674 if isinstance(v, scantable):
[484]1675 hist += 'scantable'
1676 elif k == 'mask':
[1118]1677 if isinstance(v, list) or isinstance(v, tuple):
[513]1678 hist += str(self._zip_mask(v))
1679 else:
1680 hist += str(v)
[484]1681 else:
1682 hist += str(v)
1683 hist += sep
1684 hist = hist[:-2] # remove trailing '##'
1685 self._addhistory(hist)
1686
[710]1687
[484]1688 def _zip_mask(self, mask):
1689 mask = list(mask)
1690 i = 0
1691 segments = []
1692 while mask[i:].count(1):
1693 i += mask[i:].index(1)
1694 if mask[i:].count(0):
1695 j = i + mask[i:].index(0)
1696 else:
[710]1697 j = len(mask)
[1118]1698 segments.append([i, j])
[710]1699 i = j
[484]1700 return segments
[714]1701
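    # Sketch of what _zip_mask() produces: it compresses a boolean mask into
    # [start, end) channel segments for the history entries written by
    # _add_history(). For example (illustrative input):
    #   self._zip_mask([0, 1, 1, 0, 1])  ->  [[1, 3], [4, 5]]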
[626]1702 def _get_ordinate_label(self):
1703 fu = "("+self.get_fluxunit()+")"
1704 import re
1705 lbl = "Intensity"
[1118]1706 if re.match(".K.", fu):
[626]1707 lbl = "Brightness Temperature "+ fu
[1118]1708 elif re.match(".Jy.", fu):
[626]1709 lbl = "Flux density "+ fu
1710 return lbl
[710]1711
[876]1712 def _check_ifs(self):
1713 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1714 nchans = filter(lambda t: t > 0, nchans)
[876]1715 return (sum(nchans)/len(nchans) == nchans[0])
[976]1716
1717 def _fill(self, names, unit, average):
1718 import os
1719 from asap._asap import stfiller
1720 first = True
1721 fullnames = []
1722 for name in names:
1723 name = os.path.expandvars(name)
1724 name = os.path.expanduser(name)
1725 if not os.path.exists(name):
 1726 msg = "File '%s' does not exist" % (name)
1727 if rcParams['verbose']:
1728 asaplog.push(msg)
1729 print asaplog.pop().strip()
1730 return
1731 raise IOError(msg)
1732 fullnames.append(name)
1733 if average:
1734 asaplog.push('Auto averaging integrations')
[1079]1735 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1736 for name in fullnames:
[1073]1737 tbl = Scantable(stype)
1738 r = stfiller(tbl)
[976]1739 msg = "Importing %s..." % (name)
[1118]1740 asaplog.push(msg, False)
[976]1741 print_log()
[1118]1742 r._open(name, -1, -1)
[976]1743 r._read()
[1073]1744 #tbl = r._getdata()
[976]1745 if average:
[1118]1746 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1747 #tbl = tbl2
1748 if not first:
1749 tbl = self._math._merge([self, tbl])
1750 #tbl = tbl2
1751 Scantable.__init__(self, tbl)
1752 r._close()
[1118]1753 del r, tbl
[976]1754 first = False
1755 if unit is not None:
1756 self.set_fluxunit(unit)
1757 self.set_freqframe(rcParams['scantable.freqframe'])
1758