source: branches/Release2.1.1/python/scantable.py@ 2614

Last change on this file since 2614 was 1290, checked in by mar637, 18 years ago

More on ticket #81. scantable.stats now returns value as a list of length scantable.nrow

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 63.5 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1283]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[1259]74 self._add_history("scantable", varlist)
[714]75 print_log()
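        # A minimal construction sketch; the file names below are hypothetical:
        #   s = scantable('2008_obs.rpf')               # read an rpfits file
        #   s = scantable('orion.asap', average=False)  # reopen a saved asap table
        #   s = scantable('data.sdfits', unit='Jy')     # override the brightness unit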
[102]76
[876]77 def save(self, name=None, format=None, overwrite=False):
[116]78 """
[1276]79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
[116]81 Parameters:
[1093]82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
[497]84 and header in 'name'_header.txt)
[116]85 format: an optional file format. Default is ASAP.
[280]86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]87 'SDFITS' (save as SDFITS file)
[200]88 'ASCII' (saves as ascii text file)
[226]89 'MS2' (saves as an aips++
90 MeasurementSet V2)
[411]91 overwrite: If the file should be overwritten if it exists.
[256]92 The default False is to return with warning
[411]93 without writing the output. USE WITH CARE.
[116]94 Example:
95 scan.save('myscan.asap')
[1118]96 scan.save('myscan.sdfits', 'SDFITS')
[116]97 """
[411]98 from os import path
[226]99 if format is None: format = rcParams['scantable.save']
[256]100 suffix = '.'+format.lower()
[1118]101 if name is None or name == "":
[256]102 name = 'scantable'+suffix
[718]103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
[411]105 name = path.expandvars(name)
[256]106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
[718]108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
[451]114 format2 = format.upper()
115 if format2 == 'ASAP':
[116]116 self._save(name)
117 else:
[989]118 from asap._asap import stwriter as stw
[1118]119 writer = stw(format2)
120 writer.write(self, name)
[718]121 print_log()
[116]122 return
123
[102]124 def copy(self):
125 """
126 Return a copy of this scantable.
127 Parameters:
[113]128 none
[102]129 Example:
130 copiedscan = scan.copy()
131 """
[876]132 sd = scantable(Scantable._copy(self))
[113]133 return sd
134
[1093]135 def drop_scan(self, scanid=None):
136 """
137 Return a new scantable where the specified scan number(s) has(have)
138 been dropped.
139 Parameters:
140 scanid: a (list of) scan number(s)
141 """
142 from asap import _is_sequence_or_number as _is_valid
143 from asap import _to_list
144 from asap import unique
145 if not _is_valid(scanid):
146 if rcParams['verbose']:
147 print "Please specify a scanno to drop from the scantable"
148 return
149 else:
150 raise RuntimeError("No scan given")
151 try:
152 scanid = _to_list(scanid)
153 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
154 for sid in scanid: allscans.remove(sid)
[1118]155 if len(allscans) == 0:
156 raise ValueError("Can't remove all scans")
[1093]157 except ValueError:
158 if rcParams['verbose']:
159 print "Couldn't find any match."
160 return
161 else: raise
162 try:
163 bsel = self.get_selection()
164 sel = selector()
165 sel.set_scans(allscans)
166 self.set_selection(bsel+sel)
167 scopy = self._copy()
168 self.set_selection(bsel)
169 return scantable(scopy)
170 except RuntimeError:
[1118]171 if rcParams['verbose']:
172 print "Couldn't find any match."
173 else:
174 raise
[1093]175
176
[102]177 def get_scan(self, scanid=None):
178 """
179 Return a specific scan (by scanno) or collection of scans (by
180 source name) in a new scantable.
181 Parameters:
[513]182 scanid: a (list of) scanno or a source name, unix-style
183 patterns are accepted for source name matching, e.g.
 184 '*_R' gets all 'ref' scans
[102]185 Example:
[513]186 # get all scans containing the source '323p459'
187 newscan = scan.get_scan('323p459')
188 # get all 'off' scans
189 refscans = scan.get_scan('*_R')
 190 # get a subset of scans by scanno (as listed in scan.summary())
[1118]191 newscan = scan.get_scan([0, 2, 7, 10])
[102]192 """
193 if scanid is None:
[718]194 if rcParams['verbose']:
[1118]195 print "Please specify a scan no or name to " \
196 "retrieve from the scantable"
[718]197 return
198 else:
199 raise RuntimeError("No scan given")
200
[102]201 try:
[946]202 bsel = self.get_selection()
203 sel = selector()
[102]204 if type(scanid) is str:
[946]205 sel.set_name(scanid)
206 self.set_selection(bsel+sel)
[876]207 scopy = self._copy()
[946]208 self.set_selection(bsel)
[876]209 return scantable(scopy)
[102]210 elif type(scanid) is int:
[946]211 sel.set_scans([scanid])
212 self.set_selection(bsel+sel)
[876]213 scopy = self._copy()
[946]214 self.set_selection(bsel)
[876]215 return scantable(scopy)
[381]216 elif type(scanid) is list:
[946]217 sel.set_scans(scanid)
218 self.set_selection(sel)
[876]219 scopy = self._copy()
[946]220 self.set_selection(bsel)
[876]221 return scantable(scopy)
[381]222 else:
[718]223 msg = "Illegal scanid type, use 'int' or 'list' of ints."
224 if rcParams['verbose']:
225 print msg
226 else:
227 raise TypeError(msg)
[102]228 except RuntimeError:
[718]229 if rcParams['verbose']: print "Couldn't find any match."
230 else: raise
[102]231
232 def __str__(self):
[1118]233 return Scantable._summary(self, True)
[102]234
[976]235 def summary(self, filename=None):
[102]236 """
237 Print a summary of the contents of this scantable.
238 Parameters:
 239 filename: the name of a file to write the output to
 240 Default - no file output
[102]243 """
[976]244 info = Scantable._summary(self, True)
245 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]246 if filename is not None:
[256]247 if filename == "":
248 filename = 'scantable_summary.txt'
[415]249 from os.path import expandvars, isdir
[411]250 filename = expandvars(filename)
[415]251 if not isdir(filename):
[413]252 data = open(filename, 'w')
253 data.write(info)
254 data.close()
255 else:
[718]256 msg = "Illegal file name '%s'." % (filename)
257 if rcParams['verbose']:
258 print msg
259 else:
260 raise IOError(msg)
261 if rcParams['verbose']:
[794]262 try:
263 from IPython.genutils import page as pager
264 except ImportError:
265 from pydoc import pager
266 pager(info)
[718]267 else:
268 return info
[710]269
[946]270
271 def get_selection(self):
272 """
[1005]273 Get the selection object currently set on this scantable.
274 Parameters:
275 none
276 Example:
277 sel = scan.get_selection()
278 sel.set_ifs(0) # select IF 0
279 scan.set_selection(sel) # apply modified selection
[946]280 """
281 return selector(self._getselection())
282
[1005]283 def set_selection(self, selection=selector()):
[946]284 """
[1005]285 Select a subset of the data. All following operations on this scantable
 286 are only applied to this selection.
287 Parameters:
288 selection: a selector object (default unset the selection)
289 Examples:
290 sel = selector() # create a selection object
[1118]291 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]292 scan.set_selection(sel) # set the selection
 293 scan.summary() # will only print summary of scanno 0 and 3
294 scan.set_selection() # unset the selection
[946]295 """
296 self._setselection(selection)
297
[876]298 def stats(self, stat='stddev', mask=None):
[102]299 """
[135]300 Determine the specified statistic of the current beam/if/pol
[102]301 Takes a 'mask' as an optional parameter to specify which
302 channels should be excluded.
303 Parameters:
[135]304 stat: 'min', 'max', 'sumsq', 'sum', 'mean',
305 'var', 'stddev', 'avdev', 'rms', 'median'
306 mask: an optional mask specifying where the statistic
[102]307 should be determined.
308 Example:
[113]309 scan.set_unit('channel')
[1118]310 msk = scan.create_mask([100, 200], [500, 600])
[135]311 scan.stats(stat='mean', mask=msk)
[102]312 """
[102]313 if mask is None:
[876]314 mask = []
[1118]315 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]316 if not self._check_ifs():
[1118]317 raise ValueError("Cannot apply mask as the IFs have different "
318 "number of channels. Please use setselection() "
319 "to select individual IFs")
[256]320
[876]321 statvals = self._math._stats(self, mask, stat)
322 out = ''
323 axes = []
324 for i in range(self.nrow()):
325 axis = []
326 axis.append(self.getscan(i))
327 axis.append(self.getbeam(i))
328 axis.append(self.getif(i))
329 axis.append(self.getpol(i))
330 axis.append(self.getcycle(i))
331 axes.append(axis)
332 tm = self._gettime(i)
333 src = self._getsourcename(i)
334 out += 'Scan[%d] (%s) ' % (axis[0], src)
335 out += 'Time[%s]:\n' % (tm)
336 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
337 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
338 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
339 out += '= %3.3f\n' % (statvals[i])
340 out += "--------------------------------------------------\n"
[256]341
[876]342 if rcParams['verbose']:
343 print "--------------------------------------------------"
[1118]344 print " ", stat
[876]345 print "--------------------------------------------------"
346 print out
[1290]347 #else:
348 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
349 # 'axes' : axes,
350 # 'data': statvals}
351 return statvals
[102]352
[1118]353 def stddev(self, mask=None):
[135]354 """
355 Determine the standard deviation of the current beam/if/pol
356 Takes a 'mask' as an optional parameter to specify which
357 channels should be excluded.
358 Parameters:
359 mask: an optional mask specifying where the standard
360 deviation should be determined.
361
362 Example:
363 scan.set_unit('channel')
[1118]364 msk = scan.create_mask([100, 200], [500, 600])
[135]365 scan.stddev(mask=msk)
366 """
[1118]367 return self.stats(stat='stddev', mask=mask)
[135]368
[1003]369
[1259]370 def get_column_names(self):
[1003]371 """
372 Return a list of column names, which can be used for selection.
373 """
[1259]374 return list(Scantable.get_column_names(self))
[1003]375
[876]376 def get_tsys(self):
[113]377 """
378 Return the System temperatures.
379 Parameters:
[876]380
[113]381 Returns:
[876]382 a list of Tsys values for the current selection
[113]383 """
[256]384
[876]385 return self._row_callback(self._gettsys, "Tsys")
[256]386
[876]387 def _row_callback(self, callback, label):
388 axes = []
[1118]389 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]390 out = ""
[1118]391 outvec = []
[876]392 for i in range(self.nrow()):
393 axis = []
394 axis.append(self.getscan(i))
395 axis.append(self.getbeam(i))
396 axis.append(self.getif(i))
397 axis.append(self.getpol(i))
398 axis.append(self.getcycle(i))
399 axes.append(axis)
400 tm = self._gettime(i)
401 src = self._getsourcename(i)
402 out += 'Scan[%d] (%s) ' % (axis[0], src)
403 out += 'Time[%s]:\n' % (tm)
404 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
405 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
406 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
407 outvec.append(callback(i))
408 out += '= %3.3f\n' % (outvec[i])
409 out += "--------------------------------------------------\n"
410 if rcParams['verbose']:
411 print "--------------------------------------------------"
412 print " %s" % (label)
413 print "--------------------------------------------------"
414 print out
[1175]415 # disabled because the vector seems more useful
416 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
417 return outvec
[256]418
[1070]419 def _get_column(self, callback, row=-1):
420 """
421 """
422 if row == -1:
423 return [callback(i) for i in range(self.nrow())]
424 else:
425 if 0 <= row < self.nrow():
426 return callback(row)
[256]427
[1070]428
[407]429 def get_time(self, row=-1):
[113]430 """
431 Get a list of time stamps for the observations.
[181]432 Return a datetime object for each integration in the scantable.
[113]433 Parameters:
[407]434 row: row no of integration. Default -1 returns all rows
[113]435 Example:
436 none
437 """
[1175]438 from time import strptime
439 from datetime import datetime
440 times = self._get_column(self._gettime, row)
441 format = "%Y/%m/%d/%H:%M:%S"
442 if isinstance(times, list):
443 return [datetime(*strptime(i, format)[:6]) for i in times]
444 else:
445 return datetime(*strptime(times, format)[:6])
[102]446
[714]447 def get_sourcename(self, row=-1):
448 """
[794]449 Get a list of source names for the observations.
[714]450 Return a string for each integration in the scantable.
451 Parameters:
 452 row: row no of integration. Default -1 returns all rows
453 Example:
454 none
455 """
[1070]456 return self._get_column(self._getsourcename, row)
[714]457
[794]458 def get_elevation(self, row=-1):
459 """
460 Get a list of elevations for the observations.
461 Return a float for each integration in the scantable.
462 Parameters:
 463 row: row no of integration. Default -1 returns all rows
464 Example:
465 none
466 """
[1070]467 return self._get_column(self._getelevation, row)
[794]468
469 def get_azimuth(self, row=-1):
470 """
471 Get a list of azimuths for the observations.
472 Return a float for each integration in the scantable.
473 Parameters:
 474 row: row no of integration. Default -1 returns all rows
475 Example:
476 none
477 """
[1070]478 return self._get_column(self._getazimuth, row)
[794]479
480 def get_parangle(self, row=-1):
481 """
482 Get a list of parallactic angles for the observations.
483 Return a float for each integration in the scantable.
484 Parameters:
 485 row: row no of integration. Default -1 returns all rows
486 Example:
487 none
488 """
[1070]489 return self._get_column(self._getparangle, row)
[794]490
[1070]491 def get_direction(self, row=-1):
492 """
493 Get a list of Positions on the sky (direction) for the observations.
 494 Return the direction value for each integration in the scantable.
495 Parameters:
 496 row: row no of integration. Default -1 returns all rows
497 Example:
498 none
499 """
500 return self._get_column(self._getdirection, row)
501
[102]502 def set_unit(self, unit='channel'):
503 """
504 Set the unit for all following operations on this scantable
505 Parameters:
506 unit: optional unit, default is 'channel'
[1118]507 one of '*Hz', 'km/s', 'channel', ''
[102]508 """
[484]509 varlist = vars()
[1118]510 if unit in ['', 'pixel', 'channel']:
[113]511 unit = ''
512 inf = list(self._getcoordinfo())
513 inf[0] = unit
514 self._setcoordinfo(inf)
[1118]515 self._add_history("set_unit", varlist)
[113]516
[484]517 def set_instrument(self, instr):
[358]518 """
519 Set the instrument for subsequent processing
520 Parameters:
[710]521 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]522 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]523 """
524 self._setInstrument(instr)
[1118]525 self._add_history("set_instument", vars())
[718]526 print_log()
[358]527
[1190]528 def set_feedtype(self, feedtype):
529 """
530 Overwrite the feed type, which might not be set correctly.
531 Parameters:
532 feedtype: 'linear' or 'circular'
533 """
534 self._setfeedtype(feedtype)
535 self._add_history("set_feedtype", vars())
536 print_log()
537
[276]538 def set_doppler(self, doppler='RADIO'):
539 """
540 Set the doppler for all following operations on this scantable.
541 Parameters:
542 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
543 """
[484]544 varlist = vars()
[276]545 inf = list(self._getcoordinfo())
546 inf[2] = doppler
547 self._setcoordinfo(inf)
[1118]548 self._add_history("set_doppler", vars())
[718]549 print_log()
[710]550
[226]551 def set_freqframe(self, frame=None):
[113]552 """
553 Set the frame type of the Spectral Axis.
554 Parameters:
[591]555 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]556 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
557 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]558 Examples:
559 scan.set_freqframe('BARY')
560 """
[484]561 if frame is None: frame = rcParams['scantable.freqframe']
562 varlist = vars()
[1118]563 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
564 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]565
[989]566 if frame in valid:
[113]567 inf = list(self._getcoordinfo())
568 inf[1] = frame
569 self._setcoordinfo(inf)
[1118]570 self._add_history("set_freqframe", varlist)
[102]571 else:
[1118]572 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
[718]573 if rcParams['verbose']:
574 print msg
575 else:
576 raise TypeError(msg)
577 print_log()
[710]578
[989]579 def set_dirframe(self, frame=""):
580 """
581 Set the frame type of the Direction on the sky.
582 Parameters:
583 frame: an optional frame type, default ''. Valid frames are:
584 'J2000', 'B1950', 'GALACTIC'
585 Examples:
586 scan.set_dirframe('GALACTIC')
587 """
588 varlist = vars()
589 try:
590 Scantable.set_dirframe(self, frame)
[1118]591 except RuntimeError, msg:
[989]592 if rcParams['verbose']:
593 print msg
594 else:
595 raise
[1118]596 self._add_history("set_dirframe", varlist)
[989]597
[113]598 def get_unit(self):
599 """
600 Get the default unit set in this scantable
601 Parameters:
602 Returns:
603 A unit string
604 """
605 inf = self._getcoordinfo()
606 unit = inf[0]
607 if unit == '': unit = 'channel'
608 return unit
[102]609
[158]610 def get_abcissa(self, rowno=0):
[102]611 """
[158]612 Get the abcissa in the current coordinate setup for the currently
[113]613 selected Beam/IF/Pol
614 Parameters:
[226]615 rowno: an optional row number in the scantable. Default is the
616 first row, i.e. rowno=0
[113]617 Returns:
[407]618 The abscissa values and its format string (as a dictionary)
[113]619 """
[256]620 abc = self._getabcissa(rowno)
[710]621 lbl = self._getabcissalabel(rowno)
[718]622 print_log()
[158]623 return abc, lbl
[113]624
[1118]625 def flag(self, mask=None):
[1001]626 """
627 Flag the selected data using an optional channel mask.
628 Parameters:
629 mask: an optional channel mask, created with create_mask. Default
630 (no mask) is all channels.
631 """
632 varlist = vars()
[1118]633 if mask is None:
634 mask = []
[1001]635 try:
636 self._flag(mask)
[1118]637 except RuntimeError, msg:
[1001]638 if rcParams['verbose']:
639 print msg
640 return
641 else: raise
642 self._add_history("flag", varlist)
643
[1203]644 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]645 """
646 Flag the data in 'lag' space by providing a frequency to remove.
647 Flagged data in the scantable gets set to 0.0 before the fft.
648 No taper is applied.
649 Parameters:
[1203]650 frequency: the frequency (really a period within the bandwidth) to remove
651 width: the width of the frequency to remove, to remove a range
 652 of frequencies around the centre.
653 unit: the frequency unit (default "GHz")
654 Notes:
655 It is recommended to flag edges of the band or strong signals beforehand.
[1192]656 """
657 if insitu is None: insitu = rcParams['insitu']
658 self._math._setinsitu(insitu)
659 varlist = vars()
[1192]660 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
661 if not base.has_key(unit):
662 raise ValueError("%s is not a valid unit." % unit)
663 try:
[1200]664 s = scantable(self._math._lag_flag(self, frequency*base[unit],
665 width*base[unit]))
[1192]666 except RuntimeError, msg:
667 if rcParams['verbose']:
668 print msg
669 return
670 else: raise
671 s._add_history("lag_flag", varlist)
672 print_log()
673 if insitu:
674 self._assign(s)
675 else:
676 return s
[1001]677
[1192]678
[113]679 def create_mask(self, *args, **kwargs):
680 """
[1118]681 Compute and return a mask based on [min, max] windows.
[189]682 The specified windows are to be INCLUDED, when the mask is
[113]683 applied.
[102]684 Parameters:
[1118]685 [min, max], [min2, max2], ...
[1024]686 Pairs of start/end points (inclusive) specifying the regions
[102]687 to be masked
[189]688 invert: optional argument. If specified as True,
689 return an inverted mask, i.e. the regions
690 specified are EXCLUDED
[513]691 row: create the mask using the specified row for
692 unit conversions, default is row=0
693 only necessary if frequency varies over rows.
[102]694 Example:
[113]695 scan.set_unit('channel')
696 a)
[1118]697 msk = scan.create_mask([400, 500], [800, 900])
[189]698 # masks everything outside 400 and 500
[113]699 # and 800 and 900 in the unit 'channel'
700
701 b)
[1118]702 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]703 # masks the regions between 400 and 500
[113]704 # and 800 and 900 in the unit 'channel'
[1024]705 c)
706 mask only channel 400
[1118]707 msk = scan.create_mask([400, 400])
[102]708 """
[513]709 row = 0
710 if kwargs.has_key("row"):
711 row = kwargs.get("row")
712 data = self._getabcissa(row)
[113]713 u = self._getcoordinfo()[0]
[718]714 if rcParams['verbose']:
[113]715 if u == "": u = "channel"
[718]716 msg = "The current mask window unit is %s" % u
[1118]717 i = self._check_ifs()
718 if not i:
[876]719 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]720 asaplog.push(msg)
[102]721 n = self.nchan()
[1283]722 msk = _n_bools(n, False)
[710]723 # test if args is a 'list' or a 'normal *args - UGLY!!!
724
[1118]725 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
726 and args or args[0]
[710]727 for window in ws:
[102]728 if (len(window) != 2 or window[0] > window[1] ):
[1118]729 raise TypeError("A window needs to be defined as [min, max]")
[102]730 for i in range(n):
[1024]731 if data[i] >= window[0] and data[i] <= window[1]:
[1283]732 msk[i] = True
[113]733 if kwargs.has_key('invert'):
734 if kwargs.get('invert'):
[1283]735 msk = mask_not(msk)
[718]736 print_log()
[102]737 return msk
[710]738
[256]739 def get_restfreqs(self):
740 """
741 Get the restfrequency(s) stored in this scantable.
742 The return value(s) are always of unit 'Hz'
743 Parameters:
744 none
745 Returns:
746 a list of doubles
747 """
748 return list(self._getrestfreqs())
[102]749
[402]750
[931]751 def set_restfreqs(self, freqs=None, unit='Hz'):
752 """
[931]753 Set or replace the restfrequency(s) specified.
 754 If the 'freqs' argument holds a scalar,
755 then that rest frequency will be applied to all the selected
756 data. If the 'freqs' argument holds
757 a vector, then it MUST be of equal or smaller length than
758 the number of IFs (and the available restfrequencies will be
759 replaced by this vector). In this case, *all* data have
760 the restfrequency set per IF according
761 to the corresponding value you give in the 'freqs' vector.
[1118]762 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]763 IF 1 gets restfreq 2e9.
[1153]764 You can also specify the frequencies via a linecatalog.
765
[931]766 Parameters:
 767 freqs: list of rest frequency values or string identifiers
768 unit: unit for rest frequency (default 'Hz')
[402]769
[931]770 Example:
771 # set the given restfrequency for the whole table
772 scan.set_restfreqs(freqs=1.4e9)
 773 # If the number of IFs in the data is >= 2, IF0 gets the first
 774 # value, IF1 the second, ...
[1118]775 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]776 #set the given restfrequency for the whole table (by name)
777 scan.set_restfreqs(freqs="OH1667")
[391]778
[931]779 Note:
 780 To do more sophisticated rest-frequency setting, e.g. on a
781 source and IF basis, use scantable.set_selection() before using
782 this function.
 783 # provided your scantable is called scan
784 selection = selector()
785 selection.set_name("ORION*")
786 selection.set_ifs([1])
787 scan.set_selection(selection)
788 scan.set_restfreqs(freqs=86.6e9)
789
790 """
791 varlist = vars()
[1157]792 from asap import linecatalog
793 # simple value
[1118]794 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]795 self._setrestfreqs(freqs, "",unit)
[1157]796 # list of values
[1118]797 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]798 # list values are scalars
[1118]799 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]800 sel = selector()
801 savesel = self._getselection()
802 for i in xrange(len(freqs)):
[946]803 sel.set_ifs([i])
[931]804 self._setselection(sel)
[1153]805 self._setrestfreqs(freqs[i], "",unit)
[931]806 self._setselection(savesel)
[1157]807 # list values are dicts with 'value' and 'name' keys
808 elif isinstance(freqs[-1], dict):
809 sel = selector()
810 savesel = self._getselection()
811 for i in xrange(len(freqs)):
812 sel.set_ifs([i])
[1259]813 self._setselection(sel)
[1157]814 self._setrestfreqs(freqs[i]["value"],
815 freqs[i]["name"], "MHz")
816 self._setselection(savesel)
817 # freqs are to be taken from a linecatalog
[1153]818 elif isinstance(freqs, linecatalog):
819 sel = selector()
820 savesel = self._getselection()
821 for i in xrange(freqs.nrow()):
822 sel.set_ifs([i])
823 self._setselection(sel)
824 self._setrestfreqs(freqs.get_frequency(i),
825 freqs.get_name(i), "MHz")
826 # ensure that we are not iterating past nIF
827 if i == self.nif()-1: break
828 self._setselection(savesel)
[931]829 else:
830 return
831 self._add_history("set_restfreqs", varlist)
832
833
834
[1259]835 def history(self, filename=None):
836 """
837 Print the history. Optionally to a file.
838 """
[484]839 hist = list(self._gethistory())
[794]840 out = "-"*80
[484]841 for h in hist:
[489]842 if h.startswith("---"):
[794]843 out += "\n"+h
[489]844 else:
845 items = h.split("##")
846 date = items[0]
847 func = items[1]
848 items = items[2:]
[794]849 out += "\n"+date+"\n"
850 out += "Function: %s\n Parameters:" % (func)
[489]851 for i in items:
852 s = i.split("=")
[1118]853 out += "\n %s = %s" % (s[0], s[1])
[794]854 out += "\n"+"-"*80
[1259]855 if filename is not None:
 856 if filename == "":
857 filename = 'scantable_history.txt'
858 import os
859 filename = os.path.expandvars(os.path.expanduser(filename))
860 if not os.path.isdir(filename):
861 data = open(filename, 'w')
862 data.write(out)
863 data.close()
864 else:
865 msg = "Illegal file name '%s'." % (filename)
866 if rcParams['verbose']:
867 print msg
868 else:
869 raise IOError(msg)
870 if rcParams['verbose']:
871 try:
872 from IPython.genutils import page as pager
873 except ImportError:
874 from pydoc import pager
875 pager(out)
876 else:
877 return out
[484]878 return
[513]879 #
880 # Maths business
881 #
882
[931]883 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]884 """
[1070]885 Return the (time) weighted average of a scan.
[513]886 Note:
[1070]887 in channels only - align if necessary
[513]888 Parameters:
889 mask: an optional mask (only used for 'var' and 'tsys'
890 weighting)
[558]891 scanav: True averages each scan separately
892 False (default) averages all scans together,
[1099]893 weight: Weighting scheme.
 894 'none' (mean, no weighting)
 895 'var' (1/var(spec) weighted)
 896 'tsys' (1/Tsys**2 weighted)
 897 'tint' (integration time weighted)
 898 'tintsys' (Tint/Tsys**2)
 899 'median' (median averaging)
[535]900 The default is 'tint'
[931]901 align: align the spectra in velocity before averaging. It takes
902 the time of the first spectrum as reference time.
[513]903 Example:
904 # time average the scantable without using a mask
[710]905 newscan = scan.average_time()
[513]906 """
907 varlist = vars()
[976]908 if weight is None: weight = 'TINT'
[513]909 if mask is None: mask = ()
[1099]910 if scanav: scanav = "SCAN"
911 else: scanav = "NONE"
[1118]912 scan = (self, )
[989]913 try:
[1118]914 if align:
915 scan = (self.freq_align(insitu=False), )
916 s = None
917 if weight.upper() == 'MEDIAN':
918 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
919 scanav))
920 else:
921 s = scantable(self._math._average(scan, mask, weight.upper(),
922 scanav))
923 except RuntimeError, msg:
[989]924 if rcParams['verbose']:
925 print msg
926 return
927 else: raise
[1099]928 s._add_history("average_time", varlist)
[718]929 print_log()
[513]930 return s
[710]931
[876]932 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]933 """
934 Return a scan where all spectra are converted to either
935 Jansky or Kelvin depending upon the flux units of the scan table.
936 By default the function tries to look the values up internally.
937 If it can't find them (or if you want to over-ride), you must
938 specify EITHER jyperk OR eta (and D which it will try to look up
939 also if you don't set it). jyperk takes precedence if you set both.
940 Parameters:
941 jyperk: the Jy / K conversion factor
942 eta: the aperture efficiency
 943 d: the geometric diameter (metres)
944 insitu: if False a new scantable is returned.
945 Otherwise, the scaling is done in-situ
946 The default is taken from .asaprc (False)
947 """
948 if insitu is None: insitu = rcParams['insitu']
[876]949 self._math._setinsitu(insitu)
[513]950 varlist = vars()
951 if jyperk is None: jyperk = -1.0
952 if d is None: d = -1.0
953 if eta is None: eta = -1.0
[876]954 s = scantable(self._math._convertflux(self, d, eta, jyperk))
955 s._add_history("convert_flux", varlist)
956 print_log()
957 if insitu: self._assign(s)
958 else: return s
[513]959
[876]960 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]961 """
962 Return a scan after applying a gain-elevation correction.
963 The correction can be made via either a polynomial or a
964 table-based interpolation (and extrapolation if necessary).
965 You specify polynomial coefficients, an ascii table or neither.
966 If you specify neither, then a polynomial correction will be made
967 with built in coefficients known for certain telescopes (an error
968 will occur if the instrument is not known).
969 The data and Tsys are *divided* by the scaling factors.
970 Parameters:
971 poly: Polynomial coefficients (default None) to compute a
972 gain-elevation correction as a function of
973 elevation (in degrees).
974 filename: The name of an ascii file holding correction factors.
975 The first row of the ascii file must give the column
976 names and these MUST include columns
977 "ELEVATION" (degrees) and "FACTOR" (multiply data
978 by this) somewhere.
979 The second row must give the data type of the
980 column. Use 'R' for Real and 'I' for Integer.
981 An example file would be
982 (actual factors are arbitrary) :
983
984 TIME ELEVATION FACTOR
985 R R R
986 0.1 0 0.8
987 0.2 20 0.85
988 0.3 40 0.9
989 0.4 60 0.85
990 0.5 80 0.8
991 0.6 90 0.75
992 method: Interpolation method when correcting from a table.
993 Values are "nearest", "linear" (default), "cubic"
994 and "spline"
995 insitu: if False a new scantable is returned.
996 Otherwise, the scaling is done in-situ
997 The default is taken from .asaprc (False)
998 """
999
1000 if insitu is None: insitu = rcParams['insitu']
[876]1001 self._math._setinsitu(insitu)
[513]1002 varlist = vars()
1003 if poly is None:
[1118]1004 poly = ()
[513]1005 from os.path import expandvars
1006 filename = expandvars(filename)
[876]1007 s = scantable(self._math._gainel(self, poly, filename, method))
1008 s._add_history("gain_el", varlist)
1009 print_log()
1010 if insitu: self._assign(s)
1011 else: return s
[710]1012
[931]1013 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1014 """
1015 Return a scan where all rows have been aligned in frequency/velocity.
1016 The alignment frequency frame (e.g. LSRK) is that set by function
1017 set_freqframe.
1018 Parameters:
1019 reftime: reference time to align at. By default, the time of
1020 the first row of data is used.
1021 method: Interpolation method for regridding the spectra.
1022 Choose from "nearest", "linear", "cubic" (default)
1023 and "spline"
1024 insitu: if False a new scantable is returned.
1025 Otherwise, the scaling is done in-situ
1026 The default is taken from .asaprc (False)
1027 """
[931]1028 if insitu is None: insitu = rcParams["insitu"]
[876]1029 self._math._setinsitu(insitu)
[513]1030 varlist = vars()
[931]1031 if reftime is None: reftime = ""
1032 s = scantable(self._math._freq_align(self, reftime, method))
[876]1033 s._add_history("freq_align", varlist)
1034 print_log()
1035 if insitu: self._assign(s)
1036 else: return s
[513]1037
[876]1038 def opacity(self, tau, insitu=None):
[513]1039 """
1040 Apply an opacity correction. The data
1041 and Tsys are multiplied by the correction factor.
1042 Parameters:
1043 tau: Opacity from which the correction factor is
1044 exp(tau*ZD)
1045 where ZD is the zenith-distance
1046 insitu: if False a new scantable is returned.
1047 Otherwise, the scaling is done in-situ
1048 The default is taken from .asaprc (False)
1049 """
1050 if insitu is None: insitu = rcParams['insitu']
[876]1051 self._math._setinsitu(insitu)
[513]1052 varlist = vars()
[876]1053 s = scantable(self._math._opacity(self, tau))
1054 s._add_history("opacity", varlist)
1055 print_log()
1056 if insitu: self._assign(s)
1057 else: return s
[513]1058
1059 def bin(self, width=5, insitu=None):
1060 """
1061 Return a scan where all spectra have been binned up.
1062 width: The bin width (default=5) in pixels
1063 insitu: if False a new scantable is returned.
1064 Otherwise, the scaling is done in-situ
1065 The default is taken from .asaprc (False)
1066 """
1067 if insitu is None: insitu = rcParams['insitu']
[876]1068 self._math._setinsitu(insitu)
[513]1069 varlist = vars()
[876]1070 s = scantable(self._math._bin(self, width))
[1118]1071 s._add_history("bin", varlist)
[876]1072 print_log()
1073 if insitu: self._assign(s)
1074 else: return s
[513]1075
[710]1076
[513]1077 def resample(self, width=5, method='cubic', insitu=None):
1078 """
1079 Return a scan where all spectra have been binned up
1080 width: The bin width (default=5) in pixels
1081 method: Interpolation method when correcting from a table.
1082 Values are "nearest", "linear", "cubic" (default)
1083 and "spline"
1084 insitu: if False a new scantable is returned.
1085 Otherwise, the scaling is done in-situ
1086 The default is taken from .asaprc (False)
1087 """
1088 if insitu is None: insitu = rcParams['insitu']
[876]1089 self._math._setinsitu(insitu)
[513]1090 varlist = vars()
[876]1091 s = scantable(self._math._resample(self, method, width))
[1118]1092 s._add_history("resample", varlist)
[876]1093 print_log()
1094 if insitu: self._assign(s)
1095 else: return s
[513]1096
1097
[946]1098 def average_pol(self, mask=None, weight='none'):
1099 """
1100 Average the Polarisations together.
1101 Parameters:
1102 mask: An optional mask defining the region, where the
1103 averaging will be applied. The output will have all
1104 specified points masked.
1105 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1106 weighted), or 'tsys' (1/Tsys**2 weighted)
1107 """
1108 varlist = vars()
1109 if mask is None:
1110 mask = ()
[1010]1111 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1112 s._add_history("average_pol", varlist)
[946]1113 print_log()
[992]1114 return s
[513]1115
[1145]1116 def average_beam(self, mask=None, weight='none'):
1117 """
1118 Average the Beams together.
1119 Parameters:
1120 mask: An optional mask defining the region, where the
1121 averaging will be applied. The output will have all
1122 specified points masked.
1123 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1124 weighted), or 'tsys' (1/Tsys**2 weighted)
1125 """
1126 varlist = vars()
1127 if mask is None:
1128 mask = ()
1129 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1130 s._add_history("average_beam", varlist)
1131 print_log()
1132 return s
1133
[992]1134 def convert_pol(self, poltype=None):
1135 """
1136 Convert the data to a different polarisation type.
1137 Parameters:
1138 poltype: The new polarisation type. Valid types are:
1139 "linear", "stokes" and "circular"
1140 """
1141 varlist = vars()
1142 try:
1143 s = scantable(self._math._convertpol(self, poltype))
[1118]1144 except RuntimeError, msg:
[992]1145 if rcParams['verbose']:
[1118]1146 print msg
1147 return
[992]1148 else:
1149 raise
[1118]1150 s._add_history("convert_pol", varlist)
[992]1151 print_log()
1152 return s
1153
[876]1154 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1155 """
1156 Smooth the spectrum by the specified kernel (conserving flux).
1157 Parameters:
1158 scan: The input scan
1159 kernel: The type of smoothing kernel. Select from
1160 'hanning' (default), 'gaussian' and 'boxcar'.
1161 The first three characters are sufficient.
1162 width: The width of the kernel in pixels. For hanning this is
 1163 ignored; otherwise it defaults to 5 pixels.
1164 For 'gaussian' it is the Full Width Half
1165 Maximum. For 'boxcar' it is the full width.
1166 insitu: if False a new scantable is returned.
1167 Otherwise, the scaling is done in-situ
1168 The default is taken from .asaprc (False)
1169 Example:
1170 none
1171 """
1172 if insitu is None: insitu = rcParams['insitu']
[876]1173 self._math._setinsitu(insitu)
[513]1174 varlist = vars()
[1118]1175 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1176 s._add_history("smooth", varlist)
1177 print_log()
1178 if insitu: self._assign(s)
1179 else: return s
[513]1180
[876]1181
[1061]1182 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
[513]1183 """
1184 Return a scan which has been baselined (all rows) by a polynomial.
1185 Parameters:
[794]1186 scan: a scantable
1187 mask: an optional mask
1188 order: the order of the polynomial (default is 0)
[1061]1189 plot: plot the fit and the residual. In this mode each
 1190 individual fit has to be approved by typing 'y'
 1191 or 'n'
[794]1192 insitu: if False a new scantable is returned.
1193 Otherwise, the scaling is done in-situ
1194 The default is taken from .asaprc (False)
[513]1195 Example:
1196 # return a scan baselined by a third order polynomial,
1197 # not using a mask
1198 bscan = scan.poly_baseline(order=3)
[579]1199 """
[513]1200 if insitu is None: insitu = rcParams['insitu']
1201 varlist = vars()
1202 if mask is None:
[1283]1203 mask = [True for i in xrange(self.nchan(-1))]
[513]1204 from asap.asapfitter import fitter
[1217]1205 try:
1206 f = fitter()
1207 f.set_scan(self, mask)
1208 f.set_function(poly=order)
1209 s = f.auto_fit(insitu, plot=plot)
1210 s._add_history("poly_baseline", varlist)
1211 print_log()
1212 if insitu: self._assign(s)
1213 else: return s
1214 except RuntimeError:
1215 msg = "The fit failed, possibly because it didn't converge."
1216 if rcParams['verbose']:
1217 print msg
1218 return
1219 else:
1220 raise RuntimeError(msg)
[513]1221
[1217]1222
[1118]1223 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1274]1224 threshold=3, chan_avg_limit=1, plot=False,
1225 insitu=None):
[880]1226 """
1227 Return a scan which has been baselined (all rows) by a polynomial.
1228 Spectral lines are detected first using linefinder and masked out
1229 to avoid them affecting the baseline solution.
1230
1231 Parameters:
 1232 mask: an optional mask retrieved from scantable
 1233 edge: an optional number of channels to drop at
 1234 the edge of the spectrum. If only one value is
 1235 specified, the same number will be dropped from
 1236 both sides of the spectrum. Default is to keep
[907]1237 all channels. Nested tuples represent individual
[976]1238 edge selection for different IFs (a number of spectral
1239 channels can be different)
[880]1240 order: the order of the polynomial (default is 0)
1241 threshold: the threshold used by line finder. It is better to
1242 keep it large as only strong lines affect the
1243 baseline solution.
[1274]1244 chan_avg_limit:
 1245 a maximum number of consecutive spectral channels to
 1246 average during the search for weak and broad lines.
1247 The default is no averaging (and no search for weak
1248 lines). If such lines can affect the fitted baseline
1249 (e.g. a high order polynomial is fitted), increase this
1250 parameter (usually values up to 8 are reasonable). Most
1251 users of this method should find the default value
1252 sufficient.
[1061]1253 plot: plot the fit and the residual. In this mode each
 1254 individual fit has to be approved by typing 'y'
 1255 or 'n'
[880]1256 insitu: if False a new scantable is returned.
1257 Otherwise, the scaling is done in-situ
1258 The default is taken from .asaprc (False)
1259
1260 Example:
1261 scan2=scan.auto_poly_baseline(order=7)
1262 """
1263 if insitu is None: insitu = rcParams['insitu']
1264 varlist = vars()
1265 from asap.asapfitter import fitter
1266 from asap.asaplinefind import linefinder
1267 from asap import _is_sequence_or_number as _is_valid
1268
[976]1269 # check whether edge is set up for each IF individually
[1118]1270 individualedge = False;
1271 if len(edge) > 1:
1272 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1273 individualedge = True;
[907]1274
[1118]1275 if not _is_valid(edge, int) and not individualedge:
[909]1276 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1277 pair of integers specified as a tuple. Nested tuples are allowed \
1278 to make individual selection for different IFs."
[919]1279
[1118]1280 curedge = (0, 0)
1281 if individualedge:
1282 for edgepar in edge:
1283 if not _is_valid(edgepar, int):
1284 raise ValueError, "Each element of the 'edge' tuple has \
1285 to be a pair of integers or an integer."
[907]1286 else:
[1118]1287 curedge = edge;
[880]1288
1289 # setup fitter
1290 f = fitter()
1291 f.set_function(poly=order)
1292
1293 # setup line finder
[1118]1294 fl = linefinder()
[1268]1295 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1296
1297 if not insitu:
[1118]1298 workscan = self.copy()
[880]1299 else:
[1118]1300 workscan = self
[880]1301
[907]1302 fl.set_scan(workscan)
1303
[1118]1304 rows = range(workscan.nrow())
[880]1305 asaplog.push("Processing:")
1306 for r in rows:
[1118]1307 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1308 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1309 workscan.getpol(r), workscan.getcycle(r))
[880]1310 asaplog.push(msg, False)
[907]1311
[976]1312 # figure out edge parameter
[1118]1313 if individualedge:
 1314 if workscan.getif(r) >= len(edge):
 1315 raise RuntimeError, "Number of edge elements appears to " \
 1316 "be less than the number of IFs"
1317 curedge = edge[workscan.getif(r)]
[919]1318
[976]1319 # find the lines for this row
[1118]1320 fl.find_lines(r, mask, curedge)
[880]1321 f.set_scan(workscan, fl.get_mask())
1322 f.x = workscan._getabcissa(r)
1323 f.y = workscan._getspectrum(r)
1324 f.data = None
1325 f.fit()
1326 x = f.get_parameters()
[1061]1327 if plot:
1328 f.plot(residual=True)
1329 x = raw_input("Accept fit ( [y]/n ): ")
1330 if x.upper() == 'N':
1331 continue
[880]1332 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1333 if plot:
1334 f._p.unmap()
1335 f._p = None
1336 workscan._add_history("auto_poly_baseline", varlist)
[880]1337 if insitu:
1338 self._assign(workscan)
1339 else:
1340 return workscan
1341
[914]1342 def rotate_linpolphase(self, angle):
1343 """
1344 Rotate the phase of the complex polarization O=Q+iU correlation.
1345 This is always done in situ in the raw data. So if you call this
1346 function more than once then each call rotates the phase further.
1347 Parameters:
1348 angle: The angle (degrees) to rotate (add) by.
1349 Examples:
1350 scan.rotate_linpolphase(2.3)
1351 """
1352 varlist = vars()
[936]1353 self._math._rotate_linpolphase(self, angle)
[914]1354 self._add_history("rotate_linpolphase", varlist)
1355 print_log()
1356 return
[710]1357
[513]1358
[914]1359 def rotate_xyphase(self, angle):
1360 """
1361 Rotate the phase of the XY correlation. This is always done in situ
1362 in the data. So if you call this function more than once
1363 then each call rotates the phase further.
1364 Parameters:
1365 angle: The angle (degrees) to rotate (add) by.
1366 Examples:
1367 scan.rotate_xyphase(2.3)
1368 """
1369 varlist = vars()
[936]1370 self._math._rotate_xyphase(self, angle)
[914]1371 self._add_history("rotate_xyphase", varlist)
1372 print_log()
1373 return
1374
1375 def swap_linears(self):
1376 """
1377 Swap the linear polarisations XX and YY
1378 """
1379 varlist = vars()
[936]1380 self._math._swap_linears(self)
[914]1381 self._add_history("swap_linears", varlist)
1382 print_log()
1383 return
1384
1385 def invert_phase(self):
1386 """
1387 Invert the phase of the complex polarisation
1388 """
1389 varlist = vars()
[936]1390 self._math._invert_phase(self)
[914]1391 self._add_history("invert_phase", varlist)
1392 print_log()
1393 return
1394
[876]1395 def add(self, offset, insitu=None):
[513]1396 """
1397 Return a scan where all spectra have the offset added
1398 Parameters:
1399 offset: the offset
1400 insitu: if False a new scantable is returned.
1401 Otherwise, the scaling is done in-situ
1402 The default is taken from .asaprc (False)
1403 """
1404 if insitu is None: insitu = rcParams['insitu']
[876]1405 self._math._setinsitu(insitu)
[513]1406 varlist = vars()
[876]1407 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1408 s._add_history("add", varlist)
[876]1409 print_log()
1410 if insitu:
1411 self._assign(s)
1412 else:
[513]1413 return s
1414
[1118]1415 def scale(self, factor, tsys=True, insitu=None):
[513]1416 """
 1417 Return a scan where all spectra are scaled by the given 'factor'
1418 Parameters:
1419 factor: the scaling factor
1420 insitu: if False a new scantable is returned.
1421 Otherwise, the scaling is done in-situ
1422 The default is taken from .asaprc (False)
1423 tsys: if True (default) then apply the operation to Tsys
1424 as well as the data
1425 """
1426 if insitu is None: insitu = rcParams['insitu']
[876]1427 self._math._setinsitu(insitu)
[513]1428 varlist = vars()
[876]1429 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1430 s._add_history("scale", varlist)
[876]1431 print_log()
1432 if insitu:
1433 self._assign(s)
1434 else:
[513]1435 return s
1436
[876]1437 def auto_quotient(self, mode='time', preserve=True):
[670]1438 """
 1439 This function allows you to build quotients automatically.
 1440 It assumes the observation to have the same number of
 1441 "ons" and "offs".
 1442 It will support "closest off in time" in the future.
 1443 Parameters:
 1444 mode: the on/off detection mode; 'time' (default) is
 1445 currently the only accepted value. A 'suffix' mode,
 1446 identifying 'off' scans by the trailing '_R'
 1447 (Mopra/Parkes) or '_e'/'_w' (Tid), is not yet supported.
[710]1448 preserve: you can preserve (default) the continuum or
1449 remove it. The equations used are
[670]1450 preserve: Output = Toff * (on/off) - Toff
[1070]1451 remove: Output = Toff * (on/off) - Ton
[670]1452 """
[876]1453 modes = ["time"]
[670]1454 if not mode in modes:
[876]1455 msg = "please provide valid mode. Valid modes are %s" % (modes)
1456 raise ValueError(msg)
1457 varlist = vars()
[1070]1458 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1459 s._add_history("auto_quotient", varlist)
[876]1460 print_log()
1461 return s
[710]1462
[1145]1463 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1464 """
[1143]1465 Form a quotient using "off" beams when observing in "MX" mode.
1466 Parameters:
[1145]1467 mask: an optional mask to be used when weight == 'stddev'
[1143]1468 weight: How to average the off beams. Default is 'median'.
[1145]1469 preserve: you can preserve (default) the continuum or
1470 remove it. The equations used are
1471 preserve: Output = Toff * (on/off) - Toff
1472 remove: Output = Toff * (on/off) - Ton
[1217]1473 """
[1143]1474 if mask is None: mask = ()
[1141]1475 varlist = vars()
1476 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1477 preoff = scantable(self._math._mx_extract(self, 'off'))
1478 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1479 from asapmath import quotient
[1145]1480 q = quotient(on, off, preserve)
[1143]1481 q._add_history("mx_quotient", varlist)
[1145]1482 print_log()
[1217]1483 return q
[513]1484
[718]1485 def freq_switch(self, insitu=None):
1486 """
1487 Apply frequency switching to the data.
1488 Parameters:
1489 insitu: if False a new scantable is returned.
 1490 Otherwise, the switching is done in-situ
1491 The default is taken from .asaprc (False)
1492 Example:
1493 none
1494 """
1495 if insitu is None: insitu = rcParams['insitu']
[876]1496 self._math._setinsitu(insitu)
[718]1497 varlist = vars()
[876]1498 s = scantable(self._math._freqswitch(self))
[1118]1499 s._add_history("freq_switch", varlist)
[876]1500 print_log()
1501 if insitu: self._assign(s)
1502 else: return s
[718]1503
[780]1504 def recalc_azel(self):
1505 """
1506 Recalculate the azimuth and elevation for each position.
1507 Parameters:
1508 none
1509 Example:
1510 """
1511 varlist = vars()
[876]1512 self._recalcazel()
[780]1513 self._add_history("recalc_azel", varlist)
1514 print_log()
1515 return
1516
[513]1517 def __add__(self, other):
1518 varlist = vars()
1519 s = None
1520 if isinstance(other, scantable):
[876]1521 print "scantable + scantable NYI"
1522 return
[513]1523 elif isinstance(other, float):
[876]1524 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1525 else:
[718]1526 raise TypeError("Other input is not a scantable or float value")
[513]1527 s._add_history("operator +", varlist)
[718]1528 print_log()
[513]1529 return s
1530
1531 def __sub__(self, other):
1532 """
1533 implicit on all axes and on Tsys
1534 """
1535 varlist = vars()
1536 s = None
1537 if isinstance(other, scantable):
[876]1538 print "scantable - scantable NYI"
1539 return
[513]1540 elif isinstance(other, float):
[876]1541 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1542 else:
[718]1543 raise TypeError("Other input is not a scantable or float value")
[513]1544 s._add_history("operator -", varlist)
[718]1545 print_log()
[513]1546 return s
[710]1547
[513]1548 def __mul__(self, other):
1549 """
1550 implicit on all axes and on Tsys
1551 """
1552 varlist = vars()
1553 s = None
1554 if isinstance(other, scantable):
[876]1555 print "scantable * scantable NYI"
1556 return
[513]1557 elif isinstance(other, float):
[876]1558 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1559 else:
[718]1560 raise TypeError("Other input is not a scantable or float value")
[513]1561 s._add_history("operator *", varlist)
[718]1562 print_log()
[513]1563 return s
1564
[710]1565
[513]1566 def __div__(self, other):
1567 """
1568 implicit on all axes and on Tsys
1569 """
1570 varlist = vars()
1571 s = None
1572 if isinstance(other, scantable):
[876]1573 print "scantable / scantable NYI"
1574 return
[513]1575 elif isinstance(other, float):
1576 if other == 0.0:
[718]1577 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1578 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1579 else:
[718]1580 raise TypeError("Other input is not a scantable or float value")
[513]1581 s._add_history("operator /", varlist)
[718]1582 print_log()
[513]1583 return s
1584
[530]1585 def get_fit(self, row=0):
1586 """
1587 Print or return the stored fits for a row in the scantable
1588 Parameters:
1589 row: the row which the fit has been applied to.
1590 """
1591 if row > self.nrow():
1592 return
[976]1593 from asap.asapfit import asapfit
[530]1594 fit = asapfit(self._getfit(row))
[718]1595 if rcParams['verbose']:
[530]1596 print fit
1597 return
1598 else:
1599 return fit.as_dict()
1600
[484]1601 def _add_history(self, funcname, parameters):
1602 # create date
1603 sep = "##"
1604 from datetime import datetime
1605 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1606 hist = dstr+sep
1607 hist += funcname+sep#cdate+sep
1608 if parameters.has_key('self'): del parameters['self']
[1118]1609 for k, v in parameters.iteritems():
[484]1610 if type(v) is dict:
[1118]1611 for k2, v2 in v.iteritems():
[484]1612 hist += k2
1613 hist += "="
[1118]1614 if isinstance(v2, scantable):
[484]1615 hist += 'scantable'
1616 elif k2 == 'mask':
[1118]1617 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1618 hist += str(self._zip_mask(v2))
1619 else:
1620 hist += str(v2)
[484]1621 else:
[513]1622 hist += str(v2)
[484]1623 else:
1624 hist += k
1625 hist += "="
[1118]1626 if isinstance(v, scantable):
[484]1627 hist += 'scantable'
1628 elif k == 'mask':
[1118]1629 if isinstance(v, list) or isinstance(v, tuple):
[513]1630 hist += str(self._zip_mask(v))
1631 else:
1632 hist += str(v)
[484]1633 else:
1634 hist += str(v)
1635 hist += sep
1636 hist = hist[:-2] # remove trailing '##'
1637 self._addhistory(hist)
1638
[710]1639
[484]1640 def _zip_mask(self, mask):
1641 mask = list(mask)
1642 i = 0
1643 segments = []
1644 while mask[i:].count(1):
1645 i += mask[i:].index(1)
1646 if mask[i:].count(0):
1647 j = i + mask[i:].index(0)
1648 else:
[710]1649 j = len(mask)
[1118]1650 segments.append([i, j])
[710]1651 i = j
[484]1652 return segments
[714]1653
[626]1654 def _get_ordinate_label(self):
1655 fu = "("+self.get_fluxunit()+")"
1656 import re
1657 lbl = "Intensity"
[1118]1658 if re.match(".K.", fu):
[626]1659 lbl = "Brightness Temperature "+ fu
[1118]1660 elif re.match(".Jy.", fu):
[626]1661 lbl = "Flux density "+ fu
1662 return lbl
[710]1663
[876]1664 def _check_ifs(self):
1665 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1666 nchans = filter(lambda t: t > 0, nchans)
[876]1667 return (sum(nchans)/len(nchans) == nchans[0])
[976]1668
1669 def _fill(self, names, unit, average):
1670 import os
1671 from asap._asap import stfiller
1672 first = True
1673 fullnames = []
1674 for name in names:
1675 name = os.path.expandvars(name)
1676 name = os.path.expanduser(name)
1677 if not os.path.exists(name):
1678 msg = "File '%s' does not exists" % (name)
1679 if rcParams['verbose']:
1680 asaplog.push(msg)
1681 print asaplog.pop().strip()
1682 return
1683 raise IOError(msg)
1684 fullnames.append(name)
1685 if average:
1686 asaplog.push('Auto averaging integrations')
[1079]1687 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1688 for name in fullnames:
[1073]1689 tbl = Scantable(stype)
1690 r = stfiller(tbl)
[976]1691 msg = "Importing %s..." % (name)
[1118]1692 asaplog.push(msg, False)
[976]1693 print_log()
[1118]1694 r._open(name, -1, -1)
[976]1695 r._read()
[1073]1696 #tbl = r._getdata()
[976]1697 if average:
[1118]1698 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1699 #tbl = tbl2
1700 if not first:
1701 tbl = self._math._merge([self, tbl])
1702 #tbl = tbl2
1703 Scantable.__init__(self, tbl)
1704 r._close()
[1118]1705 del r, tbl
[976]1706 first = False
1707 if unit is not None:
1708 self.set_fluxunit(unit)
1709 self.set_freqframe(rcParams['scantable.freqframe'])
1710