source: branches/alma/python/scantable.py@ 1524

Last change on this file since 1524 was 1522, checked in by TakTsutsumi, 16 years ago

New Development: No

JIRA Issue: No

Ready to Release: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: No

Module(s): Module Names change impacts.

Description: Updated the documentation for getpt


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 74.2 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[1496]14 def __init__(self, filename, average=None, unit=None, getpt=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[1522]32 getpt: for MeasurementSet input data only:
33 If True, all pointing data are filled.
34 The default is False, which makes loading
35 the MS data faster in some cases.
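Example (illustrative; the file names are placeholders):
scan = scantable('mydata.rpf', average=True, unit='K')
msscan = scantable('mydata.ms', average=False, getpt=True)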
[710]36 """
[976]37 if average is None:
[710]38 average = rcParams['scantable.autoaverage']
[1496]39 if getpt is None:
40 getpt = False
[1259]41 varlist = vars()
[876]42 from asap._asap import stmath
43 self._math = stmath()
44 if isinstance(filename, Scantable):
45 Scantable.__init__(self, filename)
[181]46 else:
[1118]47 if isinstance(filename, str):
[976]48 import os.path
49 filename = os.path.expandvars(filename)
50 filename = os.path.expanduser(filename)
51 if not os.path.exists(filename):
52 s = "File '%s' not found." % (filename)
[718]53 if rcParams['verbose']:
[976]54 asaplog.push(s)
55 print asaplog.pop().strip()
[718]56 return
[976]57 raise IOError(s)
[1115]58 if os.path.isdir(filename) \
[1118]59 and not os.path.exists(filename+'/table.f1'):
[976]60 # crude check if asap table
61 if os.path.exists(filename+'/table.info'):
[1118]62 ondisk = rcParams['scantable.storage'] == 'disk'
63 Scantable.__init__(self, filename, ondisk)
[976]64 if unit is not None:
65 self.set_fluxunit(unit)
[1496]66 # do not reset to the default freqframe
67 #self.set_freqframe(rcParams['scantable.freqframe'])
[718]68 else:
[1118]69 msg = "The given file '%s'is not a valid " \
70 "asap table." % (filename)
[976]71 if rcParams['verbose']:
72 print msg
73 return
74 else:
75 raise IOError(msg)
[226]76 else:
[1496]77 self._fill([filename], unit, average, getpt)
[1118]78 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]79 and isinstance(filename[-1], str):
[1496]80 self._fill(filename, unit, average, getpt)
[1259]81 self._add_history("scantable", varlist)
[714]82 print_log()
[102]83
[876]84 def save(self, name=None, format=None, overwrite=False):
[116]85 """
[1280]86 Store the scantable on disk. This can be an asap (aips++) Table,
87 SDFITS or MS2 format.
[116]88 Parameters:
[1093]89 name: the name of the output file. For format "ASCII"
90 this is the root file name (data in 'name'.txt
[497]91 and header in 'name'_header.txt)
[116]92 format: an optional file format. Default is ASAP.
[280]93 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]94 'SDFITS' (save as SDFITS file)
[200]95 'ASCII' (saves as ascii text file)
[226]96 'MS2' (saves as an aips++
97 MeasurementSet V2)
[411]98 overwrite: If the file should be overwritten if it exists.
[256]99 The default (False) is to return with a warning
[411]100 without writing the output. USE WITH CARE.
[116]101 Example:
102 scan.save('myscan.asap')
[1118]103 scan.save('myscan.sdfits', 'SDFITS')
[116]104 """
[411]105 from os import path
[226]106 if format is None: format = rcParams['scantable.save']
[256]107 suffix = '.'+format.lower()
[1118]108 if name is None or name == "":
[256]109 name = 'scantable'+suffix
[718]110 msg = "No filename given. Using default name %s..." % name
111 asaplog.push(msg)
[411]112 name = path.expandvars(name)
[256]113 if path.isfile(name) or path.isdir(name):
114 if not overwrite:
[718]115 msg = "File %s exists." % name
116 if rcParams['verbose']:
117 print msg
118 return
119 else:
120 raise IOError(msg)
[451]121 format2 = format.upper()
122 if format2 == 'ASAP':
[116]123 self._save(name)
124 else:
[989]125 from asap._asap import stwriter as stw
[1118]126 writer = stw(format2)
127 writer.write(self, name)
[718]128 print_log()
[116]129 return
130
[102]131 def copy(self):
132 """
133 Return a copy of this scantable.
[1348]134 Note:
135 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]136 Parameters:
[113]137 none
[102]138 Example:
139 copiedscan = scan.copy()
140 """
[876]141 sd = scantable(Scantable._copy(self))
[113]142 return sd
143
[1093]144 def drop_scan(self, scanid=None):
145 """
146 Return a new scantable where the specified scan number(s) has(have)
147 been dropped.
148 Parameters:
149 scanid: a (list of) scan number(s)
150 """
151 from asap import _is_sequence_or_number as _is_valid
152 from asap import _to_list
153 from asap import unique
154 if not _is_valid(scanid):
155 if rcParams['verbose']:
156 print "Please specify a scanno to drop from the scantable"
157 return
158 else:
159 raise RuntimeError("No scan given")
160 try:
161 scanid = _to_list(scanid)
162 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
163 for sid in scanid: allscans.remove(sid)
[1118]164 if len(allscans) == 0:
165 raise ValueError("Can't remove all scans")
[1093]166 except ValueError:
167 if rcParams['verbose']:
168 print "Couldn't find any match."
169 return
170 else: raise
171 try:
172 bsel = self.get_selection()
173 sel = selector()
174 sel.set_scans(allscans)
175 self.set_selection(bsel+sel)
176 scopy = self._copy()
177 self.set_selection(bsel)
178 return scantable(scopy)
179 except RuntimeError:
[1118]180 if rcParams['verbose']:
181 print "Couldn't find any match."
182 else:
183 raise
[1093]184
185
[102]186 def get_scan(self, scanid=None):
187 """
188 Return a specific scan (by scanno) or collection of scans (by
189 source name) in a new scantable.
[1348]190 Note:
191 See scantable.drop_scan() for the inverse operation.
[102]192 Parameters:
[513]193 scanid: a (list of) scanno or a source name, unix-style
194 patterns are accepted for source name matching, e.g.
195 '*_R' gets all 'ref' scans
[102]196 Example:
[513]197 # get all scans containing the source '323p459'
198 newscan = scan.get_scan('323p459')
199 # get all 'off' scans
200 refscans = scan.get_scan('*_R')
201 # get a subset of scans by scanno (as listed in scan.summary())
[1118]202 newscan = scan.get_scan([0, 2, 7, 10])
[102]203 """
204 if scanid is None:
[718]205 if rcParams['verbose']:
[1118]206 print "Please specify a scan no or name to " \
207 "retrieve from the scantable"
[718]208 return
209 else:
210 raise RuntimeError("No scan given")
211
[102]212 try:
[946]213 bsel = self.get_selection()
214 sel = selector()
[102]215 if type(scanid) is str:
[946]216 sel.set_name(scanid)
217 self.set_selection(bsel+sel)
[876]218 scopy = self._copy()
[946]219 self.set_selection(bsel)
[876]220 return scantable(scopy)
[102]221 elif type(scanid) is int:
[946]222 sel.set_scans([scanid])
223 self.set_selection(bsel+sel)
[876]224 scopy = self._copy()
[946]225 self.set_selection(bsel)
[876]226 return scantable(scopy)
[381]227 elif type(scanid) is list:
[946]228 sel.set_scans(scanid)
229 self.set_selection(sel)
[876]230 scopy = self._copy()
[946]231 self.set_selection(bsel)
[876]232 return scantable(scopy)
[381]233 else:
[718]234 msg = "Illegal scanid type, use 'int' or 'list' of ints."
235 if rcParams['verbose']:
236 print msg
237 else:
238 raise TypeError(msg)
[102]239 except RuntimeError:
[718]240 if rcParams['verbose']: print "Couldn't find any match."
241 else: raise
[102]242
243 def __str__(self):
[1118]244 return Scantable._summary(self, True)
[102]245
[976]246 def summary(self, filename=None):
[102]247 """
248 Print a summary of the contents of this scantable.
249 Parameters:
250 filename: the name of a file to write the output to
251 Default - no file output
[381]252 (The 'verbose' option is not a parameter here; paged
253 output is controlled by the .asaprc 'verbose' setting.)
[102]254 """
[976]255 info = Scantable._summary(self, True)
256 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]257 if filename is not None:
[256]258 if filename == "":
259 filename = 'scantable_summary.txt'
[415]260 from os.path import expandvars, isdir
[411]261 filename = expandvars(filename)
[415]262 if not isdir(filename):
[413]263 data = open(filename, 'w')
264 data.write(info)
265 data.close()
266 else:
[718]267 msg = "Illegal file name '%s'." % (filename)
268 if rcParams['verbose']:
269 print msg
270 else:
271 raise IOError(msg)
272 if rcParams['verbose']:
[794]273 try:
274 from IPython.genutils import page as pager
275 except ImportError:
276 from pydoc import pager
277 pager(info)
[718]278 else:
279 return info
[710]280
[946]281
282 def get_selection(self):
283 """
[1005]284 Get the selection object currently set on this scantable.
285 Parameters:
286 none
287 Example:
288 sel = scan.get_selection()
289 sel.set_ifs(0) # select IF 0
290 scan.set_selection(sel) # apply modified selection
[946]291 """
292 return selector(self._getselection())
293
[1005]294 def set_selection(self, selection=selector()):
[946]295 """
[1005]296 Select a subset of the data. All following operations on this scantable
297 are only applied to this selection.
298 Parameters:
299 selection: a selector object (default unset the selection)
300 Examples:
301 sel = selector() # create a selection object
[1118]302 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]303 scan.set_selection(sel) # set the selection
304 scan.summary() # will only print summary of scanno 0 and 3
305 scan.set_selection() # unset the selection
[946]306 """
307 self._setselection(selection)
308
[1446]309 def get_row(self, row=0, insitu=None):
310 """
311 Select a row in the scantable.
312 Return a scantable with single row.
313 Parameters:
314 row: row no of integration, default is 0.
315 insitu: if False a new scantable is returned.
316 Otherwise, the scaling is done in-situ
317 The default is taken from .asaprc (False)
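Example (illustrative):
# return the 4th integration as a new single-row scantable
row3 = scan.get_row(row=3, insitu=False)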
318 """
319 if insitu is None: insitu = rcParams['insitu']
320 if not insitu:
321 workscan = self.copy()
322 else:
323 workscan = self
324 # Select a row
325 sel=selector()
326 sel.set_scans([workscan.getscan(row)])
327 sel.set_cycles([workscan.getcycle(row)])
328 sel.set_beams([workscan.getbeam(row)])
329 sel.set_ifs([workscan.getif(row)])
330 sel.set_polarisations([workscan.getpol(row)])
331 sel.set_name(workscan._getsourcename(row))
332 workscan.set_selection(sel)
333 if not workscan.nrow() == 1:
334 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
335 raise RuntimeError(msg)
336 del sel
337 if insitu:
338 self._assign(workscan)
339 else:
340 return workscan
341
[876]342 def stats(self, stat='stddev', mask=None):
[102]343 """
[135]344 Determine the specified statistic of the current beam/if/pol
[102]345 Takes a 'mask' as an optional parameter to specify which
346 channels should be excluded.
[1517]347 You can get min/max values with their
348 channels/frequencies/velocities by selecting stat='min_abc'
349 or 'max_abc'.
[102]350 Parameters:
[1517]351 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
[1515]352 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[135]353 mask: an optional mask specifying where the statistic
[102]354 should be determined.
355 Example:
[113]356 scan.set_unit('channel')
[1118]357 msk = scan.create_mask([100, 200], [500, 600])
[135]358 scan.stats(stat='mean', mask=m)
[102]359 """
360 if mask == None:
[876]361 mask = []
[1118]362 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]363 if not self._check_ifs():
[1118]364 raise ValueError("Cannot apply mask as the IFs have different "
365 "number of channels. Please use setselection() "
366 "to select individual IFs")
[256]367
[1517]368 if stat.lower().find('_abc') == -1:
[1515]369 statvals = self._math._stats(self, mask, stat)
[1517]370 getchan = False
[1515]371 sstat = str(stat)
372 else:
[1517]373 chan = self._math._minmaxchan(self, mask, stat)
374 getchan = True
[1515]375 statvals = []
[1517]376 sstat = stat.lower().strip('_abc')
[876]377 out = ''
378 axes = []
379 for i in range(self.nrow()):
380 axis = []
381 axis.append(self.getscan(i))
382 axis.append(self.getbeam(i))
383 axis.append(self.getif(i))
384 axis.append(self.getpol(i))
385 axis.append(self.getcycle(i))
386 axes.append(axis)
387 tm = self._gettime(i)
388 src = self._getsourcename(i)
[1515]389 xpos = ''
[1517]390 if getchan:
391 qx, qy = self.chan2data(rowno=i, chan=chan[i])
392 statvals.append(qy['value'])
[1515]393 xpos = '(x = %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
[876]394 out += 'Scan[%d] (%s) ' % (axis[0], src)
395 out += 'Time[%s]:\n' % (tm)
396 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
397 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
398 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
[1515]399 out += '= %3.3f ' % (statvals[i]) +xpos+'\n'
[876]400 out += "--------------------------------------------------\n"
[256]401
[876]402 if rcParams['verbose']:
403 print "--------------------------------------------------"
[1515]404 print " ", sstat
[876]405 print "--------------------------------------------------"
406 print out
[1295]407 #else:
408 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
409 # 'axes' : axes,
410 # 'data': statvals}
411 return statvals
[102]412
[1517]413 def chan2data(self, rowno=0, chan=0):
[1515]414 """
[1517]415 Returns channel/frequency/velocity and spectral value
[1515]416 at an arbitrary row and channel in the scantable.
417 Parameters:
418 rowno: a row number in the scantable. Default is the
419 first row, i.e. rowno=0
[1517]420 chan: a channel in the scantable. Default is the first
[1515]421 channel, i.e. chan=0
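Example (illustrative; the channel number is arbitrary):
qx, qy = scan.chan2data(rowno=0, chan=120)
print qx['value'], qx['unit'], qy['value'], qy['unit']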
422 """
[1517]423 if isinstance(rowno, int) and isinstance(chan, int):
[1515]424 x, xlbl = self.get_abcissa(rowno)
[1517]425 qx = {'unit': xlbl, 'value': x[chan]}
426 qy = {'unit': self.get_fluxunit(),
427 'value': self._getspectrum(rowno)[chan]}
428 return qx, qy
[1515]429
[1118]430 def stddev(self, mask=None):
[135]431 """
432 Determine the standard deviation of the current beam/if/pol
433 Takes a 'mask' as an optional parameter to specify which
434 channels should be excluded.
435 Parameters:
436 mask: an optional mask specifying where the standard
437 deviation should be determined.
438
439 Example:
440 scan.set_unit('channel')
[1118]441 msk = scan.create_mask([100, 200], [500, 600])
[135]442 scan.stddev(mask=m)
443 """
[1118]444 return self.stats(stat='stddev', mask=mask)
[135]445
[1003]446
[1259]447 def get_column_names(self):
[1003]448 """
449 Return a list of column names, which can be used for selection.
450 """
[1259]451 return list(Scantable.get_column_names(self))
[1003]452
[876]453 def get_tsys(self):
[113]454 """
455 Return the System temperatures.
456 Returns:
[876]457 a list of Tsys values for the current selection
[113]458 """
[256]459
[876]460 return self._row_callback(self._gettsys, "Tsys")
[256]461
[876]462 def _row_callback(self, callback, label):
463 axes = []
[1118]464 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]465 out = ""
[1118]466 outvec = []
[876]467 for i in range(self.nrow()):
468 axis = []
469 axis.append(self.getscan(i))
470 axis.append(self.getbeam(i))
471 axis.append(self.getif(i))
472 axis.append(self.getpol(i))
473 axis.append(self.getcycle(i))
474 axes.append(axis)
475 tm = self._gettime(i)
476 src = self._getsourcename(i)
477 out += 'Scan[%d] (%s) ' % (axis[0], src)
478 out += 'Time[%s]:\n' % (tm)
479 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
480 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
481 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
482 outvec.append(callback(i))
483 out += '= %3.3f\n' % (outvec[i])
484 out += "--------------------------------------------------\n"
485 if rcParams['verbose']:
486 print "--------------------------------------------------"
487 print " %s" % (label)
488 print "--------------------------------------------------"
489 print out
[1175]490 # disabled because the vector seems more useful
491 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
492 return outvec
[256]493
[1070]494 def _get_column(self, callback, row=-1):
495 """
496 """
497 if row == -1:
498 return [callback(i) for i in range(self.nrow())]
499 else:
500 if 0 <= row < self.nrow():
501 return callback(row)
[256]502
[1070]503
[1348]504 def get_time(self, row=-1, asdatetime=False):
[113]505 """
506 Get a list of time stamps for the observations.
[1348]507 Return a datetime object for each integration time stamp in the scantable.
[113]508 Parameters:
[1348]509 row: row no of integration. Default -1 return all rows
510 asdatetime: return values as datetime objects rather than strings
[113]511 Example:
512 none
513 """
[1175]514 from time import strptime
515 from datetime import datetime
[1457]516 times = self._get_column(self._gettime, row)
[1348]517 if not asdatetime:
[1457]518 return times
[1175]519 format = "%Y/%m/%d/%H:%M:%S"
520 if isinstance(times, list):
521 return [datetime(*strptime(i, format)[:6]) for i in times]
522 else:
523 return datetime(*strptime(times, format)[:6])
[102]524
[1348]525
526 def get_inttime(self, row=-1):
527 """
528 Get a list of integration times for the observations.
529 Return a time in seconds for each integration in the scantable.
530 Parameters:
531 row: row no of integration. Default -1 return all rows.
532 Example:
533 none
534 """
535 return self._get_column(self._getinttime, row)
536
537
[714]538 def get_sourcename(self, row=-1):
539 """
[794]540 Get a list source names for the observations.
[714]541 Return a string for each integration in the scantable.
542 Parameters:
[1348]543 row: row no of integration. Default -1 return all rows.
[714]544 Example:
545 none
546 """
[1070]547 return self._get_column(self._getsourcename, row)
[714]548
[794]549 def get_elevation(self, row=-1):
550 """
551 Get a list of elevations for the observations.
552 Return a float for each integration in the scantable.
553 Parameters:
[1348]554 row: row no of integration. Default -1 return all rows.
[794]555 Example:
556 none
557 """
[1070]558 return self._get_column(self._getelevation, row)
[794]559
560 def get_azimuth(self, row=-1):
561 """
562 Get a list of azimuths for the observations.
563 Return a float for each integration in the scantable.
564 Parameters:
[1348]565 row: row no of integration. Default -1 return all rows.
[794]566 Example:
567 none
568 """
[1070]569 return self._get_column(self._getazimuth, row)
[794]570
571 def get_parangle(self, row=-1):
572 """
573 Get a list of parallactic angles for the observations.
574 Return a float for each integration in the scantable.
575 Parameters:
[1348]576 row: row no of integration. Default -1 return all rows.
[794]577 Example:
578 none
579 """
[1070]580 return self._get_column(self._getparangle, row)
[794]581
[1070]582 def get_direction(self, row=-1):
583 """
584 Get a list of Positions on the sky (direction) for the observations.
585 Return a direction for each integration in the scantable.
586 Parameters:
587 row: row no of integration. Default -1 return all rows
588 Example:
589 none
590 """
591 return self._get_column(self._getdirection, row)
592
[1389]593 def get_directionval(self, row=-1):
594 """
595 Get a list of Positions on the sky (direction) for the observations.
596 Return the direction values for each integration in the scantable.
597 Parameters:
598 row: row no of integration. Default -1 return all rows
599 Example:
600 none
601 """
602 return self._get_column(self._getdirectionvec, row)
603
[102]604 def set_unit(self, unit='channel'):
605 """
606 Set the unit for all following operations on this scantable
607 Parameters:
608 unit: optional unit, default is 'channel'
[1118]609 one of '*Hz', 'km/s', 'channel', ''
[102]610 """
[484]611 varlist = vars()
[1118]612 if unit in ['', 'pixel', 'channel']:
[113]613 unit = ''
614 inf = list(self._getcoordinfo())
615 inf[0] = unit
616 self._setcoordinfo(inf)
[1118]617 self._add_history("set_unit", varlist)
[113]618
[484]619 def set_instrument(self, instr):
[358]620 """
[1348]621 Set the instrument for subsequent processing.
[358]622 Parameters:
[710]623 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]624 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]625 """
626 self._setInstrument(instr)
[1118]627 self._add_history("set_instrument", vars())
[718]628 print_log()
[358]629
[1190]630 def set_feedtype(self, feedtype):
631 """
632 Overwrite the feed type, which might not be set correctly.
633 Parameters:
634 feedtype: 'linear' or 'circular'
635 """
636 self._setfeedtype(feedtype)
637 self._add_history("set_feedtype", vars())
638 print_log()
639
[276]640 def set_doppler(self, doppler='RADIO'):
641 """
642 Set the doppler for all following operations on this scantable.
643 Parameters:
644 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
645 """
[484]646 varlist = vars()
[276]647 inf = list(self._getcoordinfo())
648 inf[2] = doppler
649 self._setcoordinfo(inf)
[1118]650 self._add_history("set_doppler", vars())
[718]651 print_log()
[710]652
[226]653 def set_freqframe(self, frame=None):
[113]654 """
655 Set the frame type of the Spectral Axis.
656 Parameters:
[591]657 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]658 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
659 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]660 Examples:
661 scan.set_freqframe('BARY')
662 """
[484]663 if frame is None: frame = rcParams['scantable.freqframe']
664 varlist = vars()
[1118]665 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
666 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]667
[989]668 if frame in valid:
[113]669 inf = list(self._getcoordinfo())
670 inf[1] = frame
671 self._setcoordinfo(inf)
[1118]672 self._add_history("set_freqframe", varlist)
[102]673 else:
[1118]674 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]675 if rcParams['verbose']:
676 print msg
677 else:
678 raise TypeError(msg)
679 print_log()
[710]680
[989]681 def set_dirframe(self, frame=""):
682 """
683 Set the frame type of the Direction on the sky.
684 Parameters:
685 frame: an optional frame type, default ''. Valid frames are:
686 'J2000', 'B1950', 'GALACTIC'
687 Examples:
688 scan.set_dirframe('GALACTIC')
689 """
690 varlist = vars()
691 try:
692 Scantable.set_dirframe(self, frame)
[1118]693 except RuntimeError, msg:
[989]694 if rcParams['verbose']:
695 print msg
696 else:
697 raise
[1118]698 self._add_history("set_dirframe", varlist)
[989]699
[113]700 def get_unit(self):
701 """
702 Get the default unit set in this scantable
703 Returns:
704 A unit string
705 """
706 inf = self._getcoordinfo()
707 unit = inf[0]
708 if unit == '': unit = 'channel'
709 return unit
[102]710
[158]711 def get_abcissa(self, rowno=0):
[102]712 """
[158]713 Get the abcissa in the current coordinate setup for the currently
[113]714 selected Beam/IF/Pol
715 Parameters:
[226]716 rowno: an optional row number in the scantable. Default is the
717 first row, i.e. rowno=0
[113]718 Returns:
[1348]719 The abcissa values and the format string (as a tuple)
[113]720 """
[256]721 abc = self._getabcissa(rowno)
[710]722 lbl = self._getabcissalabel(rowno)
[718]723 print_log()
[158]724 return abc, lbl
[113]725
[1401]726 def flag(self, mask=None, unflag=False):
[1001]727 """
728 Flag the selected data using an optional channel mask.
729 Parameters:
730 mask: an optional channel mask, created with create_mask. Default
731 (no mask) is all channels.
[1401]732 unflag: if True, unflag the data
[1001]733 """
734 varlist = vars()
[1118]735 if mask is None:
736 mask = []
[1001]737 try:
[1401]738 self._flag(mask, unflag)
[1118]739 except RuntimeError, msg:
[1001]740 if rcParams['verbose']:
741 print msg
742 return
743 else: raise
744 self._add_history("flag", varlist)
745
[1203]746 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]747 """
748 Flag the data in 'lag' space by providing a frequency to remove.
749 Flagged data in the scantable gets set to 0.0 before the fft.
750 No taper is applied.
751 Parameters:
[1348]752 frequency: the frequency (really a period within the bandwidth)
753 to remove
754 width: the width of the frequency to remove, to remove a
755 range of frequencies aroung the centre.
[1203]756 unit: the frequency unit (default "GHz")
757 Notes:
[1348]758 It is recommended to flag edges of the band or strong
759 signals beforehand.
[1192]760 """
761 if insitu is None: insitu = rcParams['insitu']
762 self._math._setinsitu(insitu)
763 varlist = vars()
[1370]764 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]765 if not base.has_key(unit):
766 raise ValueError("%s is not a valid unit." % unit)
767 try:
[1200]768 s = scantable(self._math._lag_flag(self, frequency*base[unit],
769 width*base[unit]))
[1192]770 except RuntimeError, msg:
771 if rcParams['verbose']:
772 print msg
773 return
774 else: raise
775 s._add_history("lag_flag", varlist)
776 print_log()
777 if insitu:
778 self._assign(s)
779 else:
780 return s
[1001]781
[1192]782
[113]783 def create_mask(self, *args, **kwargs):
784 """
[1118]785 Compute and return a mask based on [min, max] windows.
[189]786 The specified windows are to be INCLUDED, when the mask is
[113]787 applied.
[102]788 Parameters:
[1118]789 [min, max], [min2, max2], ...
[1024]790 Pairs of start/end points (inclusive) specifying the regions
[102]791 to be masked
[189]792 invert: optional argument. If specified as True,
793 return an inverted mask, i.e. the regions
794 specified are EXCLUDED
[513]795 row: create the mask using the specified row for
796 unit conversions, default is row=0
797 only necessary if frequency varies over rows.
[102]798 Example:
[113]799 scan.set_unit('channel')
800 a)
[1118]801 msk = scan.create_mask([400, 500], [800, 900])
[189]802 # masks everything outside 400 and 500
[113]803 # and 800 and 900 in the unit 'channel'
804
805 b)
[1118]806 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]807 # masks the regions between 400 and 500
[113]808 # and 800 and 900 in the unit 'channel'
[1024]809 c)
810 mask only channel 400
[1118]811 msk = scan.create_mask([400, 400])
[102]812 """
[513]813 row = 0
814 if kwargs.has_key("row"):
815 row = kwargs.get("row")
816 data = self._getabcissa(row)
[113]817 u = self._getcoordinfo()[0]
[718]818 if rcParams['verbose']:
[113]819 if u == "": u = "channel"
[718]820 msg = "The current mask window unit is %s" % u
[1118]821 i = self._check_ifs()
822 if not i:
[876]823 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]824 asaplog.push(msg)
[102]825 n = self.nchan()
[1295]826 msk = _n_bools(n, False)
[710]827 # test if args is a 'list' or a 'normal *args - UGLY!!!
828
[1118]829 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
830 and args or args[0]
[710]831 for window in ws:
[102]832 if (len(window) != 2 or window[0] > window[1] ):
[1118]833 raise TypeError("A window needs to be defined as [min, max]")
[102]834 for i in range(n):
[1024]835 if data[i] >= window[0] and data[i] <= window[1]:
[1295]836 msk[i] = True
[113]837 if kwargs.has_key('invert'):
838 if kwargs.get('invert'):
[1295]839 msk = mask_not(msk)
[718]840 print_log()
[102]841 return msk
[710]842
[1446]843 def get_masklist(self, mask=None, row=0):
[256]844 """
[1446]845 Compute and return a list of mask windows, [min, max].
846 Parameters:
847 mask: channel mask, created with create_mask.
848 row: calculate the masklist using the specified row
849 for unit conversions, default is row=0
850 only necessary if frequency varies over rows.
851 Returns:
852 [min, max], [min2, max2], ...
853 Pairs of start/end points (inclusive) specifying
854 the masked regions
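Example (illustrative; channel ranges are arbitrary):
msk = scan.create_mask([400, 500], [800, 900])
windows = scan.get_masklist(msk) # e.g. [[400.0, 500.0], [800.0, 900.0]]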
855 """
856 if not (isinstance(mask,list) or isinstance(mask, tuple)):
857 raise TypeError("The mask should be list or tuple.")
858 if len(mask) < 2:
859 raise TypeError("The mask elements should be > 1")
860 if self.nchan() != len(mask):
861 msg = "Number of channels in scantable != number of mask elements"
862 raise TypeError(msg)
863 data = self._getabcissa(row)
864 u = self._getcoordinfo()[0]
865 if rcParams['verbose']:
866 if u == "": u = "channel"
867 msg = "The current mask window unit is %s" % u
868 i = self._check_ifs()
869 if not i:
870 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
871 asaplog.push(msg)
872 masklist=[]
873 ist, ien = None, None
874 ist, ien=self.get_mask_indices(mask)
875 if ist is not None and ien is not None:
876 for i in xrange(len(ist)):
877 range=[data[ist[i]],data[ien[i]]]
878 range.sort()
879 masklist.append([range[0],range[1]])
880 return masklist
881
882 def get_mask_indices(self, mask=None):
883 """
884 Compute and Return lists of mask start indices and mask end indices.
885 Parameters:
886 mask: channel mask, created with create_mask.
887 Returns:
888 List of mask start indices and that of mask end indices,
889 i.e., [istart1,istart2,....], [iend1,iend2,....].
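Example (illustrative; channel range is arbitrary):
msk = scan.create_mask([400, 500])
ist, ien = scan.get_mask_indices(msk) # start and end channel indices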
890 """
891 if not (isinstance(mask,list) or isinstance(mask, tuple)):
892 raise TypeError("The mask should be list or tuple.")
893 if len(mask) < 2:
894 raise TypeError("The mask elements should be > 1")
895 istart=[]
896 iend=[]
897 if mask[0]: istart.append(0)
898 for i in range(len(mask)-1):
899 if not mask[i] and mask[i+1]:
900 istart.append(i+1)
901 elif mask[i] and not mask[i+1]:
902 iend.append(i)
903 if mask[len(mask)-1]: iend.append(len(mask)-1)
904 if len(istart) != len(iend):
905 raise RuntimeError("Numbers of mask start != mask end.")
906 for i in range(len(istart)):
907 if istart[i] > iend[i]:
908 raise RuntimeError("Mask start index > mask end index")
909 break
910 return istart,iend
911
912# def get_restfreqs(self):
913# """
914# Get the restfrequency(s) stored in this scantable.
915# The return value(s) are always of unit 'Hz'
916# Parameters:
917# none
918# Returns:
919# a list of doubles
920# """
921# return list(self._getrestfreqs())
922
923 def get_restfreqs(self, ids=None):
924 """
[256]925 Get the restfrequency(s) stored in this scantable.
926 The return value(s) are always of unit 'Hz'
927 Parameters:
[1446]928 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
929 be retrieved
[256]930 Returns:
[1446]931 dictionary containing ids and a list of doubles for each id
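Example (illustrative):
rfall = scan.get_restfreqs() # {MOLECULE_ID: [rest freqs in Hz], ...}
rf0 = scan.get_restfreqs(0) # list of rest frequencies for id 0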
[256]932 """
[1446]933 if ids is None:
934 rfreqs={}
935 idlist = self.getmolnos()
936 for i in idlist:
937 rfreqs[i]=list(self._getrestfreqs(i))
938 return rfreqs
939 else:
940 if type(ids)==list or type(ids)==tuple:
941 rfreqs={}
942 for i in ids:
943 rfreqs[i]=list(self._getrestfreqs(i))
944 return rfreqs
945 else:
946 return list(self._getrestfreqs(ids))
947 #return list(self._getrestfreqs(ids))
[102]948
[931]949 def set_restfreqs(self, freqs=None, unit='Hz'):
950 """
[1446]951 ********NEED TO BE UPDATED begin************
[931]952 Set or replace the restfrequency(s) specified.
953 If the 'freqs' argument holds a scalar,
954 then that rest frequency will be applied to all the selected
955 data. If the 'freqs' argument holds
956 a vector, then it MUST be of equal or smaller length than
957 the number of IFs (and the available restfrequencies will be
958 replaced by this vector). In this case, *all* data have
959 the restfrequency set per IF according
960 to the corresponding value you give in the 'freqs' vector.
[1118]961 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]962 IF 1 gets restfreq 2e9.
[1446]963 ********NEED TO BE UPDATED end************
[1153]964 You can also specify the frequencies via a linecatalog.
965
[931]966 Parameters:
967 freqs: list of rest frequency values or string identifiers
968 unit: unit for rest frequency (default 'Hz')
[402]969
[931]970 Example:
[1446]971 # set the given restfrequency for all currently selected IFs
[931]972 scan.set_restfreqs(freqs=1.4e9)
[1446]973 # set multiple restfrequencies to all the selected data
974 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
975 # If the number of IFs in the data is >= 2 the IF0 gets the first
976 # value IF1 the second... NOTE that freqs needs to be
977 # specified in list of list (e.g. [[],[],...] ).
978 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]979 #set the given restfrequency for the whole table (by name)
980 scan.set_restfreqs(freqs="OH1667")
[391]981
[931]982 Note:
983 To do more sophisticated restfrequency setting, e.g. on a
984 source and IF basis, use scantable.set_selection() before using
985 this function.
986 # provided your scantable is called scan
987 selection = selector()
988 selection.set_name("ORION*")
989 selection.set_ifs([1])
990 scan.set_selection(selection)
991 scan.set_restfreqs(freqs=86.6e9)
992
993 """
994 varlist = vars()
[1157]995 from asap import linecatalog
996 # simple value
[1118]997 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]998 # TT mod
999 #self._setrestfreqs(freqs, "",unit)
1000 self._setrestfreqs([freqs], [""],unit)
[1157]1001 # list of values
[1118]1002 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1003 # list values are scalars
[1118]1004 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]1005 self._setrestfreqs(freqs, [""],unit)
[1157]1006 # list values are tuples, (value, name)
1007 elif isinstance(freqs[-1], dict):
[1446]1008 #sel = selector()
1009 #savesel = self._getselection()
1010 #iflist = self.getifnos()
1011 #for i in xrange(len(freqs)):
1012 # sel.set_ifs(iflist[i])
1013 # self._setselection(sel)
1014 # self._setrestfreqs(freqs[i], "",unit)
1015 #self._setselection(savesel)
1016 self._setrestfreqs(freqs["value"],
1017 freqs["name"], "MHz")
1018 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1019 sel = selector()
1020 savesel = self._getselection()
[1322]1021 iflist = self.getifnos()
[1446]1022 if len(freqs)>len(iflist):
1023 raise ValueError("number of elements in list of list exceeds the current IF selections")
[1157]1024 for i in xrange(len(freqs)):
[1322]1025 sel.set_ifs(iflist[i])
[1259]1026 self._setselection(sel)
[1157]1027 self._setrestfreqs(freqs[i]["value"],
1028 freqs[i]["name"], "MHz")
1029 self._setselection(savesel)
1030 # freqs are to be taken from a linecatalog
[1153]1031 elif isinstance(freqs, linecatalog):
1032 sel = selector()
1033 savesel = self._getselection()
1034 for i in xrange(freqs.nrow()):
[1322]1035 sel.set_ifs(iflist[i])
[1153]1036 self._setselection(sel)
1037 self._setrestfreqs(freqs.get_frequency(i),
1038 freqs.get_name(i), "MHz")
1039 # ensure that we are not iterating past nIF
1040 if i == self.nif()-1: break
1041 self._setselection(savesel)
[931]1042 else:
1043 return
1044 self._add_history("set_restfreqs", varlist)
1045
[1360]1046 def shift_refpix(self, delta):
1047 """
1048 Shift the reference pixel of the Spectral Coordinate by an
1049 integer amount.
1050 Parameters:
1051 delta: the amount to shift by
1052 Note:
1053 Be careful using this with broadband data.
1054 """
1055 Scantable.shift(self, delta)
[931]1056
[1259]1057 def history(self, filename=None):
1058 """
1059 Print the history. Optionally to a file.
[1348]1060 Parameters:
1061 filename: The name of the file to save the history to.
[1259]1062 """
[484]1063 hist = list(self._gethistory())
[794]1064 out = "-"*80
[484]1065 for h in hist:
[489]1066 if h.startswith("---"):
[794]1067 out += "\n"+h
[489]1068 else:
1069 items = h.split("##")
1070 date = items[0]
1071 func = items[1]
1072 items = items[2:]
[794]1073 out += "\n"+date+"\n"
1074 out += "Function: %s\n Parameters:" % (func)
[489]1075 for i in items:
1076 s = i.split("=")
[1118]1077 out += "\n %s = %s" % (s[0], s[1])
[794]1078 out += "\n"+"-"*80
[1259]1079 if filename is not None:
1080 if filename == "":
1081 filename = 'scantable_history.txt'
1082 import os
1083 filename = os.path.expandvars(os.path.expanduser(filename))
1084 if not os.path.isdir(filename):
1085 data = open(filename, 'w')
1086 data.write(out)
1087 data.close()
1088 else:
1089 msg = "Illegal file name '%s'." % (filename)
1090 if rcParams['verbose']:
1091 print msg
1092 else:
1093 raise IOError(msg)
1094 if rcParams['verbose']:
1095 try:
1096 from IPython.genutils import page as pager
1097 except ImportError:
1098 from pydoc import pager
1099 pager(out)
1100 else:
1101 return out
[484]1102 return
[513]1103 #
1104 # Maths business
1105 #
1106
[931]1107 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1108 """
[1070]1109 Return the (time) weighted average of a scan.
[513]1110 Note:
[1070]1111 in channels only - align if necessary
[513]1112 Parameters:
1113 mask: an optional mask (only used for 'var' and 'tsys'
1114 weighting)
[558]1115 scanav: True averages each scan separately
1116 False (default) averages all scans together,
[1099]1117 weight: Weighting scheme.
1118 'none' (mean no weight)
1119 'var' (1/var(spec) weighted)
1120 'tsys' (1/Tsys**2 weighted)
1121 'tint' (integration time weighted)
1122 'tintsys' (Tint/Tsys**2)
1123 'median' ( median averaging)
[535]1124 The default is 'tint'
[931]1125 align: align the spectra in velocity before averaging. It takes
1126 the time of the first spectrum as reference time.
[513]1127 Example:
1128 # time average the scantable without using a mask
[710]1129 newscan = scan.average_time()
[513]1130 """
1131 varlist = vars()
[976]1132 if weight is None: weight = 'TINT'
[513]1133 if mask is None: mask = ()
[1099]1134 if scanav: scanav = "SCAN"
1135 else: scanav = "NONE"
[1118]1136 scan = (self, )
[989]1137 try:
[1118]1138 if align:
1139 scan = (self.freq_align(insitu=False), )
1140 s = None
1141 if weight.upper() == 'MEDIAN':
1142 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1143 scanav))
1144 else:
1145 s = scantable(self._math._average(scan, mask, weight.upper(),
1146 scanav))
1147 except RuntimeError, msg:
[989]1148 if rcParams['verbose']:
1149 print msg
1150 return
1151 else: raise
[1099]1152 s._add_history("average_time", varlist)
[718]1153 print_log()
[513]1154 return s
[710]1155
[876]1156 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1157 """
1158 Return a scan where all spectra are converted to either
1159 Jansky or Kelvin depending upon the flux units of the scan table.
1160 By default the function tries to look the values up internally.
1161 If it can't find them (or if you want to over-ride), you must
1162 specify EITHER jyperk OR eta (and D which it will try to look up
1163 also if you don't set it). jyperk takes precedence if you set both.
1164 Parameters:
1165 jyperk: the Jy / K conversion factor
1166 eta: the aperture efficiency
1167 d: the geometric diameter (metres)
1168 insitu: if False a new scantable is returned.
1169 Otherwise, the scaling is done in-situ
1170 The default is taken from .asaprc (False)
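Example (illustrative; the numbers are arbitrary):
# convert using an explicit Jy/K factor
jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
# or derive the factor from the efficiency and dish diameter
jyscan = scan.convert_flux(eta=0.58, d=22.0, insitu=False)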
1171 """
1172 if insitu is None: insitu = rcParams['insitu']
[876]1173 self._math._setinsitu(insitu)
[513]1174 varlist = vars()
1175 if jyperk is None: jyperk = -1.0
1176 if d is None: d = -1.0
1177 if eta is None: eta = -1.0
[876]1178 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1179 s._add_history("convert_flux", varlist)
1180 print_log()
1181 if insitu: self._assign(s)
1182 else: return s
[513]1183
[876]1184 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1185 """
1186 Return a scan after applying a gain-elevation correction.
1187 The correction can be made via either a polynomial or a
1188 table-based interpolation (and extrapolation if necessary).
1189 You specify polynomial coefficients, an ascii table or neither.
1190 If you specify neither, then a polynomial correction will be made
1191 with built in coefficients known for certain telescopes (an error
1192 will occur if the instrument is not known).
1193 The data and Tsys are *divided* by the scaling factors.
1194 Parameters:
1195 poly: Polynomial coefficients (default None) to compute a
1196 gain-elevation correction as a function of
1197 elevation (in degrees).
1198 filename: The name of an ascii file holding correction factors.
1199 The first row of the ascii file must give the column
1200 names and these MUST include columns
1201 "ELEVATION" (degrees) and "FACTOR" (multiply data
1202 by this) somewhere.
1203 The second row must give the data type of the
1204 column. Use 'R' for Real and 'I' for Integer.
1205 An example file would be
1206 (actual factors are arbitrary) :
1207
1208 TIME ELEVATION FACTOR
1209 R R R
1210 0.1 0 0.8
1211 0.2 20 0.85
1212 0.3 40 0.9
1213 0.4 60 0.85
1214 0.5 80 0.8
1215 0.6 90 0.75
1216 method: Interpolation method when correcting from a table.
1217 Values are "nearest", "linear" (default), "cubic"
1218 and "spline"
1219 insitu: if False a new scantable is returned.
1220 Otherwise, the scaling is done in-situ
1221 The default is taken from .asaprc (False)
1222 """
1223
1224 if insitu is None: insitu = rcParams['insitu']
[876]1225 self._math._setinsitu(insitu)
[513]1226 varlist = vars()
1227 if poly is None:
[1118]1228 poly = ()
[513]1229 from os.path import expandvars
1230 filename = expandvars(filename)
[876]1231 s = scantable(self._math._gainel(self, poly, filename, method))
1232 s._add_history("gain_el", varlist)
1233 print_log()
1234 if insitu: self._assign(s)
1235 else: return s
[710]1236
[931]1237 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1238 """
1239 Return a scan where all rows have been aligned in frequency/velocity.
1240 The alignment frequency frame (e.g. LSRK) is that set by function
1241 set_freqframe.
1242 Parameters:
1243 reftime: reference time to align at. By default, the time of
1244 the first row of data is used.
1245 method: Interpolation method for regridding the spectra.
1246 Choose from "nearest", "linear", "cubic" (default)
1247 and "spline"
1248 insitu: if False a new scantable is returned.
1249 Otherwise, the scaling is done in-situ
1250 The default is taken from .asaprc (False)
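Example (illustrative):
# align all rows at the time of the first integration,
# returning a new scantable
aligned = scan.freq_align(insitu=False)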
1251 """
[931]1252 if insitu is None: insitu = rcParams["insitu"]
[876]1253 self._math._setinsitu(insitu)
[513]1254 varlist = vars()
[931]1255 if reftime is None: reftime = ""
1256 s = scantable(self._math._freq_align(self, reftime, method))
[876]1257 s._add_history("freq_align", varlist)
1258 print_log()
1259 if insitu: self._assign(s)
1260 else: return s
[513]1261
[876]1262 def opacity(self, tau, insitu=None):
[513]1263 """
1264 Apply an opacity correction. The data
1265 and Tsys are multiplied by the correction factor.
1266 Parameters:
1267 tau: Opacity from which the correction factor is
1268 exp(tau*ZD)
1269 where ZD is the zenith-distance
1270 insitu: if False a new scantable is returned.
1271 Otherwise, the scaling is done in-situ
1272 The default is taken from .asaprc (False)
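Example (illustrative; the opacity value is arbitrary):
corrected = scan.opacity(tau=0.083, insitu=False)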
1273 """
1274 if insitu is None: insitu = rcParams['insitu']
[876]1275 self._math._setinsitu(insitu)
[513]1276 varlist = vars()
[876]1277 s = scantable(self._math._opacity(self, tau))
1278 s._add_history("opacity", varlist)
1279 print_log()
1280 if insitu: self._assign(s)
1281 else: return s
[513]1282
1283 def bin(self, width=5, insitu=None):
1284 """
1285 Return a scan where all spectra have been binned up.
[1348]1286 Parameters:
[513]1287 width: The bin width (default=5) in pixels
1288 insitu: if False a new scantable is returned.
1289 Otherwise, the scaling is done in-situ
1290 The default is taken from .asaprc (False)
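Example (illustrative):
binned = scan.bin(width=10, insitu=False)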
1291 """
1292 if insitu is None: insitu = rcParams['insitu']
[876]1293 self._math._setinsitu(insitu)
[513]1294 varlist = vars()
[876]1295 s = scantable(self._math._bin(self, width))
[1118]1296 s._add_history("bin", varlist)
[876]1297 print_log()
1298 if insitu: self._assign(s)
1299 else: return s
[513]1300
[710]1301
[513]1302 def resample(self, width=5, method='cubic', insitu=None):
1303 """
[1348]1304 Return a scan where all spectra have been resampled.
1305
1306 Parameters:
[513]1307 width: The bin width (default=5) in pixels
1308 method: Interpolation method when correcting from a table.
1309 Values are "nearest", "linear", "cubic" (default)
1310 and "spline"
1311 insitu: if False a new scantable is returned.
1312 Otherwise, the scaling is done in-situ
1313 The default is taken from .asaprc (False)
1314 """
1315 if insitu is None: insitu = rcParams['insitu']
[876]1316 self._math._setinsitu(insitu)
[513]1317 varlist = vars()
[876]1318 s = scantable(self._math._resample(self, method, width))
[1118]1319 s._add_history("resample", varlist)
[876]1320 print_log()
1321 if insitu: self._assign(s)
1322 else: return s
[513]1323
1324
[946]1325 def average_pol(self, mask=None, weight='none'):
1326 """
1327 Average the Polarisations together.
1328 Parameters:
1329 mask: An optional mask defining the region, where the
1330 averaging will be applied. The output will have all
1331 specified points masked.
1332 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1333 weighted), or 'tsys' (1/Tsys**2 weighted)
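Example (illustrative):
ipol = scan.average_pol(weight='tsys')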
1334 """
1335 varlist = vars()
1336 if mask is None:
1337 mask = ()
[1010]1338 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1339 s._add_history("average_pol", varlist)
[946]1340 print_log()
[992]1341 return s
[513]1342
[1145]1343 def average_beam(self, mask=None, weight='none'):
1344 """
1345 Average the Beams together.
1346 Parameters:
1347 mask: An optional mask defining the region, where the
1348 averaging will be applied. The output will have all
1349 specified points masked.
1350 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1351 weighted), or 'tsys' (1/Tsys**2 weighted)
1352 """
1353 varlist = vars()
1354 if mask is None:
1355 mask = ()
1356 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1357 s._add_history("average_beam", varlist)
1358 print_log()
1359 return s
1360
[992]1361 def convert_pol(self, poltype=None):
1362 """
1363 Convert the data to a different polarisation type.
1364 Parameters:
1365 poltype: The new polarisation type. Valid types are:
1366 "linear", "stokes" and "circular"
1367 """
1368 varlist = vars()
1369 try:
1370 s = scantable(self._math._convertpol(self, poltype))
[1118]1371 except RuntimeError, msg:
[992]1372 if rcParams['verbose']:
[1118]1373 print msg
1374 return
[992]1375 else:
1376 raise
[1118]1377 s._add_history("convert_pol", varlist)
[992]1378 print_log()
1379 return s
1380
[876]1381 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1382 """
1383 Smooth the spectrum by the specified kernel (conserving flux).
1384 Parameters:
1385 kernel: The type of smoothing kernel. Select from
[1373]1386 'hanning' (default), 'gaussian', 'boxcar' and
1387 'rmedian'
[513]1388 width: The width of the kernel in pixels. For hanning this is
1389 ignored, otherwise it defaults to 5 pixels.
1390 For 'gaussian' it is the Full Width Half
1391 Maximum. For 'boxcar' it is the full width.
[1373]1392 For 'rmedian' it is the half width.
[513]1393 insitu: if False a new scantable is returned.
1394 Otherwise, the scaling is done in-situ
1395 The default is taken from .asaprc (False)
1396 Example:
1397 none
1398 """
1399 if insitu is None: insitu = rcParams['insitu']
[876]1400 self._math._setinsitu(insitu)
[513]1401 varlist = vars()
[1118]1402 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1403 s._add_history("smooth", varlist)
1404 print_log()
1405 if insitu: self._assign(s)
1406 else: return s
[513]1407
[876]1408
[1389]1409 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1410 """
1411 Return a scan which has been baselined (all rows) by a polynomial.
1412 Parameters:
[794]1413 mask: an optional mask
1414 order: the order of the polynomial (default is 0)
[1061]1415 plot: plot the fit and the residual. In this case each
1416 individual fit has to be approved by typing 'y'
1417 or 'n'
[1389]1418 uselin: use linear polynomial fit
[794]1419 insitu: if False a new scantable is returned.
1420 Otherwise, the scaling is done in-situ
1421 The default is taken from .asaprc (False)
[513]1422 Example:
1423 # return a scan baselined by a third order polynomial,
1424 # not using a mask
1425 bscan = scan.poly_baseline(order=3)
[579]1426 """
[513]1427 if insitu is None: insitu = rcParams['insitu']
1428 varlist = vars()
1429 if mask is None:
[1295]1430 mask = [True for i in xrange(self.nchan(-1))]
[513]1431 from asap.asapfitter import fitter
[1217]1432 try:
1433 f = fitter()
1434 f.set_scan(self, mask)
[1389]1435 if uselin:
1436 f.set_function(lpoly=order)
1437 else:
1438 f.set_function(poly=order)
[1217]1439 s = f.auto_fit(insitu, plot=plot)
[1446]1440 # Save parameters of baseline fits as a class attribute.
1441 # NOTICE: It does not reflect changes in scantable!
1442 self.blpars = f.blpars
[1217]1443 s._add_history("poly_baseline", varlist)
1444 print_log()
1445 if insitu: self._assign(s)
1446 else: return s
1447 except RuntimeError:
1448 msg = "The fit failed, possibly because it didn't converge."
1449 if rcParams['verbose']:
1450 print msg
1451 return
1452 else:
1453 raise RuntimeError(msg)
[513]1454
[1217]1455
[1118]1456 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1457 threshold=3, chan_avg_limit=1, plot=False,
1458 insitu=None):
[880]1459 """
1460 Return a scan which has been baselined (all rows) by a polynomial.
1461 Spectral lines are detected first using linefinder and masked out
1462 to avoid them affecting the baseline solution.
1463
1464 Parameters:
1465 mask: an optional mask retrieved from scantable
1466 edge: an optional number of channels to drop at
1467 the edge of spectrum. If only one value is
1468 specified, the same number will be dropped from
1469 both sides of the spectrum. Default is to keep
[907]1470 all channels. Nested tuples represent individual
[976]1471 edge selection for different IFs (a number of spectral
1472 channels can be different)
[880]1473 order: the order of the polynomial (default is 0)
1474 threshold: the threshold used by line finder. It is better to
1475 keep it large as only strong lines affect the
1476 baseline solution.
[1280]1477 chan_avg_limit:
1478 a maximum number of consecutive spectral channels to
1479 average during the search of weak and broad lines.
1480 The default is no averaging (and no search for weak
1481 lines). If such lines can affect the fitted baseline
1482 (e.g. a high order polynomial is fitted), increase this
1483 parameter (usually values up to 8 are reasonable). Most
1484 users of this method should find the default value
1485 sufficient.
[1061]1486 plot: plot the fit and the residual. In this case each
1487 individual fit has to be approved by typing 'y'
1488 or 'n'
[880]1489 insitu: if False a new scantable is returned.
1490 Otherwise, the scaling is done in-situ
1491 The default is taken from .asaprc (False)
1492
1493 Example:
1494 scan2=scan.auto_poly_baseline(order=7)
1495 """
1496 if insitu is None: insitu = rcParams['insitu']
1497 varlist = vars()
1498 from asap.asapfitter import fitter
1499 from asap.asaplinefind import linefinder
1500 from asap import _is_sequence_or_number as _is_valid
1501
[976]1502 # check whether edge is set up for each IF individually
[1118]1503 individualedge = False;
1504 if len(edge) > 1:
1505 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1506 individualedge = True;
[907]1507
[1118]1508 if not _is_valid(edge, int) and not individualedge:
[909]1509 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1510 pair of integers specified as a tuple. Nested tuples are allowed \
1511 to make individual selection for different IFs."
[919]1512
[1118]1513 curedge = (0, 0)
1514 if individualedge:
1515 for edgepar in edge:
1516 if not _is_valid(edgepar, int):
1517 raise ValueError, "Each element of the 'edge' tuple has \
1518 to be a pair of integers or an integer."
[907]1519 else:
[1118]1520 curedge = edge;
[880]1521
1522 # setup fitter
1523 f = fitter()
1524 f.set_function(poly=order)
1525
1526 # setup line finder
[1118]1527 fl = linefinder()
[1268]1528 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1529
1530 if not insitu:
[1118]1531 workscan = self.copy()
[880]1532 else:
[1118]1533 workscan = self
[880]1534
[907]1535 fl.set_scan(workscan)
1536
[1118]1537 rows = range(workscan.nrow())
[1446]1538 # Save parameters of baseline fits & masklists as a class attribute.
1539 # NOTICE: It does not reflect changes in scantable!
1540 if len(rows) > 0:
1541 self.blpars=[]
1542 self.masklists=[]
[880]1543 asaplog.push("Processing:")
1544 for r in rows:
[1118]1545 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1546 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1547 workscan.getpol(r), workscan.getcycle(r))
[880]1548 asaplog.push(msg, False)
[907]1549
[976]1550 # figure out edge parameter
[1118]1551 if individualedge:
1552 if len(edge) >= workscan.getif(r):
1553 raise RuntimeError, "Number of edge elements appear to " \
1554 "be less than the number of IFs"
1555 curedge = edge[workscan.getif(r)]
[919]1556
[976]1557 # setup line finder
[1118]1558 fl.find_lines(r, mask, curedge)
[1446]1559 outmask=fl.get_mask()
[880]1560 f.set_scan(workscan, fl.get_mask())
1561 f.x = workscan._getabcissa(r)
1562 f.y = workscan._getspectrum(r)
1563 f.data = None
1564 f.fit()
[1446]1565
1566 # Show mask list
1567 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1568 msg = "mask range: "+str(masklist)
1569 asaplog.push(msg, False)
1570
1571 fpar = f.get_parameters()
[1061]1572 if plot:
1573 f.plot(residual=True)
1574 x = raw_input("Accept fit ( [y]/n ): ")
1575 if x.upper() == 'N':
[1446]1576 self.blpars.append(None)
1577 self.masklists.append(None)
[1061]1578 continue
[880]1579 workscan._setspectrum(f.fitter.getresidual(), r)
[1446]1580 self.blpars.append(fpar)
1581 self.masklists.append(masklist)
[1061]1582 if plot:
1583 f._p.unmap()
1584 f._p = None
1585 workscan._add_history("auto_poly_baseline", varlist)
[880]1586 if insitu:
1587 self._assign(workscan)
1588 else:
1589 return workscan
1590
[914]1591 def rotate_linpolphase(self, angle):
1592 """
1593 Rotate the phase of the complex polarization O=Q+iU correlation.
1594 This is always done in situ in the raw data. So if you call this
1595 function more than once then each call rotates the phase further.
1596 Parameters:
1597 angle: The angle (degrees) to rotate (add) by.
1598 Examples:
1599 scan.rotate_linpolphase(2.3)
1600 """
1601 varlist = vars()
[936]1602 self._math._rotate_linpolphase(self, angle)
[914]1603 self._add_history("rotate_linpolphase", varlist)
1604 print_log()
1605 return
[710]1606
[513]1607
[914]1608 def rotate_xyphase(self, angle):
1609 """
1610 Rotate the phase of the XY correlation. This is always done in situ
1611 in the data. So if you call this function more than once
1612 then each call rotates the phase further.
1613 Parameters:
1614 angle: The angle (degrees) to rotate (add) by.
1615 Examples:
1616 scan.rotate_xyphase(2.3)
1617 """
1618 varlist = vars()
[936]1619 self._math._rotate_xyphase(self, angle)
[914]1620 self._add_history("rotate_xyphase", varlist)
1621 print_log()
1622 return
1623
1624 def swap_linears(self):
1625 """
[1348]1626 Swap the linear polarisations XX and YY, or better the first two
1627 polarisations as this also works for circulars.
[914]1628 """
1629 varlist = vars()
[936]1630 self._math._swap_linears(self)
[914]1631 self._add_history("swap_linears", varlist)
1632 print_log()
1633 return
1634
1635 def invert_phase(self):
1636 """
1637 Invert the phase of the complex polarisation
1638 """
1639 varlist = vars()
[936]1640 self._math._invert_phase(self)
[914]1641 self._add_history("invert_phase", varlist)
1642 print_log()
1643 return
1644
[876]1645 def add(self, offset, insitu=None):
[513]1646 """
1647 Return a scan where all spectra have the offset added
1648 Parameters:
1649 offset: the offset
1650 insitu: if False a new scantable is returned.
1651 Otherwise, the scaling is done in-situ
1652 The default is taken from .asaprc (False)
1653 """
1654 if insitu is None: insitu = rcParams['insitu']
[876]1655 self._math._setinsitu(insitu)
[513]1656 varlist = vars()
[876]1657 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1658 s._add_history("add", varlist)
[876]1659 print_log()
1660 if insitu:
1661 self._assign(s)
1662 else:
[513]1663 return s
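    # Short usage sketch for add() (hypothetical scantable instance 'scan'):
    # with insitu=False a new, offset scantable is returned and the original
    # data are left untouched.
    #
    #   shifted = scan.add(1.5, insitu=False)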
1664
[1308]1665 def scale(self, factor, tsys=True, insitu=None):
[513]1666 """
1667 Return a scan where all spectra are scaled by the given 'factor'
1668 Parameters:
1669 factor: the scaling factor
1670 insitu: if False a new scantable is returned.
1671 Otherwise, the scaling is done in-situ
1672 The default is taken from .asaprc (False)
1673 tsys: if True (default) then apply the operation to Tsys
1674 as well as the data
1675 """
1676 if insitu is None: insitu = rcParams['insitu']
[876]1677 self._math._setinsitu(insitu)
[513]1678 varlist = vars()
[876]1679 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1680 s._add_history("scale", varlist)
[876]1681 print_log()
1682 if insitu:
1683 self._assign(s)
1684 else:
[513]1685 return s
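    # Short usage sketch for scale() (hypothetical scantable instance 'scan'):
    # tsys=True (the default) scales the system temperatures along with the
    # spectra; tsys=False leaves Tsys untouched.
    #
    #   calibrated = scan.scale(1.23, tsys=True, insitu=False)
    #   data_only = scan.scale(0.5, tsys=False, insitu=False)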
1686
[1348]1687 def auto_quotient(self, preserve=True, mode='paired'):
[670]1688 """
1689 This function builds quotients automatically.
1690 It assumes that the observation has the same number of
1691 "ons" and "offs".
1692 Parameters:
[710]1693 preserve: you can preserve (default) the continuum or
1694 remove it. The equations used are
[670]1695 preserve: Output = Toff * (on/off) - Toff
[1070]1696 remove: Output = Toff * (on/off) - Ton
[1348]1697 mode: the on/off detection mode
1698 'paired' (default)
1699 identifies 'off' scans by the
1700 trailing '_R' (Mopra/Parkes) or
1701 '_e'/'_w' (Tid) and matches
1702 on/off pairs from the observing pattern
1703 'time'
1704 finds the closest off in time
1705
[670]1706 """
[1348]1707 modes = ["time", "paired"]
[670]1708 if mode not in modes:
[876]1709 msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1710 raise ValueError(msg)
1711 varlist = vars()
[1348]1712 s = None
1713 if mode.lower() == "paired":
1714 basesel = self.get_selection()
[1356]1715 sel = selector()+basesel
1716 sel.set_query("SRCTYPE==1")
1717 self.set_selection(sel)
[1348]1718 offs = self.copy()
1719 sel.set_query("SRCTYPE==0")
[1356]1720 self.set_selection(sel)
[1348]1721 ons = self.copy()
1722 s = scantable(self._math._quotient(ons, offs, preserve))
1723 self.set_selection(basesel)
1724 elif mode.lower() == "time":
1725 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1726 s._add_history("auto_quotient", varlist)
[876]1727 print_log()
1728 return s
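    # Short usage sketch for auto_quotient() (hypothetical scantable instance
    # 'scan'): 'paired' matches on/off scans from the observing pattern, while
    # 'time' picks the closest off in time; preserve=False applies the
    # continuum-removing form Toff * (on/off) - Ton.
    #
    #   q1 = scan.auto_quotient()                            # paired, keep continuum
    #   q2 = scan.auto_quotient(preserve=False, mode='time')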
[710]1729
[1145]1730 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1731 """
[1143]1732 Form a quotient using "off" beams when observing in "MX" mode.
1733 Parameters:
[1145]1734 mask: an optional mask to be used when weight == 'stddev'
[1143]1735 weight: How to average the off beams. Default is 'median'.
[1145]1736 preserve: you can preserve (default) the continuum or
1737 remove it. The equations used are
1738 preserve: Output = Toff * (on/off) - Toff
1739 remove: Output = Toff * (on/off) - Ton
[1217]1740 """
[1143]1741 if mask is None: mask = ()
[1141]1742 varlist = vars()
1743 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1744 preoff = scantable(self._math._mx_extract(self, 'off'))
1745 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1746 from asapmath import quotient
[1145]1747 q = quotient(on, off, preserve)
[1143]1748 q._add_history("mx_quotient", varlist)
[1145]1749 print_log()
[1217]1750 return q
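    # Short usage sketch for mx_quotient() (hypothetical scantable instance
    # 'scan'): the off beams are averaged with the given weight before the
    # quotient is formed; a mask is only needed when weight='stddev'.
    #
    #   q = scan.mx_quotient(weight='median', preserve=True)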
[513]1751
[718]1752 def freq_switch(self, insitu=None):
1753 """
1754 Apply frequency switching to the data.
1755 Parameters:
1756 insitu: if False a new scantable is returned.
1757 Otherwise, the switching is done in-situ
1758 The default is taken from .asaprc (False)
1759 Example:
1760 none
1761 """
1762 if insitu is None: insitu = rcParams['insitu']
[876]1763 self._math._setinsitu(insitu)
[718]1764 varlist = vars()
[876]1765 s = scantable(self._math._freqswitch(self))
[1118]1766 s._add_history("freq_switch", varlist)
[876]1767 print_log()
1768 if insitu: self._assign(s)
1769 else: return s
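    # Short usage sketch for freq_switch() (hypothetical scantable instance
    # 'scan'): apply the switching to a copy rather than in-situ.
    #
    #   fs = scan.freq_switch(insitu=False)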
[718]1770
[780]1771 def recalc_azel(self):
1772 """
1773 Recalculate the azimuth and elevation for each position.
1774 Parameters:
1775 none
1776 Example:
1777 """
1778 varlist = vars()
[876]1779 self._recalcazel()
[780]1780 self._add_history("recalc_azel", varlist)
1781 print_log()
1782 return
1783
[513]1784 def __add__(self, other):
1785 varlist = vars()
1786 s = None
1787 if isinstance(other, scantable):
[1308]1788 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1789 elif isinstance(other, float):
[876]1790 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1791 else:
[718]1792 raise TypeError("Other input is not a scantable or float value")
[513]1793 s._add_history("operator +", varlist)
[718]1794 print_log()
[513]1795 return s
1796
1797 def __sub__(self, other):
1798 """
1799 implicit on all axes and on Tsys
1800 """
1801 varlist = vars()
1802 s = None
1803 if isinstance(other, scantable):
[1308]1804 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1805 elif isinstance(other, float):
[876]1806 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1807 else:
[718]1808 raise TypeError("Other input is not a scantable or float value")
[513]1809 s._add_history("operator -", varlist)
[718]1810 print_log()
[513]1811 return s
[710]1812
[513]1813 def __mul__(self, other):
1814 """
1815 implicit on all axes and on Tsys
1816 """
1817 varlist = vars()
1818 s = None
1819 if isinstance(other, scantable):
[1308]1820 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1821 elif isinstance(other, float):
[876]1822 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1823 else:
[718]1824 raise TypeError("Other input is not a scantable or float value")
[513]1825 s._add_history("operator *", varlist)
[718]1826 print_log()
[513]1827 return s
1828
[710]1829
[513]1830 def __div__(self, other):
1831 """
1832 implicit on all axes and on Tsys
1833 """
1834 varlist = vars()
1835 s = None
1836 if isinstance(other, scantable):
[1308]1837 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1838 elif isinstance(other, float):
1839 if other == 0.0:
[718]1840 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1841 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1842 else:
[718]1843 raise TypeError("Other input is not a scantable or float value")
[513]1844 s._add_history("operator /", varlist)
[718]1845 print_log()
[513]1846 return s
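    # Short sketch of the operator overloads above (hypothetical scantables
    # 'on' and 'off'): a float operand maps to the unary ops, a scantable
    # operand to the element-wise binary ops, and dividing by the float 0.0
    # raises ZeroDivisionError.
    #
    #   summed = on + off       # scantable + scantable
    #   scaled = on * 2.0       # scantable * float
    #   quot = on / off         # element-wise division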
1847
[530]1848 def get_fit(self, row=0):
1849 """
1850 Print or return the stored fits for a row in the scantable
1851 Parameters:
1852 row: the row to which the fit has been applied.
1853 """
1854 if row >= self.nrow():
1855 return
[976]1856 from asap.asapfit import asapfit
[530]1857 fit = asapfit(self._getfit(row))
[718]1858 if rcParams['verbose']:
[530]1859 print fit
1860 return
1861 else:
1862 return fit.as_dict()
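    # Short usage sketch for get_fit() (hypothetical scantable instance
    # 'scan'): with rcParams['verbose'] the stored fit is printed, otherwise
    # a dictionary of the fit parameters is returned.
    #
    #   pars = scan.get_fit(row=0)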
1863
[484]1864 def _add_history(self, funcname, parameters):
1865 # create date
1866 sep = "##"
1867 from datetime import datetime
1868 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1869 hist = dstr+sep
1870 hist += funcname + sep
1871 if parameters.has_key('self'): del parameters['self']
[1118]1872 for k, v in parameters.iteritems():
[484]1873 if type(v) is dict:
[1118]1874 for k2, v2 in v.iteritems():
[484]1875 hist += k2
1876 hist += "="
[1118]1877 if isinstance(v2, scantable):
[484]1878 hist += 'scantable'
1879 elif k2 == 'mask':
[1118]1880 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1881 hist += str(self._zip_mask(v2))
1882 else:
1883 hist += str(v2)
[484]1884 else:
[513]1885 hist += str(v2)
[484]1886 else:
1887 hist += k
1888 hist += "="
[1118]1889 if isinstance(v, scantable):
[484]1890 hist += 'scantable'
1891 elif k == 'mask':
[1118]1892 if isinstance(v, list) or isinstance(v, tuple):
[513]1893 hist += str(self._zip_mask(v))
1894 else:
1895 hist += str(v)
[484]1896 else:
1897 hist += str(v)
1898 hist += sep
1899 hist = hist[:-2] # remove trailing '##'
1900 self._addhistory(hist)
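    # Illustrative example of the history string built above (parameter order
    # follows dict iteration and is therefore arbitrary):
    #   "2009/03/17 10:15:42##scale##factor=1.23##tsys=True##insitu=False"
    # i.e. date, function name and 'key=value' pairs joined by the '##' separator.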
1901
[710]1902
[484]1903 def _zip_mask(self, mask):
1904 mask = list(mask)
1905 i = 0
1906 segments = []
1907 while mask[i:].count(1):
1908 i += mask[i:].index(1)
1909 if mask[i:].count(0):
1910 j = i + mask[i:].index(0)
1911 else:
[710]1912 j = len(mask)
[1118]1913 segments.append([i, j])
[710]1914 i = j
[484]1915 return segments
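    # Worked example of the run-length packing above: each run of 1s in the
    # mask becomes a [start, end) channel segment, e.g.
    #   _zip_mask([0, 1, 1, 1, 0, 0, 1, 1]) -> [[1, 4], [6, 8]]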
[714]1916
[626]1917 def _get_ordinate_label(self):
1918 fu = "("+self.get_fluxunit()+")"
1919 import re
1920 lbl = "Intensity"
[1118]1921 if re.match(".K.", fu):
[626]1922 lbl = "Brightness Temperature "+ fu
[1118]1923 elif re.match(".Jy.", fu):
[626]1924 lbl = "Flux density "+ fu
1925 return lbl
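    # Example labels produced above: a flux unit of 'K' yields
    # "Brightness Temperature (K)", 'Jy' yields "Flux density (Jy)", and
    # anything else falls back to the plain "Intensity".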
[710]1926
[876]1927 def _check_ifs(self):
1928 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1929 nchans = filter(lambda t: t > 0, nchans)
[876]1930 return (max(nchans) == min(nchans)) # all IFs must share the same nchan
[976]1931
[1496]1932 def _fill(self, names, unit, average, getpt):
[976]1933 import os
1934 from asap._asap import stfiller
1935 first = True
1936 fullnames = []
1937 for name in names:
1938 name = os.path.expandvars(name)
1939 name = os.path.expanduser(name)
1940 if not os.path.exists(name):
1941 msg = "File '%s' does not exist" % (name)
1942 if rcParams['verbose']:
1943 asaplog.push(msg)
1944 print asaplog.pop().strip()
1945 return
1946 raise IOError(msg)
1947 fullnames.append(name)
1948 if average:
1949 asaplog.push('Auto averaging integrations')
[1079]1950 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1951 for name in fullnames:
[1073]1952 tbl = Scantable(stype)
1953 r = stfiller(tbl)
[976]1954 msg = "Importing %s..." % (name)
[1118]1955 asaplog.push(msg, False)
[976]1956 print_log()
[1496]1957 r._open(name, -1, -1, getpt)
[976]1958 r._read()
[1073]1959 #tbl = r._getdata()
[976]1960 if average:
[1118]1961 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1962 #tbl = tbl2
1963 if not first:
1964 tbl = self._math._merge([self, tbl])
1965 #tbl = tbl2
1966 Scantable.__init__(self, tbl)
1967 r._close()
[1118]1968 del r, tbl
[976]1969 first = False
1970 if unit is not None:
1971 self.set_fluxunit(unit)
[1446]1972 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]1973